steamship.data.plugin package#

Submodules#

steamship.data.plugin.hosting module#

class steamship.data.plugin.hosting.HostingCpu(value)[source]#

Bases: str, Enum

The amount of CPU required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.hosting.HostingEnvironment(value)[source]#

Bases: str, Enum

The software environment required for deployment.

PYTHON38 = 'python38'#
STEAMSHIP_PYTORCH_CPU = 'inferenceCpu'#
class steamship.data.plugin.hosting.HostingMemory(value)[source]#

Bases: str, Enum

The amount of memory required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.hosting.HostingTimeout(value)[source]#

Bases: str, Enum

The request timeout required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.hosting.HostingType(value)[source]#

Bases: str, Enum

The type of hosting provider to deploy to.

ECS = 'ecs'#
LAMBDA = 'lambda'#

steamship.data.plugin.index_plugin_instance module#

class steamship.data.plugin.index_plugin_instance.EmbedderInvocation(*, pluginHandle: str, instanceHandle: Optional[str] = None, config: Optional[Dict[str, Any]] = None, version: Optional[str] = None, fetchIfExists: bool = True)[source]#

Bases: CamelModel

The parameters capable of creating/fetching an Embedder (Tagger) Plugin Instance.

config: Optional[Dict[str, Any]]#
fetch_if_exists: bool#
instance_handle: Optional[str]#
plugin_handle: str#
version: Optional[str]#
class steamship.data.plugin.index_plugin_instance.EmbeddingIndexPluginInstance(*, client: Client = None, id: str = None, handle: str = None, pluginId: str = None, pluginVersionId: str = None, pluginHandle: Optional[str] = None, pluginVersionHandle: Optional[str] = None, workspaceId: Optional[str] = None, userId: str = None, config: Dict[str, Any] = None, hostingType: Optional[HostingType] = None, hostingCpu: Optional[HostingCpu] = None, hostingMemory: Optional[HostingMemory] = None, hostingTimeout: Optional[HostingTimeout] = None, hostingEnvironment: Optional[HostingEnvironment] = None, initStatus: Optional[InvocableInitStatus] = None, embedder: PluginInstance = None, index: EmbeddingIndex = None)[source]#

Bases: PluginInstance

A persistent, read-optimized index over embeddings.

This is currently implemented as an object which behaves like a PluginInstance even though it isn’t from an implementation perspective on the back-end.

static create(client: Any, plugin_id: Optional[str] = None, plugin_handle: Optional[str] = None, plugin_version_id: Optional[str] = None, plugin_version_handle: Optional[str] = None, handle: Optional[str] = None, fetch_if_exists: bool = True, config: Optional[Dict[str, Any]] = None) EmbeddingIndexPluginInstance[source]#

Create a class that simulates an embedding index re-implemented as a PluginInstance.

delete()[source]#

Delete the EmbeddingIndexPluginInstance.

For now, we will have this correspond to deleting the index but not the embedder. This is likely a temporary design.

embedder: PluginInstance#
index: EmbeddingIndex#
insert(tags: Union[Tag, List[Tag]], allow_long_records: bool = False)[source]#

Insert tags into the embedding index.

reset()[source]#
search(query: str, k: Optional[int] = None) Task[SearchResults][source]#

Search the embedding index.

This wrapper implementation simply projects the Hit data structure into a Tag

class steamship.data.plugin.index_plugin_instance.SearchResult(*, tag: Optional[Tag] = None, score: Optional[float] = None)[source]#

Bases: CamelModel

A single scored search result – which is always a tag.

This class is intended to eventually replace the QueryResult object currently used with the Embedding layer.

static from_query_result(query_result: QueryResult) SearchResult[source]#
score: Optional[float]#
tag: Optional[Tag]#
class steamship.data.plugin.index_plugin_instance.SearchResults(*, items: List[SearchResult] = None)[source]#

Bases: CamelModel

Results of a search operation – which is always a list of ranked tags.

This class is intended to eventually replace the QueryResults object currently used with the Embedding layer. TODO: add in paging support.

static from_query_results(query_results: QueryResults) SearchResults[source]#
items: List[SearchResult]#

steamship.data.plugin.plugin module#

class steamship.data.plugin.plugin.CreatePluginRequest(*, trainingPlatform: Optional[HostingType] = None, id: str = None, type: str = None, transport: str = None, isPublic: bool = None, handle: str = None, description: str = None, metadata: str = None, fetchIfExists: bool = False)[source]#

Bases: Request

description: str#
fetch_if_exists: bool#
handle: str#
id: str#
is_public: bool#
metadata: str#
training_platform: Optional[HostingType]#
transport: str#
type: str#
class steamship.data.plugin.plugin.ListPluginsRequest(*, pageSize: Optional[int] = None, pageToken: Optional[str] = None, sortOrder: Optional[SortOrder] = SortOrder.DESC, type: Optional[str] = None)[source]#

Bases: ListRequest

type: Optional[str]#
class steamship.data.plugin.plugin.ListPluginsResponse(*, nextPageToken: Optional[str] = None, plugins: List[Plugin])[source]#

Bases: ListResponse

plugins: List[Plugin]#
class steamship.data.plugin.plugin.Plugin(*, client: Client = None, id: str = None, type: str = None, transport: str = None, isPublic: bool = None, trainingPlatform: Optional[HostingType] = None, handle: str = None, description: str = None, metadata: str = None, profile: Optional[Manifest] = None, readme: Optional[str] = None, userId: Optional[str] = None)[source]#

Bases: CamelModel

client: Client#
static create(client: Client, description: str, type_: str, transport: str, is_public: bool, handle: Optional[str] = None, training_platform: Optional[HostingType] = None, metadata: Optional[Union[str, Dict, List]] = None, fetch_if_exists: bool = False) Plugin[source]#
description: str#
static get(client: Client, handle: str)[source]#
handle: str#
id: str#
is_public: bool#
static list(client: Client, t: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, sort_order: Optional[SortOrder] = SortOrder.DESC) ListPluginsResponse[source]#
metadata: str#
classmethod parse_obj(obj: Any) BaseModel[source]#
profile: Optional[Manifest]#
readme: Optional[str]#
training_platform: Optional[HostingType]#
transport: str#
type: str#
update(client: Client) Plugin[source]#
user_id: Optional[str]#
class steamship.data.plugin.plugin.PluginAdapterType(value)[source]#

Bases: str, Enum

An enumeration.

huggingface = 'huggingface'#
openai = 'openai'#
steamship_docker = 'steamshipDocker'#
steamship_sagemaker = 'steamshipSagemaker'#
class steamship.data.plugin.plugin.PluginTargetType(value)[source]#

Bases: str, Enum

An enumeration.

FILE = 'file'#
WORKSPACE = 'workspace'#
class steamship.data.plugin.plugin.PluginType(value)[source]#

Bases: str, Enum

An enumeration.

classifier = 'classifier'#
embedder = 'embedder'#
generator = 'generator'#
parser = 'parser'#
tagger = 'tagger'#
class steamship.data.plugin.plugin.PluginUpdateRequest(*, id: Optional[str] = None, handle: Optional[str] = None, description: Optional[str] = None, profile: Optional[Manifest] = None, readme: Optional[str] = None)[source]#

Bases: UpdateRequest

description: Optional[str]#
handle: Optional[str]#
id: Optional[str]#
profile: Optional[Manifest]#
readme: Optional[str]#

steamship.data.plugin.plugin_instance module#

class steamship.data.plugin.plugin_instance.CreatePluginInstanceRequest(*, id: str = None, pluginId: str = None, pluginHandle: str = None, pluginVersionId: str = None, pluginVersionHandle: str = None, handle: str = None, fetchIfExists: bool = None, config: Dict[str, Any] = None)[source]#

Bases: Request

config: Dict[str, Any]#
fetch_if_exists: bool#
handle: str#
id: str#
plugin_handle: str#
plugin_id: str#
plugin_version_handle: str#
plugin_version_id: str#
class steamship.data.plugin.plugin_instance.PluginInstance(*, client: Client = None, id: str = None, handle: str = None, pluginId: str = None, pluginVersionId: str = None, pluginHandle: Optional[str] = None, pluginVersionHandle: Optional[str] = None, workspaceId: Optional[str] = None, userId: str = None, config: Dict[str, Any] = None, hostingType: Optional[HostingType] = None, hostingCpu: Optional[HostingCpu] = None, hostingMemory: Optional[HostingMemory] = None, hostingTimeout: Optional[HostingTimeout] = None, hostingEnvironment: Optional[HostingEnvironment] = None, initStatus: Optional[InvocableInitStatus] = None)[source]#

Bases: CamelModel

client: Client#
config: Dict[str, Any]#
static create(client: Client, plugin_id: Optional[str] = None, plugin_handle: Optional[str] = None, plugin_version_id: Optional[str] = None, plugin_version_handle: Optional[str] = None, handle: Optional[str] = None, fetch_if_exists: bool = True, config: Optional[Dict[str, Any]] = None) PluginInstance[source]#

Create a plugin instance

When handle is empty, the engine will automatically assign one. fetch_if_exists controls whether we want to re-use an existing plugin instance or not.

delete() PluginInstance[source]#
generate(input_file_id: str = None, input_file_start_block_index: int = None, input_file_end_block_index: Optional[int] = None, input_file_block_index_list: Optional[List[int]] = None, text: Optional[str] = None, block_query: Optional[str] = None, append_output_to_file: bool = False, output_file_id: Optional[str] = None, make_output_public: Optional[bool] = None, options: Optional[dict] = None) Task[GenerateResponse][source]#

See GenerateRequest for description of parameter options

static get(client: Client, handle: str) PluginInstance[source]#
get_training_parameters(training_request: TrainingParameterPluginInput) TrainingParameterPluginOutput[source]#
handle: str#
hosting_cpu: Optional[HostingCpu]#
hosting_environment: Optional[HostingEnvironment]#
hosting_memory: Optional[HostingMemory]#
hosting_timeout: Optional[HostingTimeout]#
hosting_type: Optional[HostingType]#
id: str#
init_status: Optional[InvocableInitStatus]#
classmethod parse_obj(obj: Any) BaseModel[source]#
plugin_handle: Optional[str]#
plugin_id: str#
plugin_version_handle: Optional[str]#
plugin_version_id: str#
refresh_init_status()[source]#
tag(doc: Union[str, File]) Task[TagResponse][source]#
train(training_request: TrainingParameterPluginInput = None, training_epochs: Optional[int] = None, export_query: Optional[str] = None, testing_holdout_percent: Optional[float] = None, test_split_seed: Optional[int] = None, training_params: Optional[Dict] = None, inference_params: Optional[Dict] = None) Task[TrainPluginOutput][source]#

Train a plugin instance. Please provide either training_request OR the other parameters; passing training_request ignores all other parameters, but is kept for backwards compatibility.

user_id: str#
wait_for_init(max_timeout_s: float = 180, retry_delay_s: float = 1)[source]#

Polls and blocks until the init has succeeded or failed (or timeout reached).

Parameters
  • max_timeout_s (float) – Max timeout in seconds. Default: 180s. After this timeout, an exception will be thrown.

  • retry_delay_s (float) – Delay between status checks. Default: 1s.

workspace_id: Optional[str]#

steamship.data.plugin.plugin_version module#

class steamship.data.plugin.plugin_version.CreatePluginVersionRequest(*, pluginId: str = None, handle: str = None, hostingMemory: Optional[HostingMemory] = None, hostingTimeout: Optional[HostingTimeout] = None, hostingHandler: str = None, isPublic: bool = None, isDefault: bool = None, type: str = 'file', configTemplate: str = None)[source]#

Bases: Request

config_template: str#
handle: str#
hosting_handler: str#
hosting_memory: Optional[HostingMemory]#
hosting_timeout: Optional[HostingTimeout]#
is_default: bool#
is_public: bool#
plugin_id: str#
type: str#
class steamship.data.plugin.plugin_version.ListPluginVersionsRequest(*, handle: str, pluginId: str)[source]#

Bases: Request

handle: str#
plugin_id: str#
class steamship.data.plugin.plugin_version.ListPluginVersionsResponse(*, plugins: List[PluginVersion])[source]#

Bases: Response

plugins: List[PluginVersion]#
class steamship.data.plugin.plugin_version.PluginVersion(*, client: Client = None, id: str = None, pluginId: str = None, handle: str = None, hostingMemory: Optional[HostingMemory] = None, hostingTimeout: Optional[HostingTimeout] = None, hostingHandler: str = None, isPublic: bool = None, isDefault: bool = None, configTemplate: Dict[str, Any] = None)[source]#

Bases: CamelModel

client: Client#
config_template: Dict[str, Any]#
static create(client: Client, handle: str, plugin_id: str = None, filename: str = None, filebytes: bytes = None, hosting_memory: Optional[HostingMemory] = None, hosting_timeout: Optional[HostingTimeout] = None, hosting_handler: str = None, is_public: bool = None, is_default: bool = None, config_template: Dict[str, Any] = None) Task[PluginVersion][source]#
handle: str#
hosting_handler: str#
hosting_memory: Optional[HostingMemory]#
hosting_timeout: Optional[HostingTimeout]#
id: str#
is_default: bool#
is_public: bool#
static list(client: Client, plugin_id: Optional[str] = None, handle: Optional[str] = None, public: bool = True) ListPluginVersionsResponse[source]#
classmethod parse_obj(obj: Any) BaseModel[source]#
plugin_id: str#

steamship.data.plugin.prompt_generation_plugin_instance module#

class steamship.data.plugin.prompt_generation_plugin_instance.PromptGenerationPluginInstance(*, client: Client = None, id: str = None, handle: str = None, pluginId: str = None, pluginVersionId: str = None, pluginHandle: Optional[str] = None, pluginVersionHandle: Optional[str] = None, workspaceId: Optional[str] = None, userId: str = None, config: Dict[str, Any] = None, hostingType: Optional[HostingType] = None, hostingCpu: Optional[HostingCpu] = None, hostingMemory: Optional[HostingMemory] = None, hostingTimeout: Optional[HostingTimeout] = None, hostingEnvironment: Optional[HostingEnvironment] = None, initStatus: Optional[InvocableInitStatus] = None)[source]#

Bases: PluginInstance

An instance of a configured prompt completion service such as GPT-3.

The generate method synchronously invokes the prompt against a set of variables that parameterize it. The return value is a single string.

Example Usage:

llm = Steamship.use('prompt-generation-default', config={ "temperature": 0.9 }) PROMPT = "Greet {name} as if he were a {relation}." greeting = llm.generate(PROMPT, {"name": "Ted", "relation": "old friend"})

static create(client: Client, plugin_id: Optional[str] = None, plugin_handle: Optional[str] = None, plugin_version_id: Optional[str] = None, plugin_version_handle: Optional[str] = None, handle: Optional[str] = None, fetch_if_exists: bool = True, config: Optional[Dict[str, Any]] = None) PromptGenerationPluginInstance[source]#

Create a plugin instance

When handle is empty, the engine will automatically assign one. fetch_if_exists controls whether we want to re-use an existing plugin instance or not.

generate(prompt: str, variables: Optional[Dict] = None, clean_output: bool = True) str[source]#

Complete the provided prompt, interpolating any variables.

Module contents#

class steamship.data.plugin.HostingCpu(value)[source]#

Bases: str, Enum

The amount of CPU required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.HostingEnvironment(value)[source]#

Bases: str, Enum

The software environment required for deployment.

PYTHON38 = 'python38'#
STEAMSHIP_PYTORCH_CPU = 'inferenceCpu'#
class steamship.data.plugin.HostingMemory(value)[source]#

Bases: str, Enum

The amount of memory required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.HostingTimeout(value)[source]#

Bases: str, Enum

The request timeout required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.HostingType(value)[source]#

Bases: str, Enum

The type of hosting provider to deploy to.

ECS = 'ecs'#
LAMBDA = 'lambda'#
class steamship.data.plugin.Plugin(*, client: Client = None, id: str = None, type: str = None, transport: str = None, isPublic: bool = None, trainingPlatform: Optional[HostingType] = None, handle: str = None, description: str = None, metadata: str = None, profile: Optional[Manifest] = None, readme: Optional[str] = None, userId: Optional[str] = None)[source]#

Bases: CamelModel

client: Client#
static create(client: Client, description: str, type_: str, transport: str, is_public: bool, handle: Optional[str] = None, training_platform: Optional[HostingType] = None, metadata: Optional[Union[str, Dict, List]] = None, fetch_if_exists: bool = False) Plugin[source]#
description: str#
static get(client: Client, handle: str)[source]#
handle: str#
id: str#
is_public: bool#
static list(client: Client, t: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, sort_order: Optional[SortOrder] = SortOrder.DESC) ListPluginsResponse[source]#
metadata: str#
classmethod parse_obj(obj: Any) BaseModel[source]#
profile: Optional[Manifest]#
readme: Optional[str]#
training_platform: Optional[HostingType]#
transport: str#
type: str#
update(client: Client) Plugin[source]#
user_id: Optional[str]#
class steamship.data.plugin.PluginAdapterType(value)[source]#

Bases: str, Enum

An enumeration.

huggingface = 'huggingface'#
openai = 'openai'#
steamship_docker = 'steamshipDocker'#
steamship_sagemaker = 'steamshipSagemaker'#
class steamship.data.plugin.PluginInstance(*, client: Client = None, id: str = None, handle: str = None, pluginId: str = None, pluginVersionId: str = None, pluginHandle: Optional[str] = None, pluginVersionHandle: Optional[str] = None, workspaceId: Optional[str] = None, userId: str = None, config: Dict[str, Any] = None, hostingType: Optional[HostingType] = None, hostingCpu: Optional[HostingCpu] = None, hostingMemory: Optional[HostingMemory] = None, hostingTimeout: Optional[HostingTimeout] = None, hostingEnvironment: Optional[HostingEnvironment] = None, initStatus: Optional[InvocableInitStatus] = None)[source]#

Bases: CamelModel

client: Client#
config: Dict[str, Any]#
static create(client: Client, plugin_id: Optional[str] = None, plugin_handle: Optional[str] = None, plugin_version_id: Optional[str] = None, plugin_version_handle: Optional[str] = None, handle: Optional[str] = None, fetch_if_exists: bool = True, config: Optional[Dict[str, Any]] = None) PluginInstance[source]#

Create a plugin instance

When handle is empty, the engine will automatically assign one. fetch_if_exists controls whether we want to re-use an existing plugin instance or not.

delete() PluginInstance[source]#
generate(input_file_id: str = None, input_file_start_block_index: int = None, input_file_end_block_index: Optional[int] = None, input_file_block_index_list: Optional[List[int]] = None, text: Optional[str] = None, block_query: Optional[str] = None, append_output_to_file: bool = False, output_file_id: Optional[str] = None, make_output_public: Optional[bool] = None, options: Optional[dict] = None) Task[GenerateResponse][source]#

See GenerateRequest for description of parameter options

static get(client: Client, handle: str) PluginInstance[source]#
get_training_parameters(training_request: TrainingParameterPluginInput) TrainingParameterPluginOutput[source]#
handle: str#
hosting_cpu: Optional[HostingCpu]#
hosting_environment: Optional[HostingEnvironment]#
hosting_memory: Optional[HostingMemory]#
hosting_timeout: Optional[HostingTimeout]#
hosting_type: Optional[HostingType]#
id: str#
init_status: Optional[InvocableInitStatus]#
classmethod parse_obj(obj: Any) BaseModel[source]#
plugin_handle: Optional[str]#
plugin_id: str#
plugin_version_handle: Optional[str]#
plugin_version_id: str#
refresh_init_status()[source]#
tag(doc: Union[str, File]) Task[TagResponse][source]#
train(training_request: TrainingParameterPluginInput = None, training_epochs: Optional[int] = None, export_query: Optional[str] = None, testing_holdout_percent: Optional[float] = None, test_split_seed: Optional[int] = None, training_params: Optional[Dict] = None, inference_params: Optional[Dict] = None) Task[TrainPluginOutput][source]#

Train a plugin instance. Please provide either training_request OR the other parameters; passing training_request ignores all other parameters, but is kept for backwards compatibility.

user_id: str#
wait_for_init(max_timeout_s: float = 180, retry_delay_s: float = 1)[source]#

Polls and blocks until the init has succeeded or failed (or timeout reached).

Parameters
  • max_timeout_s (float) – Max timeout in seconds. Default: 180s. After this timeout, an exception will be thrown.

  • retry_delay_s (float) – Delay between status checks. Default: 1s.

workspace_id: Optional[str]#
class steamship.data.plugin.PluginTargetType(value)[source]#

Bases: str, Enum

An enumeration.

FILE = 'file'#
WORKSPACE = 'workspace'#
class steamship.data.plugin.PluginType(value)[source]#

Bases: str, Enum

An enumeration.

classifier = 'classifier'#
embedder = 'embedder'#
generator = 'generator'#
parser = 'parser'#
tagger = 'tagger'#
class steamship.data.plugin.PluginVersion(*, client: Client = None, id: str = None, pluginId: str = None, handle: str = None, hostingMemory: Optional[HostingMemory] = None, hostingTimeout: Optional[HostingTimeout] = None, hostingHandler: str = None, isPublic: bool = None, isDefault: bool = None, configTemplate: Dict[str, Any] = None)[source]#

Bases: CamelModel

client: Client#
config_template: Dict[str, Any]#
static create(client: Client, handle: str, plugin_id: str = None, filename: str = None, filebytes: bytes = None, hosting_memory: Optional[HostingMemory] = None, hosting_timeout: Optional[HostingTimeout] = None, hosting_handler: str = None, is_public: bool = None, is_default: bool = None, config_template: Dict[str, Any] = None) Task[PluginVersion][source]#
handle: str#
hosting_handler: str#
hosting_memory: Optional[HostingMemory]#
hosting_timeout: Optional[HostingTimeout]#
id: str#
is_default: bool#
is_public: bool#
static list(client: Client, plugin_id: Optional[str] = None, handle: Optional[str] = None, public: bool = True) ListPluginVersionsResponse[source]#
classmethod parse_obj(obj: Any) BaseModel[source]#
plugin_id: str#