steamship.data.plugin package#

Submodules#

steamship.data.plugin.hosting module#

class steamship.data.plugin.hosting.HostingCpu(value)[source]#

Bases: str, enum.Enum

The amount of CPU required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.hosting.HostingEnvironment(value)[source]#

Bases: str, enum.Enum

The software environment required for deployment.

PYTHON38 = 'python38'#
STEAMSHIP_PYTORCH_CPU = 'inferenceCpu'#
class steamship.data.plugin.hosting.HostingMemory(value)[source]#

Bases: str, enum.Enum

The amount of memory required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.hosting.HostingTimeout(value)[source]#

Bases: str, enum.Enum

The request timeout required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.hosting.HostingType(value)[source]#

Bases: str, enum.Enum

The type of hosting provider to deploy to.

ECS = 'ecs'#
LAMBDA = 'lambda'#
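
The hosting enums above subclass str, so their members serialize directly as plain string values. A minimal sketch of selecting deployment resources; the specific pairing below is illustrative, and the concrete CPU, memory, and timeout values each size maps to are resolved by the platform from the HostingType it is combined with:

    from steamship.data.plugin.hosting import (
        HostingCpu,
        HostingEnvironment,
        HostingMemory,
        HostingTimeout,
        HostingType,
    )

    # Each member is both an Enum and a str, so it compares and serializes as its string value.
    assert HostingMemory.LG == "lg"
    assert HostingType.LAMBDA.value == "lambda"

    # An illustrative resource selection for a Lambda-hosted deployment.
    deployment = {
        "hostingType": HostingType.LAMBDA,
        "hostingEnvironment": HostingEnvironment.PYTHON38,
        "hostingCpu": HostingCpu.MD,
        "hostingMemory": HostingMemory.LG,
        "hostingTimeout": HostingTimeout.XL,
    }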

steamship.data.plugin.index_plugin_instance module#

class steamship.data.plugin.index_plugin_instance.EmbedderInvocation(*, pluginHandle: str, instanceHandle: Optional[str] = None, config: Optional[Dict[str, Any]] = None, version: Optional[str] = None, fetchIfExists: bool = True)[source]#

Bases: steamship.base.model.CamelModel

The parameters used to create or fetch an Embedder (Tagger) Plugin Instance.

config: Optional[Dict[str, Any]]#
fetch_if_exists: bool#
instance_handle: Optional[str]#
plugin_handle: str#
version: Optional[str]#
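
A minimal sketch of constructing an EmbedderInvocation, assuming the CamelModel base allows population by the snake_case field names listed above; the handle and config values are illustrative:

    from steamship.data.plugin.index_plugin_instance import EmbedderInvocation

    invocation = EmbedderInvocation(
        plugin_handle="my-embedder",        # hypothetical embedder plugin handle
        instance_handle="my-embedder-1",    # optional; omit to let the engine assign one
        config={"dimensionality": 768},     # hypothetical plugin-specific configuration
        fetch_if_exists=True,
    )
    print(invocation.dict(by_alias=True))   # pydantic serialization using camelCase aliases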
class steamship.data.plugin.index_plugin_instance.EmbeddingIndexPluginInstance(*, client: steamship.base.client.Client = None, id: str = None, handle: str = None, pluginId: str = None, pluginVersionId: str = None, workspaceId: Optional[str] = None, userId: str = None, config: Dict[str, Any] = None, hostingType: Optional[steamship.data.plugin.hosting.HostingType] = None, hostingCpu: Optional[steamship.data.plugin.hosting.HostingCpu] = None, hostingMemory: Optional[steamship.data.plugin.hosting.HostingMemory] = None, hostingTimeout: Optional[steamship.data.plugin.hosting.HostingTimeout] = None, hostingEnvironment: Optional[steamship.data.plugin.hosting.HostingEnvironment] = None, embedder: steamship.data.plugin.plugin_instance.PluginInstance = None, index: steamship.data.embeddings.EmbeddingIndex = None)[source]#

Bases: steamship.data.plugin.plugin_instance.PluginInstance

A persistent, read-optimized index over embeddings.

This is currently implemented as an object that behaves like a PluginInstance, even though it is not one from an implementation perspective on the back-end.

client: steamship.base.client.Client#
static create(client: Any, plugin_id: Optional[str] = None, plugin_handle: Optional[str] = None, plugin_version_id: Optional[str] = None, plugin_version_handle: Optional[str] = None, handle: Optional[str] = None, fetch_if_exists: bool = True, config: Optional[Dict[str, Any]] = None) steamship.data.plugin.index_plugin_instance.EmbeddingIndexPluginInstance[source]#

Create an EmbeddingIndexPluginInstance that simulates an embedding index re-implemented as a PluginInstance.

delete()[source]#

Delete the EmbeddingIndexPluginInstance.

For now, this corresponds to deleting the index but not the embedder. This is likely a temporary design.

embedder: steamship.data.plugin.plugin_instance.PluginInstance#
index: steamship.data.embeddings.EmbeddingIndex#
insert(tags: Union[steamship.data.tags.tag.Tag, List[steamship.data.tags.tag.Tag]])[source]#

Insert tags into the embedding index.

search(query: str, k: Optional[int] = None) steamship.data.plugin.index_plugin_instance.Task[SearchResults][source]#

Search the embedding index.

This wrapper implementation simply projects the Hit data structure into a Tag.
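
A hedged end-to-end sketch of the class above: create (or fetch) an index, insert a Tag, and run a search. The plugin handle, config shape, and Tag fields are illustrative assumptions, and the returned Task is assumed to expose wait() and output:

    from steamship import Steamship, Tag
    from steamship.data.plugin.index_plugin_instance import EmbeddingIndexPluginInstance

    client = Steamship()  # assumes a configured API key and workspace

    index = EmbeddingIndexPluginInstance.create(
        client,
        plugin_handle="embedding-index",                        # hypothetical index plugin handle
        config={"embedder": {"plugin_handle": "my-embedder"}},  # hypothetical embedder invocation
        fetch_if_exists=True,
    )

    # insert() accepts a single Tag or a list of Tags; the exact Tag fields the embedder
    # consumes are plugin-specific and shown here only as an example.
    index.insert(Tag(kind="doc", name="sentence", value={"text": "Ships steam across the sea."}))

    # search() returns a Task[SearchResults]; wait for it, then read the ranked hits.
    task = index.search("ocean travel", k=3)
    task.wait()
    for result in task.output.items:
        print(result.score, result.tag)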

class steamship.data.plugin.index_plugin_instance.SearchResult(*, tag: Optional[steamship.data.tags.tag.Tag] = None, score: Optional[float] = None)[source]#

Bases: steamship.base.model.CamelModel

A single scored search result – which is always a tag.

This class is intended to eventually replace the QueryResult object currently used with the Embedding layer.

static from_query_result(query_result: steamship.data.embeddings.QueryResult) steamship.data.plugin.index_plugin_instance.SearchResult[source]#
score: Optional[float]#
tag: Optional[steamship.data.tags.tag.Tag]#
class steamship.data.plugin.index_plugin_instance.SearchResults(*, items: List[steamship.data.plugin.index_plugin_instance.SearchResult] = None)[source]#

Bases: steamship.base.model.CamelModel

Results of a search operation – which is always a list of ranked tags.

This class is intended to eventually replace the QueryResults object currently used with the Embedding layer. TODO: add in paging support.

static from_query_results(query_results: steamship.data.embeddings.QueryResults) steamship.data.plugin.index_plugin_instance.SearchResults[source]#
items: List[steamship.data.plugin.index_plugin_instance.SearchResult]#

steamship.data.plugin.plugin module#

class steamship.data.plugin.plugin.CreatePluginRequest(*, trainingPlatform: Optional[steamship.data.plugin.hosting.HostingType] = None, id: str = None, type: str = None, transport: str = None, isPublic: bool = None, handle: str = None, description: str = None, metadata: str = None)[source]#

Bases: steamship.base.request.Request

description: str#
handle: str#
id: str#
is_public: bool#
metadata: str#
training_platform: Optional[steamship.data.plugin.hosting.HostingType]#
transport: str#
type: str#
class steamship.data.plugin.plugin.ListPluginsRequest(*, type: Optional[str] = None)[source]#

Bases: steamship.base.request.Request

type: Optional[str]#
class steamship.data.plugin.plugin.ListPluginsResponse(*, plugins: List[Plugin])[source]#

Bases: steamship.base.response.Response

plugins: List[steamship.data.plugin.plugin.Plugin]#
class steamship.data.plugin.plugin.Plugin(*, client: steamship.base.client.Client = None, id: str = None, type: str = None, transport: str = None, isPublic: bool = None, trainingPlatform: Optional[steamship.data.plugin.hosting.HostingType] = None, handle: str = None, description: str = None, metadata: str = None)[source]#

Bases: steamship.base.model.CamelModel

client: steamship.base.client.Client#
static create(client: steamship.base.client.Client, description: str, type_: str, transport: str, is_public: bool, handle: Optional[str] = None, training_platform: Optional[steamship.data.plugin.hosting.HostingType] = None, metadata: Optional[Union[str, Dict, List]] = None) steamship.data.plugin.plugin.Plugin[source]#
description: str#
static get(client: steamship.base.client.Client, handle: str)[source]#
handle: str#
id: str#
is_public: bool#
static list(client: steamship.base.client.Client, t: Optional[str] = None) steamship.data.plugin.plugin.ListPluginsResponse[source]#
metadata: str#
classmethod parse_obj(obj: Any) pydantic.main.BaseModel[source]#
training_platform: Optional[steamship.data.plugin.hosting.HostingType]#
transport: str#
type: str#
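
A sketch of registering and re-fetching a plugin with the static methods above; the handle, description, and transport string are illustrative assumptions:

    from steamship import Steamship
    from steamship.data.plugin.plugin import Plugin

    client = Steamship()  # assumes a configured client

    plugin = Plugin.create(
        client,
        description="Tags sentences with sentiment.",
        type_="tagger",
        transport="jsonOverHttp",      # assumed transport identifier
        is_public=False,
        handle="my-sentiment-tagger",  # hypothetical handle
    )

    # Later, fetch the same plugin by handle.
    same_plugin = Plugin.get(client, handle="my-sentiment-tagger")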
class steamship.data.plugin.plugin.PluginAdapterType(value)[source]#

Bases: str, enum.Enum

An enumeration.

huggingface = 'huggingface'#
openai = 'openai'#
steamship_docker = 'steamshipDocker'#
steamship_sagemaker = 'steamshipSagemaker'#
class steamship.data.plugin.plugin.PluginTargetType(value)[source]#

Bases: str, enum.Enum

An enumeration.

FILE = 'file'#
WORKSPACE = 'workspace'#
class steamship.data.plugin.plugin.PluginType(value)[source]#

Bases: str, enum.Enum

An enumeration.

classifier = 'classifier'#
embedder = 'embedder'#
parser = 'parser'#
tagger = 'tagger'#
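
Because these enums subclass str, their members can be passed wherever the API expects a plain type string. A sketch of filtering a plugin listing by type, presuming the t argument of Plugin.list is the plugin type:

    from steamship import Steamship
    from steamship.data.plugin.plugin import Plugin, PluginType

    client = Steamship()  # assumes a configured client

    response = Plugin.list(client, t=PluginType.tagger.value)
    for plugin in response.plugins:
        print(plugin.handle, plugin.type)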

steamship.data.plugin.plugin_instance module#

class steamship.data.plugin.plugin_instance.CreatePluginInstanceRequest(*, id: str = None, pluginId: str = None, pluginHandle: str = None, pluginVersionId: str = None, pluginVersionHandle: str = None, handle: str = None, fetchIfExists: bool = None, config: Dict[str, Any] = None)[source]#

Bases: steamship.base.request.Request

config: Dict[str, Any]#
fetch_if_exists: bool#
handle: str#
id: str#
plugin_handle: str#
plugin_id: str#
plugin_version_handle: str#
plugin_version_id: str#
class steamship.data.plugin.plugin_instance.PluginInstance(*, client: steamship.base.client.Client = None, id: str = None, handle: str = None, pluginId: str = None, pluginVersionId: str = None, workspaceId: Optional[str] = None, userId: str = None, config: Dict[str, Any] = None, hostingType: Optional[steamship.data.plugin.hosting.HostingType] = None, hostingCpu: Optional[steamship.data.plugin.hosting.HostingCpu] = None, hostingMemory: Optional[steamship.data.plugin.hosting.HostingMemory] = None, hostingTimeout: Optional[steamship.data.plugin.hosting.HostingTimeout] = None, hostingEnvironment: Optional[steamship.data.plugin.hosting.HostingEnvironment] = None)[source]#

Bases: steamship.base.model.CamelModel

client: steamship.base.client.Client#
config: Dict[str, Any]#
static create(client: steamship.base.client.Client, plugin_id: Optional[str] = None, plugin_handle: Optional[str] = None, plugin_version_id: Optional[str] = None, plugin_version_handle: Optional[str] = None, handle: Optional[str] = None, fetch_if_exists: bool = True, config: Optional[Dict[str, Any]] = None) steamship.data.plugin.plugin_instance.PluginInstance[source]#

Create a plugin instance.

When handle is empty, the engine will automatically assign one. fetch_if_exists controls whether an existing plugin instance should be re-used.

delete() steamship.data.plugin.plugin_instance.PluginInstance[source]#
static get(client: steamship.base.client.Client, handle: str) steamship.data.plugin.plugin_instance.PluginInstance[source]#
get_training_parameters(training_request: steamship.plugin.inputs.training_parameter_plugin_input.TrainingParameterPluginInput) steamship.plugin.outputs.training_parameter_plugin_output.TrainingParameterPluginOutput[source]#
handle: str#
hosting_cpu: Optional[steamship.data.plugin.hosting.HostingCpu]#
hosting_environment: Optional[steamship.data.plugin.hosting.HostingEnvironment]#
hosting_memory: Optional[steamship.data.plugin.hosting.HostingMemory]#
hosting_timeout: Optional[steamship.data.plugin.hosting.HostingTimeout]#
hosting_type: Optional[steamship.data.plugin.hosting.HostingType]#
id: str#
classmethod parse_obj(obj: Any) pydantic.main.BaseModel[source]#
plugin_id: str#
plugin_version_id: str#
tag(doc: Union[str, steamship.data.file.File]) steamship.data.plugin.plugin_instance.Task[TagResponse][source]#
train(training_request: steamship.plugin.inputs.training_parameter_plugin_input.TrainingParameterPluginInput) steamship.data.plugin.plugin_instance.Task[TrainPluginOutput][source]#
user_id: str#
workspace_id: Optional[str]#
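
A sketch of the typical lifecycle: create (or, with fetch_if_exists=True, fetch) an instance and tag a piece of text. The plugin handle and config keys are illustrative, and the returned Task is assumed to expose wait() and output:

    from steamship import Steamship
    from steamship.data.plugin.plugin_instance import PluginInstance

    client = Steamship()  # assumes a configured client

    instance = PluginInstance.create(
        client,
        plugin_handle="my-sentiment-tagger",  # hypothetical plugin handle
        config={"threshold": 0.5},            # hypothetical plugin-specific config
    )

    # tag() accepts a raw string or a File and runs asynchronously.
    task = instance.tag("Steamships are wonderful.")
    task.wait()
    print(task.output)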

steamship.data.plugin.plugin_version module#

class steamship.data.plugin.plugin_version.CreatePluginVersionRequest(*, pluginId: str = None, handle: str = None, hostingMemory: Optional[steamship.data.plugin.hosting.HostingMemory] = None, hostingTimeout: Optional[steamship.data.plugin.hosting.HostingTimeout] = None, hostingHandler: str = None, isPublic: bool = None, isDefault: bool = None, type: str = 'file', configTemplate: str = None)[source]#

Bases: steamship.base.request.Request

config_template: str#
handle: str#
hosting_handler: str#
hosting_memory: Optional[steamship.data.plugin.hosting.HostingMemory]#
hosting_timeout: Optional[steamship.data.plugin.hosting.HostingTimeout]#
is_default: bool#
is_public: bool#
plugin_id: str#
type: str#
class steamship.data.plugin.plugin_version.ListPluginVersionsRequest(*, handle: str, pluginId: str)[source]#

Bases: steamship.base.request.Request

handle: str#
plugin_id: str#
class steamship.data.plugin.plugin_version.ListPluginVersionsResponse(*, plugins: List[PluginVersion])[source]#

Bases: steamship.base.response.Response

plugins: List[steamship.data.plugin.plugin_version.PluginVersion]#
class steamship.data.plugin.plugin_version.PluginVersion(*, client: steamship.base.client.Client = None, id: str = None, pluginId: str = None, handle: str = None, hostingMemory: Optional[steamship.data.plugin.hosting.HostingMemory] = None, hostingTimeout: Optional[steamship.data.plugin.hosting.HostingTimeout] = None, hostingHandler: str = None, isPublic: bool = None, isDefault: bool = None, configTemplate: Dict[str, Any] = None)[source]#

Bases: steamship.base.model.CamelModel

client: steamship.base.client.Client#
config_template: Dict[str, Any]#
static create(client: steamship.base.client.Client, handle: str, plugin_id: Optional[str] = None, filename: Optional[str] = None, filebytes: Optional[bytes] = None, hosting_memory: Optional[steamship.data.plugin.hosting.HostingMemory] = None, hosting_timeout: Optional[steamship.data.plugin.hosting.HostingTimeout] = None, hosting_handler: Optional[str] = None, is_public: Optional[bool] = None, is_default: Optional[bool] = None, config_template: Optional[Dict[str, Any]] = None) steamship.data.plugin.plugin_version.Task[PluginVersion][source]#
handle: str#
hosting_handler: str#
hosting_memory: Optional[steamship.data.plugin.hosting.HostingMemory]#
hosting_timeout: Optional[steamship.data.plugin.hosting.HostingTimeout]#
id: str#
is_default: bool#
is_public: bool#
static list(client: steamship.base.client.Client, plugin_id: Optional[str] = None, handle: Optional[str] = None, public: bool = True) steamship.data.plugin.plugin_version.ListPluginVersionsResponse[source]#
classmethod parse_obj(obj: Any) pydantic.main.BaseModel[source]#
plugin_id: str#
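
A sketch of deploying a new version from a zipped bundle using the static create method above; the file path, plugin id, and handler string are illustrative assumptions, and the returned Task is assumed to expose wait() and output:

    from steamship import Steamship
    from steamship.data.plugin.hosting import HostingMemory, HostingTimeout
    from steamship.data.plugin.plugin_version import PluginVersion

    client = Steamship()  # assumes a configured client

    with open("plugin-bundle.zip", "rb") as f:  # hypothetical deployable bundle
        task = PluginVersion.create(
            client,
            handle="1.0.0",
            plugin_id="P-1234",             # hypothetical plugin id
            filename="plugin-bundle.zip",
            filebytes=f.read(),
            hosting_memory=HostingMemory.LG,
            hosting_timeout=HostingTimeout.XL,
            hosting_handler="api.handler",  # hypothetical handler entry point
            is_default=True,
        )

    task.wait()
    version = task.output
    print(version.handle, version.is_default)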

Module contents#

class steamship.data.plugin.HostingCpu(value)[source]#

Bases: str, enum.Enum

The amount of CPU required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.HostingEnvironment(value)[source]#

Bases: str, enum.Enum

The software environment required for deployment.

PYTHON38 = 'python38'#
STEAMSHIP_PYTORCH_CPU = 'inferenceCpu'#
class steamship.data.plugin.HostingMemory(value)[source]#

Bases: str, enum.Enum

The amount of memory required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.HostingTimeout(value)[source]#

Bases: str, enum.Enum

The request timeout required for deployment.

This is mapped to a value dependent on the HostingType it is combined with.

LG = 'lg'#
MAX = 'max'#
MD = 'md'#
MIN = 'min'#
SM = 'sm'#
XL = 'xl'#
XS = 'xs'#
XXL = 'xxl'#
XXS = 'xxs'#
class steamship.data.plugin.HostingType(value)[source]#

Bases: str, enum.Enum

The type of hosting provider to deploy to.

ECS = 'ecs'#
LAMBDA = 'lambda'#
class steamship.data.plugin.Plugin(*, client: steamship.base.client.Client = None, id: str = None, type: str = None, transport: str = None, isPublic: bool = None, trainingPlatform: Optional[steamship.data.plugin.hosting.HostingType] = None, handle: str = None, description: str = None, metadata: str = None)[source]#

Bases: steamship.base.model.CamelModel

client: steamship.base.client.Client#
static create(client: steamship.base.client.Client, description: str, type_: str, transport: str, is_public: bool, handle: Optional[str] = None, training_platform: Optional[steamship.data.plugin.hosting.HostingType] = None, metadata: Optional[Union[str, Dict, List]] = None) steamship.data.plugin.plugin.Plugin[source]#
description: str#
static get(client: steamship.base.client.Client, handle: str)[source]#
handle: str#
id: str#
is_public: bool#
static list(client: steamship.base.client.Client, t: Optional[str] = None) steamship.data.plugin.plugin.ListPluginsResponse[source]#
metadata: str#
classmethod parse_obj(obj: Any) pydantic.main.BaseModel[source]#
training_platform: Optional[steamship.data.plugin.hosting.HostingType]#
transport: str#
type: str#
class steamship.data.plugin.PluginAdapterType(value)[source]#

Bases: str, enum.Enum

An enumeration.

huggingface = 'huggingface'#
openai = 'openai'#
steamship_docker = 'steamshipDocker'#
steamship_sagemaker = 'steamshipSagemaker'#
class steamship.data.plugin.PluginInstance(*, client: steamship.base.client.Client = None, id: str = None, handle: str = None, pluginId: str = None, pluginVersionId: str = None, workspaceId: Optional[str] = None, userId: str = None, config: Dict[str, Any] = None, hostingType: Optional[steamship.data.plugin.hosting.HostingType] = None, hostingCpu: Optional[steamship.data.plugin.hosting.HostingCpu] = None, hostingMemory: Optional[steamship.data.plugin.hosting.HostingMemory] = None, hostingTimeout: Optional[steamship.data.plugin.hosting.HostingTimeout] = None, hostingEnvironment: Optional[steamship.data.plugin.hosting.HostingEnvironment] = None)[source]#

Bases: steamship.base.model.CamelModel

client: steamship.base.client.Client#
config: Dict[str, Any]#
static create(client: steamship.base.client.Client, plugin_id: Optional[str] = None, plugin_handle: Optional[str] = None, plugin_version_id: Optional[str] = None, plugin_version_handle: Optional[str] = None, handle: Optional[str] = None, fetch_if_exists: bool = True, config: Optional[Dict[str, Any]] = None) steamship.data.plugin.plugin_instance.PluginInstance[source]#

Create a plugin instance.

When handle is empty, the engine will automatically assign one. fetch_if_exists controls whether an existing plugin instance should be re-used.

delete() steamship.data.plugin.plugin_instance.PluginInstance[source]#
static get(client: steamship.base.client.Client, handle: str) steamship.data.plugin.plugin_instance.PluginInstance[source]#
get_training_parameters(training_request: steamship.plugin.inputs.training_parameter_plugin_input.TrainingParameterPluginInput) steamship.plugin.outputs.training_parameter_plugin_output.TrainingParameterPluginOutput[source]#
handle: str#
hosting_cpu: Optional[steamship.data.plugin.hosting.HostingCpu]#
hosting_environment: Optional[steamship.data.plugin.hosting.HostingEnvironment]#
hosting_memory: Optional[steamship.data.plugin.hosting.HostingMemory]#
hosting_timeout: Optional[steamship.data.plugin.hosting.HostingTimeout]#
hosting_type: Optional[steamship.data.plugin.hosting.HostingType]#
id: str#
classmethod parse_obj(obj: Any) pydantic.main.BaseModel[source]#
plugin_id: str#
plugin_version_id: str#
tag(doc: Union[str, steamship.data.file.File]) steamship.data.plugin.plugin_instance.Task[TagResponse][source]#
train(training_request: steamship.plugin.inputs.training_parameter_plugin_input.TrainingParameterPluginInput) steamship.data.plugin.plugin_instance.Task[TrainPluginOutput][source]#
user_id: str#
workspace_id: Optional[str]#
class steamship.data.plugin.PluginTargetType(value)[source]#

Bases: str, enum.Enum

An enumeration.

FILE = 'file'#
WORKSPACE = 'workspace'#
class steamship.data.plugin.PluginType(value)[source]#

Bases: str, enum.Enum

An enumeration.

classifier = 'classifier'#
embedder = 'embedder'#
parser = 'parser'#
tagger = 'tagger'#
class steamship.data.plugin.PluginVersion(*, client: steamship.base.client.Client = None, id: str = None, pluginId: str = None, handle: str = None, hostingMemory: Optional[steamship.data.plugin.hosting.HostingMemory] = None, hostingTimeout: Optional[steamship.data.plugin.hosting.HostingTimeout] = None, hostingHandler: str = None, isPublic: bool = None, isDefault: bool = None, configTemplate: Dict[str, Any] = None)[source]#

Bases: steamship.base.model.CamelModel

client: steamship.base.client.Client#
config_template: Dict[str, Any]#
static create(client: steamship.base.client.Client, handle: str, plugin_id: Optional[str] = None, filename: Optional[str] = None, filebytes: Optional[bytes] = None, hosting_memory: Optional[steamship.data.plugin.hosting.HostingMemory] = None, hosting_timeout: Optional[steamship.data.plugin.hosting.HostingTimeout] = None, hosting_handler: Optional[str] = None, is_public: Optional[bool] = None, is_default: Optional[bool] = None, config_template: Optional[Dict[str, Any]] = None) steamship.data.plugin.plugin_version.Task[PluginVersion][source]#
handle: str#
hosting_handler: str#
hosting_memory: Optional[steamship.data.plugin.hosting.HostingMemory]#
hosting_timeout: Optional[steamship.data.plugin.hosting.HostingTimeout]#
id: str#
is_default: bool#
is_public: bool#
static list(client: steamship.base.client.Client, plugin_id: Optional[str] = None, handle: Optional[str] = None, public: bool = True) steamship.data.plugin.plugin_version.ListPluginVersionsResponse[source]#
classmethod parse_obj(obj: Any) pydantic.main.BaseModel[source]#
plugin_id: str#