promptmeteo.models package#

Submodules#

promptmeteo.models.azure_openai module#

class promptmeteo.models.azure_openai.AzureOpenAILLM(model_name: str | None = '', model_params: Dict | None = None, model_provider_token: str | None = '', **kwargs)#

Bases: BaseModel

Azure OpenAI LLM model.

class promptmeteo.models.azure_openai.ModelEnum(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: Enum

Model types with their parameters.

GPT35Turbo = <class 'promptmeteo.models.azure_openai.ModelEnum.GPT35Turbo'>#
GPT35TurboInstruct = <class 'promptmeteo.models.azure_openai.ModelEnum.GPT35TurboInstruct'>#
class promptmeteo.models.azure_openai.ModelTypes(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: str, Enum

Enum of available model types.

GPT35Turbo: str = 'gpt-3.5-turbo-16k'#
GPT35TurboInstruct: str = 'gpt-3.5-turbo-instruct'#
classmethod has_value(value: str) bool#

Checks if the value is in the enum or not.

promptmeteo.models.base module#

class promptmeteo.models.base.BaseModel(**kwargs)#

Bases: ABC

Model Interface.

property embeddings: Embeddings#

Get Model Embeddings.

property llm: BaseLLM#

Get Model LLM.

run(sample: str) str#

Executes the model LLM and returns its prediction.

promptmeteo.models.bedrock module#

class promptmeteo.models.bedrock.BedrockLLM(model_name: str | None = '', model_params: Dict | None = None, model_provider_token: str | None = '', **kwargs)#

Bases: BaseModel

Bedrock LLM model.

class promptmeteo.models.bedrock.ModelEnum(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: Enum

Model Parameters.

AnthropicClaudeV2 = <class 'promptmeteo.models.bedrock.ModelEnum.AnthropicClaudeV2'>#
class promptmeteo.models.bedrock.ModelTypes(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: str, Enum

Enum of available model types.

AnthropicClaudeV2: str = 'anthropic.claude-v2'#
classmethod has_value(value: str) bool#

Checks if the value is in the enum or not.

promptmeteo.models.fake_llm module#

class promptmeteo.models.fake_llm.FakeLLM(model_name: str | None = '', model_params: Dict | None = None, model_provider_token: str | None = '')#

Bases: BaseModel

Fake LLM class.

LLM_MAPPING: Dict[str, LLM] = {'fake-list': <class 'promptmeteo.models.fake_llm.FakeListLLM'>, 'fake-prompt_copy': <class 'promptmeteo.models.fake_llm.FakePromptCopyLLM'>, 'fake-static': <class 'promptmeteo.models.fake_llm.FakeStaticLLM'>}#
class promptmeteo.models.fake_llm.FakeListLLM(*, name: str | None = None, cache: bool | None = None, verbose: bool = None, callbacks: List[BaseCallbackHandler] | BaseCallbackManager | None = None, tags: List[str] | None = None, metadata: Dict[str, Any] | None = None, callback_manager: BaseCallbackManager | None = None, responses: List = ['uno', 'dos', 'tres'], i: int = 0)#

Bases: LLM

Fake LLM wrapper for testing purposes.

i: int#
responses: List#
class promptmeteo.models.fake_llm.FakePromptCopyLLM(*, name: str | None = None, cache: bool | None = None, verbose: bool = None, callbacks: List[BaseCallbackHandler] | BaseCallbackManager | None = None, tags: List[str] | None = None, metadata: Dict[str, Any] | None = None, callback_manager: BaseCallbackManager | None = None)#

Bases: LLM

Fake Prompt Copy LLM wrapper for testing purposes.

class promptmeteo.models.fake_llm.FakeStaticLLM(*, name: str | None = None, cache: bool | None = None, verbose: bool = None, callbacks: List[BaseCallbackHandler] | BaseCallbackManager | None = None, tags: List[str] | None = None, metadata: Dict[str, Any] | None = None, callback_manager: BaseCallbackManager | None = None, response: str = 'positive')#

Bases: LLM

Fake Static LLM wrapper for testing purposes.

response: str#
class promptmeteo.models.fake_llm.ModelTypes(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: Enum

FakeLLM Model Types.

MODEL_1: str = 'fake-static'#
MODEL_2: str = 'fake-prompt_copy'#
MODEL_3: str = 'fake-list'#

promptmeteo.models.google_vertexai module#

class promptmeteo.models.google_vertexai.GoogleVertexAILLM(model_name: str | None = '', model_params: Dict | None = None, model_provider_token: str | None = '', model_provider_project: str | None = None)#

Bases: BaseModel

Google VertexAI LLM model.

class promptmeteo.models.google_vertexai.ModelEnum(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: Enum

Model types with their parameters.

TextBison = <class 'promptmeteo.models.google_vertexai.ModelEnum.TextBison'>#
TextBison001 = <class 'promptmeteo.models.google_vertexai.ModelEnum.TextBison001'>#
TextBison32k = <class 'promptmeteo.models.google_vertexai.ModelEnum.TextBison32k'>#
class promptmeteo.models.google_vertexai.ModelTypes(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: str, Enum

Enum of available model types.

TextBison: str = 'text-bison'#
TextBison001: str = 'text-bison@001'#
TextBison32k: str = 'text-bison-32k'#
classmethod has_value(value: str) bool#

Checks if the value is in the enum or not.

promptmeteo.models.hf_hub_api module#

class promptmeteo.models.hf_hub_api.HFHubApiLLM(model_name: str | None = '', model_params: ModelEnum | None = None, model_provider_token: str | None = '')#

Bases: BaseModel

HuggingFace API call.

class promptmeteo.models.hf_hub_api.ModelEnum(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: Enum

Model Parameters Enum.

FlanT5Xxl = <class 'promptmeteo.models.hf_hub_api.ModelEnum.FlanT5Xxl'>#
class promptmeteo.models.hf_hub_api.ModelTypes(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: str, Enum

Enum of available model types.

FlanT5Xxl: str = 'google/flan-t5-xxl'#
classmethod has_value(value: str) bool#

Checks if the value is in the enum or not.

promptmeteo.models.hf_pipeline module#

class promptmeteo.models.hf_pipeline.HFPipelineLLM(model_name: str | None = '', model_params: ModelParams | None = None, model_provider_token: str | None = '')#

Bases: BaseModel

HuggingFace Local Pipeline.

class promptmeteo.models.hf_pipeline.ModelParams(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: Enum

Model Parameters.

MODEL_1 = <class 'promptmeteo.models.hf_pipeline.ModelParams.MODEL_1'>#
class promptmeteo.models.hf_pipeline.ModelTypes(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: str, Enum

Enum of available model types.

MODEL_1 = 'google/flan-t5-small'#
classmethod has_value(value)#

Checks if the value is in the enum or not.

promptmeteo.models.openai module#

class promptmeteo.models.openai.ModelEnum(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: Enum

Model Parameters.

GPT35Turbo = <class 'promptmeteo.models.openai.ModelEnum.GPT35Turbo'>#
GPT35TurboInstruct = <class 'promptmeteo.models.openai.ModelEnum.GPT35TurboInstruct'>#
class promptmeteo.models.openai.ModelTypes(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: str, Enum

Enum of available model types.

GPT35Turbo: str = 'gpt-3.5-turbo-16k'#
GPT35TurboInstruct: str = 'gpt-3.5-turbo-instruct'#
classmethod has_value(value: str) bool#

Checks if the value is in the enum or not.

class promptmeteo.models.openai.OpenAILLM(model_name: str | None = '', model_params: Dict | None = None, model_provider_token: str | None = '')#

Bases: BaseModel

OpenAI LLM model.

Module contents#

class promptmeteo.models.ModelFactory#

Bases: object

The ModelFactory class is used to create a BaseModel object from the given configuration.

MAPPING = {ModelProvider.PROVIDER_5: <class 'promptmeteo.models.bedrock.BedrockLLM'>, ModelProvider.PROVIDER_0: <class 'promptmeteo.models.fake_llm.FakeLLM'>, ModelProvider.PROVIDER_2: <class 'promptmeteo.models.hf_hub_api.HFHubApiLLM'>, ModelProvider.PROVIDER_3: <class 'promptmeteo.models.google_vertexai.GoogleVertexAILLM'>, ModelProvider.PROVIDER_1: <class 'promptmeteo.models.openai.OpenAILLM'>}#
classmethod factory_method(model_name: str, model_provider_name: str, model_provider_token: str, model_params: Dict) BaseModel#

Returns a BaseModel object configured with the settings found in the provided parameters.

class promptmeteo.models.ModelProvider(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Bases: str, Enum

LLM providers currently supported by Promptmeteo.

PROVIDER_0: str = 'fake-llm'#
PROVIDER_1: str = 'openai'#
PROVIDER_2: str = 'hf_hub_api'#
PROVIDER_3: str = 'hf_pipeline'#
PROVIDER_4: str = 'google-vertexai'#
PROVIDER_5: str = 'bedrock'#