# Skyvern/skyvern/forge/sdk/api/llm/config_registry.py

import structlog

from skyvern.forge.sdk.api.llm.exceptions import (
    DuplicateLLMConfigError,
    InvalidLLMConfigError,
    MissingLLMProviderEnvVarsError,
    NoProviderEnabledError,
)
from skyvern.forge.sdk.api.llm.models import LLMConfig, LLMRouterConfig
from skyvern.forge.sdk.settings_manager import SettingsManager

LOG = structlog.get_logger()


class LLMConfigRegistry:
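    """Process-wide registry mapping an llm_key to its LLMConfig or LLMRouterConfig."""
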
    _configs: dict[str, LLMRouterConfig | LLMConfig] = {}

    @staticmethod
    def is_router_config(llm_key: str) -> bool:
        return isinstance(LLMConfigRegistry.get_config(llm_key), LLMRouterConfig)

    @staticmethod
    def validate_config(llm_key: str, config: LLMRouterConfig | LLMConfig) -> None:
        missing_env_vars = config.get_missing_env_vars()
        if missing_env_vars:
            raise MissingLLMProviderEnvVarsError(llm_key, missing_env_vars)

    @classmethod
    def register_config(cls, llm_key: str, config: LLMRouterConfig | LLMConfig) -> None:
        if llm_key in cls._configs:
            raise DuplicateLLMConfigError(llm_key)
        cls.validate_config(llm_key, config)
        LOG.info("Registering LLM config", llm_key=llm_key)
        cls._configs[llm_key] = config

    @classmethod
    def get_config(cls, llm_key: str) -> LLMRouterConfig | LLMConfig:
        if llm_key not in cls._configs:
            raise InvalidLLMConfigError(llm_key)
        return cls._configs[llm_key]
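

# Module-level registration: the calls below run at import time and populate the
# registry based on which providers are enabled in settings.
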
# if none of the LLM providers are enabled, raise an error
if not any(
    [
        SettingsManager.get_settings().ENABLE_OPENAI,
        SettingsManager.get_settings().ENABLE_ANTHROPIC,
        SettingsManager.get_settings().ENABLE_AZURE,
        SettingsManager.get_settings().ENABLE_BEDROCK,
    ]
):
    raise NoProviderEnabledError()

if SettingsManager.get_settings().ENABLE_OPENAI:
    LLMConfigRegistry.register_config(
        "OPENAI_GPT4_TURBO",
        LLMConfig(
            "gpt-4-turbo",
            ["OPENAI_API_KEY"],
            supports_vision=False,
            add_assistant_prefix=False,
        ),
    )
    LLMConfigRegistry.register_config(
        "OPENAI_GPT4V",
        LLMConfig(
            "gpt-4-turbo",
            ["OPENAI_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=False,
        ),
    )
    LLMConfigRegistry.register_config(
        "OPENAI_GPT4O", LLMConfig("gpt-4o", ["OPENAI_API_KEY"], supports_vision=True, add_assistant_prefix=False)
    )

if SettingsManager.get_settings().ENABLE_ANTHROPIC:
    LLMConfigRegistry.register_config(
        "ANTHROPIC_CLAUDE3",
        LLMConfig(
            "anthropic/claude-3-sonnet-20240229",
            ["ANTHROPIC_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "ANTHROPIC_CLAUDE3_OPUS",
        LLMConfig(
            "anthropic/claude-3-opus-20240229",
            ["ANTHROPIC_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "ANTHROPIC_CLAUDE3_SONNET",
        LLMConfig(
            "anthropic/claude-3-sonnet-20240229",
            ["ANTHROPIC_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "ANTHROPIC_CLAUDE3_HAIKU",
        LLMConfig(
            "anthropic/claude-3-haiku-20240307",
            ["ANTHROPIC_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )

if SettingsManager.get_settings().ENABLE_BEDROCK:
    # Supported through AWS IAM authentication
    LLMConfigRegistry.register_config(
        "BEDROCK_ANTHROPIC_CLAUDE3_OPUS",
        LLMConfig(
            "bedrock/anthropic.claude-3-opus-20240229-v1:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "BEDROCK_ANTHROPIC_CLAUDE3_SONNET",
        LLMConfig(
            "bedrock/anthropic.claude-3-sonnet-20240229-v1:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "BEDROCK_ANTHROPIC_CLAUDE3_HAIKU",
        LLMConfig(
            "bedrock/anthropic.claude-3-haiku-20240307-v1:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )

if SettingsManager.get_settings().ENABLE_AZURE:
    LLMConfigRegistry.register_config(
        "AZURE_OPENAI_GPT4V",
        LLMConfig(
            f"azure/{SettingsManager.get_settings().AZURE_DEPLOYMENT}",
            [
                "AZURE_DEPLOYMENT",
                "AZURE_API_KEY",
                "AZURE_API_BASE",
                "AZURE_API_VERSION",
            ],
            supports_vision=True,
            add_assistant_prefix=False,
        ),
    )
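

# Usage sketch (not part of the original module): a caller would typically look up a
# registered config by its key. "OPENAI_GPT4O" is only available when ENABLE_OPENAI
# is set; for any key that was never registered, get_config raises InvalidLLMConfigError.
#
#   from skyvern.forge.sdk.api.llm.config_registry import LLMConfigRegistry
#
#   config = LLMConfigRegistry.get_config("OPENAI_GPT4O")
#   if LLMConfigRegistry.is_router_config("OPENAI_GPT4O"):
#       ...  # LLMRouterConfig: requests are routed across multiple model deployments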