import structlog

from skyvern.config import settings
from skyvern.forge.sdk.api.llm.exceptions import (
    DuplicateLLMConfigError,
    InvalidLLMConfigError,
    MissingLLMProviderEnvVarsError,
    NoProviderEnabledError,
)
from skyvern.forge.sdk.api.llm.models import LiteLLMParams, LLMConfig, LLMRouterConfig

LOG = structlog.get_logger()


class LLMConfigRegistry:
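    """Process-wide registry mapping an LLM key (e.g. "OPENAI_GPT4O") to its
    LLMConfig or LLMRouterConfig. Configs are registered once at import time,
    driven by the ENABLE_* provider settings used below."""
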
    _configs: dict[str, LLMRouterConfig | LLMConfig] = {}

    @staticmethod
    def is_router_config(llm_key: str) -> bool:
        return isinstance(LLMConfigRegistry.get_config(llm_key), LLMRouterConfig)

    @staticmethod
    def validate_config(llm_key: str, config: LLMRouterConfig | LLMConfig) -> None:
        missing_env_vars = config.get_missing_env_vars()
        if missing_env_vars:
            raise MissingLLMProviderEnvVarsError(llm_key, missing_env_vars)
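
    # Registration is strict: re-registering an existing key raises
    # DuplicateLLMConfigError, and a config whose required environment
    # variables are unset raises MissingLLMProviderEnvVarsError (see
    # validate_config above).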
    @classmethod
    def register_config(cls, llm_key: str, config: LLMRouterConfig | LLMConfig) -> None:
        if llm_key in cls._configs:
            raise DuplicateLLMConfigError(llm_key)

        cls.validate_config(llm_key, config)

        LOG.debug("Registering LLM config", llm_key=llm_key)
        cls._configs[llm_key] = config

    @classmethod
    def get_config(cls, llm_key: str) -> LLMRouterConfig | LLMConfig:
        if llm_key not in cls._configs:
            raise InvalidLLMConfigError(llm_key)

        return cls._configs[llm_key]
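

# Usage sketch (comments only, not executed here). Callers look configs up by
# key at runtime; the call sites live outside this module, so the lines below
# are illustrative assumptions rather than real code paths:
#
#     config = LLMConfigRegistry.get_config("OPENAI_GPT4O")
#     if LLMConfigRegistry.is_router_config("OPENAI_GPT4O"):
#         ...  # only true for keys registered with an LLMRouterConfig
#
# Additional keys can be registered the same way the built-in ones are below:
#
#     LLMConfigRegistry.register_config(
#         "MY_OPENAI_GPT4O",  # hypothetical custom key
#         LLMConfig("gpt-4o", ["OPENAI_API_KEY"], supports_vision=True, add_assistant_prefix=False),
#     )
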
# if none of the LLM providers are enabled, raise an error
if not any(
    [
        settings.ENABLE_OPENAI,
        settings.ENABLE_ANTHROPIC,
        settings.ENABLE_AZURE,
        settings.ENABLE_AZURE_GPT4O_MINI,
        settings.ENABLE_BEDROCK,
        settings.ENABLE_GEMINI,
    ]
):
    raise NoProviderEnabledError()
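

# Provider-specific registrations. Each block below runs at import time and
# only registers its configs when the matching ENABLE_* setting is true; the
# string keys are what callers pass to LLMConfigRegistry.get_config.
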
if settings.ENABLE_OPENAI:
    LLMConfigRegistry.register_config(
        "OPENAI_GPT4_TURBO",
        LLMConfig(
            "gpt-4-turbo",
            ["OPENAI_API_KEY"],
            supports_vision=False,
            add_assistant_prefix=False,
        ),
    )
    LLMConfigRegistry.register_config(
        "OPENAI_GPT4V",
        LLMConfig(
            "gpt-4-turbo",
            ["OPENAI_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=False,
        ),
    )
    LLMConfigRegistry.register_config(
        "OPENAI_GPT4O",
        LLMConfig(
            "gpt-4o", ["OPENAI_API_KEY"], supports_vision=True, add_assistant_prefix=False, max_output_tokens=16384
        ),
    )
    LLMConfigRegistry.register_config(
        "OPENAI_GPT4O_MINI",
        LLMConfig(
            "gpt-4o-mini",
            ["OPENAI_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=False,
            max_output_tokens=16384,
        ),
    )
    LLMConfigRegistry.register_config(
        "OPENAI_GPT-4O-2024-08-06",
        LLMConfig(
            "gpt-4o-2024-08-06",
            ["OPENAI_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=False,
            max_output_tokens=16384,
        ),
    )
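
# Note: unlike the OpenAI configs above, every Anthropic config below sets
# add_assistant_prefix=True (presumably so the calling code can prefill an
# assistant turn, which Anthropic's API supports).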
if settings.ENABLE_ANTHROPIC:
    LLMConfigRegistry.register_config(
        "ANTHROPIC_CLAUDE3",
        LLMConfig(
            "anthropic/claude-3-sonnet-20240229",
            ["ANTHROPIC_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "ANTHROPIC_CLAUDE3_OPUS",
        LLMConfig(
            "anthropic/claude-3-opus-20240229",
            ["ANTHROPIC_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "ANTHROPIC_CLAUDE3_SONNET",
        LLMConfig(
            "anthropic/claude-3-sonnet-20240229",
            ["ANTHROPIC_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "ANTHROPIC_CLAUDE3_HAIKU",
        LLMConfig(
            "anthropic/claude-3-haiku-20240307",
            ["ANTHROPIC_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "ANTHROPIC_CLAUDE3.5_SONNET",
        LLMConfig(
            "anthropic/claude-3-5-sonnet-latest",
            ["ANTHROPIC_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=True,
            max_output_tokens=8192,
        ),
    )

if settings.ENABLE_BEDROCK:
    # Supported through AWS IAM authentication
    LLMConfigRegistry.register_config(
        "BEDROCK_ANTHROPIC_CLAUDE3_OPUS",
        LLMConfig(
            "bedrock/anthropic.claude-3-opus-20240229-v1:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "BEDROCK_ANTHROPIC_CLAUDE3_SONNET",
        LLMConfig(
            "bedrock/anthropic.claude-3-sonnet-20240229-v1:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "BEDROCK_ANTHROPIC_CLAUDE3_HAIKU",
        LLMConfig(
            "bedrock/anthropic.claude-3-haiku-20240307-v1:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "BEDROCK_ANTHROPIC_CLAUDE3.5_SONNET",
        LLMConfig(
            "bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
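    # The "us." prefix below targets a Bedrock cross-region (US) inference
    # profile rather than a single-Region model ID.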
    LLMConfigRegistry.register_config(
        "BEDROCK_ANTHROPIC_CLAUDE3.5_SONNET_INFERENCE_PROFILE",
        LLMConfig(
            "bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "BEDROCK_ANTHROPIC_CLAUDE3.5_SONNET_V1",
        LLMConfig(
            "bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "BEDROCK_AMAZON_NOVA_PRO",
        LLMConfig(
            "bedrock/us.amazon.nova-pro-v1:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )
    LLMConfigRegistry.register_config(
        "BEDROCK_AMAZON_NOVA_LITE",
        LLMConfig(
            "bedrock/us.amazon.nova-lite-v1:0",
            ["AWS_REGION"],
            supports_vision=True,
            add_assistant_prefix=True,
        ),
    )

if settings.ENABLE_AZURE:
    LLMConfigRegistry.register_config(
        "AZURE_OPENAI",
        LLMConfig(
            f"azure/{settings.AZURE_DEPLOYMENT}",
            [
                "AZURE_DEPLOYMENT",
                "AZURE_API_KEY",
                "AZURE_API_BASE",
                "AZURE_API_VERSION",
            ],
            supports_vision=True,
            add_assistant_prefix=False,
        ),
    )
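
# Unlike the plain Azure config above, the GPT-4o mini config also passes its
# endpoint, key, and API version to LiteLLM explicitly via LiteLLMParams
# instead of relying only on environment variables.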
if settings.ENABLE_AZURE_GPT4O_MINI:
    LLMConfigRegistry.register_config(
        "AZURE_OPENAI_GPT4O_MINI",
        LLMConfig(
            f"azure/{settings.AZURE_GPT4O_MINI_DEPLOYMENT}",
            [
                "AZURE_GPT4O_MINI_DEPLOYMENT",
                "AZURE_GPT4O_MINI_API_KEY",
                "AZURE_GPT4O_MINI_API_BASE",
                "AZURE_GPT4O_MINI_API_VERSION",
            ],
            litellm_params=LiteLLMParams(
                api_base=settings.AZURE_GPT4O_MINI_API_BASE,
                api_key=settings.AZURE_GPT4O_MINI_API_KEY,
                api_version=settings.AZURE_GPT4O_MINI_API_VERSION,
                model_info={"model_name": "azure/gpt-4o-mini"},
            ),
            supports_vision=True,
            add_assistant_prefix=False,
        ),
    )

if settings.ENABLE_GEMINI:
    LLMConfigRegistry.register_config(
        "GEMINI_PRO",
        LLMConfig(
            "gemini/gemini-1.5-pro",
            ["GEMINI_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=False,
            max_output_tokens=8192,
        ),
    )
    LLMConfigRegistry.register_config(
        "GEMINI_FLASH",
        LLMConfig(
            "gemini/gemini-1.5-flash",
            ["GEMINI_API_KEY"],
            supports_vision=True,
            add_assistant_prefix=False,
            max_output_tokens=8192,
        ),
    )