mirror of
https://github.com/Skyvern-AI/skyvern.git
synced 2025-09-15 01:39:41 +00:00
return dummy llm api handler which will raise exception if called (#1954)
This commit is contained in:
parent
6cc595d04c
commit
205e2b35c0
3 changed files with 19 additions and 3 deletions
|
@ -18,7 +18,7 @@ from skyvern.forge.sdk.api.llm.exceptions import (
|
|||
LLMProviderError,
|
||||
LLMProviderErrorRetryableTask,
|
||||
)
|
||||
from skyvern.forge.sdk.api.llm.models import LLMAPIHandler, LLMConfig, LLMRouterConfig
|
||||
from skyvern.forge.sdk.api.llm.models import LLMAPIHandler, LLMConfig, LLMRouterConfig, dummy_llm_api_handler
|
||||
from skyvern.forge.sdk.api.llm.utils import llm_messages_builder, parse_api_response
|
||||
from skyvern.forge.sdk.artifact.models import ArtifactType
|
||||
from skyvern.forge.sdk.core import skyvern_context
|
||||
|
@ -229,7 +229,10 @@ class LLMAPIHandlerFactory:
|
|||
|
||||
@staticmethod
|
||||
def get_llm_api_handler(llm_key: str, base_parameters: dict[str, Any] | None = None) -> LLMAPIHandler:
|
||||
llm_config = LLMConfigRegistry.get_config(llm_key)
|
||||
try:
|
||||
llm_config = LLMConfigRegistry.get_config(llm_key)
|
||||
except InvalidLLMConfigError:
|
||||
return dummy_llm_api_handler
|
||||
|
||||
if LLMConfigRegistry.is_router_config(llm_key):
|
||||
return LLMAPIHandlerFactory.get_llm_api_handler_with_router(llm_key)
|
||||
|
|
|
@ -55,7 +55,7 @@ if not any(
|
|||
]
|
||||
):
|
||||
LOG.warning(
|
||||
"At least one LLM provider must be enabled. Run setup.sh and follow through the LLM provider setup, or "
|
||||
"To run skyvern locally, you need to enable at least one LLM provider. Run setup.sh and follow through the LLM provider setup, or "
|
||||
"update the .env file (check out .env.example to see the required environment variables)."
|
||||
)
|
||||
|
||||
|
|
|
@ -93,3 +93,16 @@ class LLMAPIHandler(Protocol):
|
|||
screenshots: list[bytes] | None = None,
|
||||
parameters: dict[str, Any] | None = None,
|
||||
) -> Awaitable[dict[str, Any]]: ...
|
||||
|
||||
|
||||
async def dummy_llm_api_handler(
    prompt: str,
    prompt_name: str,
    step: Step | None = None,
    task_v2: TaskV2 | None = None,
    thought: Thought | None = None,
    ai_suggestion: AISuggestion | None = None,
    screenshots: list[bytes] | None = None,
    parameters: dict[str, Any] | None = None,
) -> dict[str, Any]:
    """Fallback handler returned when no LLM provider is configured.

    The signature mirrors the trailing parameters of the ``LLMAPIHandler``
    protocol defined above (``screenshots``, ``parameters``, awaitable
    ``dict[str, Any]`` result), so callers can hold it in place of a real
    handler; any actual invocation fails immediately instead of at lookup
    time.

    Raises:
        NotImplementedError: always — invoking this handler means the LLM
            provider was never configured (see the factory's fallback on
            ``InvalidLLMConfigError``).
    """
    raise NotImplementedError("Your LLM provider is not configured. Please configure it in the .env file.")
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue