From 753bac31746c239751f84bf7f7afee0234391030 Mon Sep 17 00:00:00 2001
From: Kerem Yilmaz
Date: Thu, 23 May 2024 18:49:49 -0700
Subject: [PATCH] Set 5 min timeout for LLM calls (#358)

---
 skyvern/config.py                                | 1 +
 skyvern/forge/sdk/api/llm/api_handler_factory.py | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/skyvern/config.py b/skyvern/config.py
index c5a835c2..7e79466b 100644
--- a/skyvern/config.py
+++ b/skyvern/config.py
@@ -64,6 +64,7 @@ class Settings(BaseSettings):
     # ACTIVE LLM PROVIDER
     LLM_KEY: str = "OPENAI_GPT4O"
     # COMMON
+    LLM_CONFIG_TIMEOUT: int = 300
     LLM_CONFIG_MAX_TOKENS: int = 4096
     LLM_CONFIG_TEMPERATURE: float = 0
     # LLM PROVIDER SPECIFIC
diff --git a/skyvern/forge/sdk/api/llm/api_handler_factory.py b/skyvern/forge/sdk/api/llm/api_handler_factory.py
index 0585988d..0c5f68b1 100644
--- a/skyvern/forge/sdk/api/llm/api_handler_factory.py
+++ b/skyvern/forge/sdk/api/llm/api_handler_factory.py
@@ -190,8 +190,10 @@ class LLMAPIHandlerFactory:
             response = await litellm.acompletion(
                 model=llm_config.model_name,
                 messages=messages,
+                timeout=SettingsManager.get_settings().LLM_CONFIG_TIMEOUT,
                 **active_parameters,
             )
+            LOG.info("LLM API call successful", llm_key=llm_key, model=llm_config.model_name)
         except openai.OpenAIError as e:
             raise LLMProviderError(llm_key) from e
         except Exception as e:
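
Not part of the patch: a minimal, self-contained sketch of how the new timeout setting flows into a litellm call, assuming litellm and pydantic-settings are installed. The Settings subclass and call_llm helper below are illustrative stand-ins, not Skyvern's actual module layout.

    # Sketch only: mirrors the pattern in this patch, not Skyvern's real code.
    import asyncio

    import litellm
    from pydantic_settings import BaseSettings  # assumption: pydantic v2 settings package


    class Settings(BaseSettings):
        LLM_CONFIG_TIMEOUT: int = 300  # seconds; same default as added in this patch
        LLM_CONFIG_MAX_TOKENS: int = 4096
        LLM_CONFIG_TEMPERATURE: float = 0


    settings = Settings()


    async def call_llm(model: str, messages: list[dict]) -> str:
        # Pass the configured timeout so a hung provider request fails after
        # LLM_CONFIG_TIMEOUT seconds instead of blocking indefinitely.
        response = await litellm.acompletion(
            model=model,
            messages=messages,
            timeout=settings.LLM_CONFIG_TIMEOUT,
            max_tokens=settings.LLM_CONFIG_MAX_TOKENS,
            temperature=settings.LLM_CONFIG_TEMPERATURE,
        )
        return response.choices[0].message.content


    if __name__ == "__main__":
        print(asyncio.run(call_llm("gpt-4o", [{"role": "user", "content": "ping"}])))

Because the field lives on a pydantic BaseSettings class, the 300-second default can be overridden per deployment by setting an LLM_CONFIG_TIMEOUT environment variable.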