From cf34240e47f2de6139c64b38deb962afcd7c8456 Mon Sep 17 00:00:00 2001
From: MasterX
Date: Sat, 17 Aug 2024 17:55:03 +0200
Subject: [PATCH] Update models.py

Read OLLAMA_BASE_URL and LM_STUDIO_BASE_URL from the environment in
models.py so both endpoints can easily be changed when the service runs
on another machine or in another Docker container. The previous
hard-coded localhost URLs are kept as fallback defaults, so behavior is
unchanged for existing installations that do not set these variables.
---
 models.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/models.py b/models.py
index 33af468b3..4b4b06482 100644
--- a/models.py
+++ b/models.py
@@ -22,7 +22,7 @@ def get_api_key(service):
 
 
 # Ollama models
-def get_ollama_chat(model_name:str, temperature=DEFAULT_TEMPERATURE, base_url="http://localhost:11434"):
+def get_ollama_chat(model_name:str, temperature=DEFAULT_TEMPERATURE, base_url=os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")):
     return Ollama(model=model_name,temperature=temperature, base_url=base_url)
 
 def get_ollama_embedding(model_name:str, temperature=DEFAULT_TEMPERATURE):
@@ -34,10 +34,10 @@ def get_huggingface_embedding(model_name:str):
     return HuggingFaceEmbeddings(model_name=model_name)
 
 # LM Studio and other OpenAI compatible interfaces
-def get_lmstudio_chat(model_name:str, base_url="http://localhost:1234/v1", temperature=DEFAULT_TEMPERATURE):
+def get_lmstudio_chat(model_name:str, base_url=os.getenv("LM_STUDIO_BASE_URL", "http://localhost:1234/v1"), temperature=DEFAULT_TEMPERATURE):
     return ChatOpenAI(model_name=model_name, base_url=base_url, temperature=temperature, api_key="none") # type: ignore
 
-def get_lmstudio_embedding(model_name:str, base_url="http://localhost:1234/v1"):
+def get_lmstudio_embedding(model_name:str, base_url=os.getenv("LM_STUDIO_BASE_URL", "http://localhost:1234/v1")):
     return OpenAIEmbeddings(model_name=model_name, base_url=base_url) # type: ignore
 
 # Anthropic models