diff --git a/open_notebook/graphs/chat.py b/open_notebook/graphs/chat.py
index c87af21..0c0e517 100644
--- a/open_notebook/graphs/chat.py
+++ b/open_notebook/graphs/chat.py
@@ -26,7 +26,7 @@ def call_model_with_messages(state: ThreadState, config: RunnableConfig) -> dict
     system_prompt = Prompter(prompt_template="chat").render(data=state)
     payload = [system_prompt] + state.get("messages", [])
     model = provision_model(str(payload), config, "chat")
-    ai_message = model.invoke(payload, [])
+    ai_message = model.invoke(payload)
     return {"messages": ai_message}
 
 
diff --git a/open_notebook/graphs/utils.py b/open_notebook/graphs/utils.py
index 9429bca..3f84f3b 100644
--- a/open_notebook/graphs/utils.py
+++ b/open_notebook/graphs/utils.py
@@ -22,7 +22,9 @@ def provision_model(content, config, default_type):
         )
         return model_manager.get_default_model("large_context").to_langchain()
     elif config.get("configurable", {}).get("model_id"):
-        return model_manager.get_model(config.get("configurable", {}).get("model_id"))
+        return model_manager.get_model(
+            config.get("configurable", {}).get("model_id")
+        ).to_langchain()
     else:
         return model_manager.get_default_model(default_type).to_langchain()
 