simplify model provisioning

This commit is contained in:
LUIS NOVO 2024-11-01 21:32:40 -03:00
parent fcd883f393
commit 15048b0839
3 changed files with 58 additions and 14 deletions

View file

@@ -24,10 +24,9 @@ class ThreadState(TypedDict):
def call_model_with_messages(state: ThreadState, config: RunnableConfig) -> dict:
system_prompt = Prompter(prompt_template="chat").render(data=state)
model = provision_model(
str(system_prompt) + str(state.get("messages", [])), config, "chat"
)
ai_message = model.invoke([system_prompt] + state.get("messages", []))
payload = [system_prompt] + state.get("messages", [])
model = provision_model(str(payload), config, "chat")
ai_message = model.invoke(payload)
return {"messages": ai_message}

View file

@@ -39,9 +39,8 @@ def run_pattern(
system_prompt = Prompter(prompt_template=pattern_name, parser=parser).render(
data=state
)
chain = provision_model(
str(system_prompt) + str(messages), config, "transformation"
)
payload = [system_prompt] + messages
chain = provision_model(str(payload), config, "transformation")
if parser:
chain = chain | parser
@@ -53,10 +52,6 @@ def run_pattern(
llm=output_fix_model,
)
# todo: precisa deste if?
if len(messages) > 0:
response = chain.invoke([system_prompt] + messages)
else:
response = chain.invoke(system_prompt)
response = chain.invoke(payload)
return response