fix: maximum value of max_tokens is now 8192

This commit is contained in:
LUIS NOVO 2025-10-18 13:21:53 -03:00
parent 059ee29e18
commit 8b5daa86bc
3 changed files with 70 additions and 53 deletions

View file

@@ -42,8 +42,7 @@ def call_model_with_messages(state: ThreadState, config: RunnableConfig) -> dict
str(payload),
model_id,
"chat",
max_tokens=10000,
)
max_tokens=8192
)
finally:
new_loop.close()
@@ -64,7 +63,7 @@ def call_model_with_messages(state: ThreadState, config: RunnableConfig) -> dict
str(payload),
model_id,
"chat",
max_tokens=10000,
max_tokens=8192,
)
)