From 6529326c593bf642488b86d9d117c946fd38f6fc Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Fri, 30 May 2025 15:59:18 +0800 Subject: [PATCH] allow temperatures up to 1.0 when function calling --- koboldcpp.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/koboldcpp.py b/koboldcpp.py index 44e8e0f25..6a9585a3e 100644 --- a/koboldcpp.py +++ b/koboldcpp.py @@ -2269,8 +2269,8 @@ ws ::= | " " | "\n" [ \t]{0,20} if used_tool_json: toolparamjson = None toolname = None - # Set temperature low automatically if function calling - genparams["temperature"] = 0.1 + # Cap temperature automatically if function calling, cannot exceed 1.0 + genparams["temperature"] = (1.0 if genparams.get("temperature", 0.5) > 1.0 else genparams.get("temperature", 0.5)) genparams["using_openai_tools"] = True # Set grammar to llamacpp example grammar to force json response (see https://github.com/ggerganov/llama.cpp/blob/master/grammars/json_arr.gbnf) genparams["grammar"] = jsongrammar