Merge branch 'upstream' into concedo_experimental

# Conflicts:
#	.devops/llama-cli-intel.Dockerfile
#	.devops/llama-server-intel.Dockerfile
#	README.md
#	ggml/src/CMakeLists.txt
#	tests/test-chat-template.cpp
commit cca2fa9a6c
Author: Concedo
Date:   2024-07-24 21:57:50 +08:00

4 changed files with 11 additions and 2 deletions


@@ -125,6 +125,7 @@ static std::string chat_add_and_format(struct llama_model * model, std::vector<l
     auto formatted = llama_chat_format_single(
             model, g_params->chat_template, chat_msgs, new_msg, role == "user");
     chat_msgs.push_back({role, content});
+    LOG("formatted: %s\n", formatted.c_str());
     return formatted;
 }
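
For context on the touched hunk: chat_add_and_format applies the chat template to only the delta introduced by the newest message (via llama_chat_format_single), appends that message to the running history, and with this change also prints the formatted text through LOG for debugging. Below is a minimal sketch of the calling side, assuming the llama_chat_msg struct and the llama_chat_format_single / llama_tokenize helpers from common/common.h; the loop structure and handle_user_turn wrapper are illustrative, not the actual main.cpp code.

    // Sketch only (not the real main.cpp): one user turn through chat_add_and_format.
    #include <string>
    #include <vector>
    #include "common.h"   // llama_chat_msg, llama_chat_format_single, llama_tokenize (assumed)
    #include "llama.h"

    static std::vector<llama_chat_msg> chat_msgs;  // running conversation history

    static void handle_user_turn(struct llama_model * model, struct llama_context * ctx,
                                 const std::string & user_input) {
        // Format only what the new message adds under the template, then record it
        // in the history; this mirrors what chat_add_and_format in the diff does.
        std::string delta = chat_add_and_format(model, chat_msgs, "user", user_input);

        // The LOG line added in this hunk echoes this delta, making template issues
        // visible in the debug log.
        std::vector<llama_token> toks = llama_tokenize(ctx, delta, false, true);
        // ... toks are then fed to the evaluation batch as usual ...
    }

Logging the per-message delta rather than the whole prompt keeps the output readable in long interactive sessions while still exposing how the template wraps each role.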