From b2f806a57295697b57471fd3c3300ac281313220 Mon Sep 17 00:00:00 2001
From: Zonghang Li
Date: Sat, 4 Jan 2025 11:08:35 +0400
Subject: [PATCH] increase default max nodes from 8192 to 16384

---
 src/llama.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/llama.cpp b/src/llama.cpp
index 7884a5b2..27e2e88c 100644
--- a/src/llama.cpp
+++ b/src/llama.cpp
@@ -4599,7 +4599,7 @@ namespace GGUFMeta {
 using llama_buf_map = std::unordered_map<uint32_t, ggml_backend_buffer_t>;
 
 static size_t llama_model_max_nodes(const llama_model & model) {
-    return std::max<size_t>(8192, model.tensors_by_name.size()*5);
+    return std::max<size_t>(16384, model.tensors_by_name.size()*5);
 }
 
 static int get_layer_id(const ggml_tensor * tensor) {
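
Note (not part of the patch): the value returned by llama_model_max_nodes() is the node budget for the ggml compute graph, so raising the floor from 8192 to 16384 gives larger/fragmented builds more headroom before ggml aborts on a node overflow. The sketch below shows how that budget is typically consumed in upstream llama.cpp; the exact call sites in this fork may differ, so treat it as an illustration of the mechanism, not the code being modified here.

    // Sketch based on upstream llama.cpp usage (assumed, not from this patch):
    const size_t max_nodes = llama_model_max_nodes(model);

    // The meta buffer must hold the tensor structs plus the graph bookkeeping
    // for up to max_nodes nodes.
    buf_compute_meta.resize(
        ggml_tensor_overhead()*max_nodes +
        ggml_graph_overhead_custom(max_nodes, /*grads=*/false));

    // The compute graph itself is allocated with the same node budget.
    ggml_cgraph * gf = ggml_new_graph_custom(ctx, max_nodes, /*grads=*/false);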