completion : fix prompt cache for recurrent models (#19045)

This commit is contained in:
Authored by Georgi Gerganov on 2026-01-25 09:12:50 +02:00; committed by GitHub
parent 1243f93a2d
commit 080b161995
No known key found for this signature in the database
GPG key ID: B5690EEEBB952194
2 changed files with 48 additions and 41 deletions

View file

@ -2559,6 +2559,7 @@ size_t llama_context::state_write_data(llama_io_write_i & io) {
}
}
// [TAG_CONTEXT_STATE_LOGITS]
// write logits
{
LLAMA_LOG_DEBUG("%s: - writing logits\n", __func__);