Alone in the darkness

They're coming for you
I know they will try to catch me too
Alone in the darkness
They're calling for you
There's nowhere to run for cover
This commit is contained in:
Concedo 2024-10-24 22:29:20 +08:00
commit 94a5a27b85
44 changed files with 6803 additions and 2143 deletions

View file

@@ -1489,7 +1489,7 @@ static bool kcpp_eval_image(llama_context * ctx_llama, float * img_embd, int num
if (n_eval > n_batch) {
n_eval = n_batch;
}
- llama_batch batch = {int32_t(n_eval), nullptr, (img_embd+i*n_embd), nullptr, nullptr, nullptr, nullptr, *n_past, 1, 0, };
+ llama_batch batch = {int32_t(n_eval), nullptr, (img_embd+i*n_embd), nullptr, nullptr, nullptr, nullptr,};
if (llama_decode(ctx_llama, batch)) {
fprintf(stderr, "\n%s : failed to eval image\n", __func__);
return false;
@@ -2004,7 +2004,7 @@ ModelLoadResult gpttype_load_model(const load_model_inputs inputs, FileFormat in
//determine mem per token
std::vector<int> tmp = {1, 2, 3, 4};
llama_kv_cache_clear(llama_ctx_v4);
- auto er = llama_decode(llama_ctx_v4, llama_batch_get_one(tmp.data(), tmp.size(), 0, 0));
+ auto er = llama_decode(llama_ctx_v4, llama_batch_get_one(tmp.data(), tmp.size()));
if(er!=0)
{
printf("\nLLAMA EVAL returned nonzero: %d\n",er);
@@ -3061,7 +3061,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
}
else if(file_format == FileFormat::GGUF_GENERIC)
{
- evalres = (llama_decode(llama_ctx_v4, llama_batch_get_one(embd.data(), embdsize, n_past, 0))==0);
+ evalres = (llama_decode(llama_ctx_v4, llama_batch_get_one(embd.data(), embdsize))==0);
}
else if(file_format==FileFormat::RWKV_1 || file_format==FileFormat::RWKV_2)
{
@@ -3432,7 +3432,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
if(i>0 && sepsize>0)
{
//add a separator between each image
- auto evr = llama_decode(llama_ctx_v4, llama_batch_get_one(llava_sep.data(), sepsize, n_past, 0));
+ auto evr = llama_decode(llama_ctx_v4, llama_batch_get_one(llava_sep.data(), sepsize));
if(evr!=0)
{
printf("\nError when appending llava separator: %d\n",evr);