glm4 clamp 8 on vk

Concedo 2025-05-13 17:03:24 +08:00
parent 48f86bbbc7
commit 35284bcdb5


@@ -1942,8 +1942,8 @@ ModelLoadResult gpttype_load_model(const load_model_inputs inputs, FileFormat in
         {
             printf("GLM-4 is broken on larger batch sizes in Vulkan. Clamp ignored in debug.\n");
         } else {
-            printf("GLM-4 is broken on larger batch sizes in Vulkan. Clamping ubatch size to 16.\n");
-            kcpp_data->n_ubatch = 16;
+            printf("GLM-4 is broken on larger batch sizes in Vulkan. Clamping ubatch size to 8.\n");
+            kcpp_data->n_ubatch = 8;
         }
     }
 #endif
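
For context, the hunk lowers the Vulkan micro-batch clamp for GLM-4 models from 16 to 8. Below is a minimal standalone sketch of that clamp logic, assuming a debug switch that skips the clamp as the printed messages suggest; kcpp_params, clamp_glm4_ubatch_on_vulkan, is_glm4, and debugmode are illustrative names, not the actual KoboldCpp identifiers (only n_ubatch appears in the diff).

// Standalone sketch of the clamp applied by this commit; types and flags
// below are stand-ins, not KoboldCpp's real definitions.
#include <cstdio>

struct kcpp_params {
    int n_ubatch = 512;   // micro-batch size requested by the user
};

// Assumption: on the Vulkan backend, GLM-4 misbehaves above a small ubatch,
// so the loader forces n_ubatch down to 8 unless a debug mode skips the clamp.
static void clamp_glm4_ubatch_on_vulkan(kcpp_params &p, bool is_glm4, bool debugmode) {
    if (!is_glm4 || p.n_ubatch <= 8) {
        return;   // nothing to clamp
    }
    if (debugmode) {
        printf("GLM-4 is broken on larger batch sizes in Vulkan. Clamp ignored in debug.\n");
    } else {
        printf("GLM-4 is broken on larger batch sizes in Vulkan. Clamping ubatch size to 8.\n");
        p.n_ubatch = 8;   // the limit this commit lowers from 16 to 8
    }
}

int main() {
    kcpp_params p;
    clamp_glm4_ubatch_on_vulkan(p, /*is_glm4=*/true, /*debugmode=*/false);
    printf("effective n_ubatch = %d\n", p.n_ubatch);   // prints 8
    return 0;
}

The sketch mirrors the two branches visible in the diff: in debug mode the clamp is only reported, otherwise n_ubatch is overwritten before the model is loaded.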