Fix glm4moe bug (#15088)

This commit is contained in:
Juk Armstrong 2025-08-05 13:56:44 +01:00 committed by GitHub
parent 22f060c9c4
commit c81de6e107
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -13800,10 +13800,6 @@ struct llm_build_glm4_moe : public llm_graph_context {
LLM_FFN_SILU, LLM_FFN_PAR, il);
cb(cur, "ffn_out", il);
} else {
// MoE layer with shared experts
const int64_t n_expert = hparams.n_expert;
const int64_t n_expert_used = hparams.n_expert_used;
// Process routed experts using existing MoE infrastructure
ggml_tensor * routed_out = build_moe_ffn(cur,
model.layers[il].ffn_gate_inp,