From ffb86c66e3ac06a5eb57a3f4ebbc219d959142c4 Mon Sep 17 00:00:00 2001
From: liam
Date: Wed, 26 Feb 2025 15:04:25 +0800
Subject: [PATCH] :zap: fix experts torch

---
 ktransformers/operators/experts.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/ktransformers/operators/experts.py b/ktransformers/operators/experts.py
index 10e3a66..88960c7 100644
--- a/ktransformers/operators/experts.py
+++ b/ktransformers/operators/experts.py
@@ -459,9 +459,9 @@ class KExpertsTorch(KExpertsBase):
             self.up[i] = w["up"][i, ...].to(device=device, dtype=self.dtype)
             self.down[i] = w["down"][i, ...].to(device=device, dtype=self.dtype)
 
-        self.up = torch.cat(self.up, dim=0)
-        self.gate = torch.cat(self.gate, dim=0)
-        self.down = torch.cat(self.down, dim=0)
+        self.up = torch.stack(self.up, dim=0)
+        self.gate = torch.stack(self.gate, dim=0)
+        self.down = torch.stack(self.down, dim=0)
         return
 
     def unload(self):