fix local_chat.py chunk_size not taking effect for experts

qiyuxinlin 2025-05-23 02:35:01 +00:00
parent adc0906967
commit 71a5fc5770
2 changed files with 3 additions and 2 deletions


@@ -70,6 +70,7 @@ def local_chat(
     torch.set_grad_enabled(False)
     Config().cpu_infer = cpu_infer
+    Config().chunk_size = chunk_size
     if torch.xpu.is_available():
         use_cuda_graph = False
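
Why a single assignment fixes the bug: local_chat receives chunk_size as a CLI argument, but the expert (CPU-inference) path reads its settings from the shared Config object rather than from local_chat's parameters, so a value that is never written into Config is silently ignored. The sketch below is a hypothetical, simplified illustration of that singleton-config pattern; the class body, default values, and the run_experts helper are invented for the example, and only the Config().chunk_size = chunk_size assignment mirrors the actual change in this commit.

class Config:
    """Toy singleton: every Config() call returns the same instance."""
    _instance = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance.cpu_infer = 1     # illustrative defaults, not the real ones
            cls._instance.chunk_size = 8192
        return cls._instance


def run_experts():
    # Stand-in for the expert path: it reads chunk_size from the shared
    # singleton, not from local_chat's function arguments.
    print(f"experts see chunk_size={Config().chunk_size}")


def local_chat(cpu_infer: int = 4, chunk_size: int = 512) -> None:
    Config().cpu_infer = cpu_infer
    Config().chunk_size = chunk_size   # without this line, experts keep the default
    run_experts()


if __name__ == "__main__":
    local_chat(chunk_size=512)   # prints: experts see chunk_size=512

Without the added assignment, run_experts would print the default 8192 regardless of the --chunk_size passed to local_chat, which is the symptom the commit title describes.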