From a31e09969f8423fd9cd93130e00a0376eeffe024 Mon Sep 17 00:00:00 2001
From: SkqLiao
Date: Sat, 15 Mar 2025 02:37:08 +0800
Subject: [PATCH] fix typo

---
 .github/workflows/install.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/install.yml b/.github/workflows/install.yml
index fce549c..a58b426 100644
--- a/.github/workflows/install.yml
+++ b/.github/workflows/install.yml
@@ -61,10 +61,10 @@ jobs:
           export CUDA_HOME=/usr/local/cuda-12.4
           cd ${{ github.workspace }}
           echo "Running Local Chat 1"
-          python ktransformers/local_chat-test.py --model_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 256 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/prompts/book.txt > log1.txt
+          python ktransformers/local_chat_test.py --model_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 256 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/prompts/book.txt > log1.txt
           sed -n '/Prompt:,$p' log1.txt
           echo "Running Local Chat 2"
-          python ktransformers/local_chat-test.py --model_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 256 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/prompts/chinese.txt > log2.txt
+          python ktransformers/local_chat_test.py --model_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 256 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/prompts/chinese.txt > log2.txt
           sed -n '/Prompt:,$p' log2.txt
       - run: echo "This job's status is ${{ job.status }}."