fix md typo, fix code style, and update setup value error message

This commit is contained in:
rnwang04 2025-05-15 09:14:59 +00:00
parent 2d3aaef8b6
commit 2f6e14a54b
5 changed files with 6 additions and 6 deletions

View file

@@ -23,7 +23,7 @@ Our vision for KTransformers is to serve as a flexible platform for experimentin
<h2 id="Updates">🔥 Updates</h2>
**May 14, 2025**: Support Intel Arc GPU ([Tutorial](./doc/en/xpu.md)).
* **May 14, 2025**: Support Intel Arc GPU ([Tutorial](./doc/en/xpu.md)).
* **Apr 29, 2025**: Support AMX-Int8, AMX-BF16 and Qwen3MoE ([Tutorial](./doc/en/AMX.md))

View file

@@ -21,6 +21,7 @@ interface, RESTful APIs compliant with OpenAI and Ollama, and even a simplified
Our vision for KTransformers is to serve as a flexible platform for experimenting with innovative LLM inference optimizations. Please let us know if you need any other features.
<h2 id="Updates">🔥 Updates</h2>
* **May 14, 2025**: Support Intel Arc GPU ([Tutorial](./en/xpu.md)).
* **Apr 9, 2025**: Experimental support for LLaMA 4 models ([Tutorial](./en/llama4.md)).
* **Apr 2, 2025**: Support Multi-concurrency. ([Tutorial](./en/balance-serve.md)).

View file

@@ -41,7 +41,6 @@ Install PyTorch with XPU backend support and [IPEX-LLM](https://github.com/intel
pip install --pre --upgrade ipex-llm[xpu_2.6] --extra-index-url https://download.pytorch.org/whl/xpu
pip uninstall torch torchvision torchaudio
pip install torch==2.7+xpu torchvision torchaudio --index-url https://download.pytorch.org/whl/test/xpu # install torch2.7
pip install packaging ninja cpufeature numpy
pip uninstall intel-opencl-rt dpcpp-cpp-rt
```

View file

@@ -229,7 +229,7 @@ class VersionInfo:
elif torch.xpu.is_available():
backend_version = f"xpu"
else:
raise ValueError("Unsupported backend: CUDA_HOME MUSA_HOME ROCM_HOME all not set.")
raise ValueError("Unsupported backend: CUDA_HOME MUSA_HOME ROCM_HOME all not set and XPU is not available.")
package_version = f"{flash_version}+{backend_version}torch{torch_version}{cpu_instruct}"
if full_version:
return package_version
@@ -501,7 +501,7 @@ class CMakeBuild(BuildExtension):
elif KTRANSFORMERS_BUILD_XPU:
cmake_args += ["-DKTRANSFORMERS_USE_XPU=ON", "-DKTRANSFORMERS_USE_CUDA=OFF"]
else:
raise ValueError("Unsupported backend: CUDA_HOME, MUSA_HOME, and ROCM_HOME are not set.")
raise ValueError("Unsupported backend: CUDA_HOME, MUSA_HOME, and ROCM_HOME are not set and XPU is not available.")
cmake_args = get_cmake_abi_args(cmake_args)
# log cmake_args
@@ -628,7 +628,7 @@ elif MUSA_HOME is not None:
elif torch.xpu.is_available(): #XPUExtension is not available now.
ops_module = None
else:
raise ValueError("Unsupported backend: CUDA_HOME and MUSA_HOME are not set.")
raise ValueError("Unsupported backend: CUDA_HOME ROCM_HOME MUSA_HOME are not set and XPU is not available.")
if not torch.xpu.is_available():
ext_modules = [