Mirror of https://github.com/kvcache-ai/ktransformers.git, synced 2025-09-06 12:40:02 +00:00
fix

commit edd9efa49e
parent 055680e26c

1 changed file with 3 additions and 3 deletions
@@ -28,6 +28,9 @@ echo "Installing python dependencies from requirements.txt"
 pip install -r requirements-local_chat.txt
 pip install -r ktransformers/server/requirements.txt
 
+echo "Installing ktransformers"
+KTRANSFORMERS_FORCE_BUILD=TRUE pip install -v . --no-build-isolation
+
 # XPU-specific fix for triton
 if [[ "$DEV_BACKEND" == "xpu" ]]; then
     echo "Replacing triton for XPU backend"
@@ -35,9 +38,6 @@ if [[ "$DEV_BACKEND" == "xpu" ]]; then
     pip install pytorch-triton-xpu==3.3.0 --extra-index-url https://download.pytorch.org/whl/xpu
 fi
 
-echo "Installing ktransformers"
-KTRANSFORMERS_FORCE_BUILD=TRUE pip install -v . --no-build-isolation
-
 if [[ "$DEV_BACKEND" == "cuda" ]]; then
     echo "Installing custom_flashinfer for CUDA backend"
     pip install third_party/custom_flashinfer/
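The net effect of this commit is to move the ktransformers build step so that it runs before the XPU-specific triton replacement rather than after it. As a reading aid, here is a minimal sketch of how this part of the install script reads once the change is applied, reconstructed from the hunk context above; the line elided between the two hunks, the closing fi of the CUDA block, and the exact indentation are assumptions, not shown in the diff.

pip install -r requirements-local_chat.txt
pip install -r ktransformers/server/requirements.txt

echo "Installing ktransformers"
KTRANSFORMERS_FORCE_BUILD=TRUE pip install -v . --no-build-isolation

# XPU-specific fix for triton
if [[ "$DEV_BACKEND" == "xpu" ]]; then
    echo "Replacing triton for XPU backend"
    # ... one line not shown between the two hunks ...
    pip install pytorch-triton-xpu==3.3.0 --extra-index-url https://download.pytorch.org/whl/xpu
fi

if [[ "$DEV_BACKEND" == "cuda" ]]; then
    echo "Installing custom_flashinfer for CUDA backend"
    pip install third_party/custom_flashinfer/
fi   # closing fi assumed; it lies outside the diff context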