mirror of https://github.com/kvcache-ai/ktransformers.git
synced 2025-09-10 06:14:58 +00:00
support npu

commit dd0e41b3b8
parent 1677e90092

14 changed files with 1453 additions and 5 deletions
setup.py  (27 changed lines)
@@ -41,6 +41,13 @@ except ImportError:
     MUSA_HOME=None
 
 KTRANSFORMERS_BUILD_XPU = torch.xpu.is_available()
+
+try:
+    import torch_npu
+    KTRANSFORMERS_BUILD_NPU = torch_npu.npu.is_available()
+except (ModuleNotFoundError, ImportError):
+    KTRANSFORMERS_BUILD_NPU = False
+
 # Detect the DEV_BACKEND environment variable
 dev_backend = os.environ.get("DEV_BACKEND", "").lower()
 if dev_backend == "xpu":
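A quick way to sanity-check the environment this probe relies on, before starting a build, is to run the same import/availability check by hand. The following is a standalone sketch, not part of setup.py; it only uses the torch_npu calls that appear in the hunk above.

import torch

try:
    import torch_npu  # Huawei Ascend NPU plugin for PyTorch; optional dependency
    npu_ok = torch_npu.npu.is_available()
except (ModuleNotFoundError, ImportError):
    # torch_npu not installed (or failed to load): the build falls back to non-NPU backends
    npu_ok = False

print(f"torch {torch.__version__}, NPU available: {npu_ok}")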
@@ -237,6 +244,8 @@ class VersionInfo:
             backend_version = f"rocm{self.get_rocm_bare_metal_version(ROCM_HOME)}"
         elif torch.xpu.is_available():
             backend_version = f"xpu"
+        elif KTRANSFORMERS_BUILD_NPU:
+            backend_version = f"npu{torch_npu.__version__}"
         else:
             raise ValueError("Unsupported backend: CUDA_HOME MUSA_HOME ROCM_HOME all not set and XPU is not available.")
         package_version = f"{flash_version}+{backend_version}torch{torch_version}{cpu_instruct}"
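For context, the backend tag chosen here is folded into the wheel's version string on the last context line of the hunk. A minimal sketch of that string assembly, using made-up example values (flash_version and cpu_instruct are placeholders, not values read from the repository):

import torch

flash_version = "0.2.4"        # placeholder for the package's own version
backend_version = "npu2.1.0"   # e.g. f"npu{torch_npu.__version__}" on Ascend
torch_version = torch.__version__.split("+")[0]
cpu_instruct = "avx2"          # placeholder for the detected CPU instruction set

package_version = f"{flash_version}+{backend_version}torch{torch_version}{cpu_instruct}"
print(package_version)         # e.g. 0.2.4+npu2.1.0torch2.3.0avx2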
@@ -509,6 +518,8 @@ class CMakeBuild(BuildExtension):
             cmake_args += ["-DKTRANSFORMERS_USE_ROCM=ON"]
         elif KTRANSFORMERS_BUILD_XPU:
             cmake_args += ["-DKTRANSFORMERS_USE_XPU=ON", "-DKTRANSFORMERS_USE_CUDA=OFF"]
+        elif KTRANSFORMERS_BUILD_NPU:
+            cmake_args += ["-DKTRANSFORMERS_USE_NPU=ON", "-DKTRANSFORMERS_USE_CUDA=OFF"]
         else:
             raise ValueError("Unsupported backend: CUDA_HOME, MUSA_HOME, and ROCM_HOME are not set and XPU is not available.")
 
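The new -DKTRANSFORMERS_USE_NPU flag is an ordinary CMake cache option, so the equivalent manual configure step looks roughly like this sketch (the source and build directories are assumed from the CMakeExtension paths elsewhere in this diff, not taken from the project's build docs):

import subprocess
from pathlib import Path

src = Path("csrc") / "ktransformers_ext"   # assumed CMake source dir
build = Path("build") / "npu"              # arbitrary out-of-tree build dir

cmake_args = ["-DKTRANSFORMERS_USE_NPU=ON", "-DKTRANSFORMERS_USE_CUDA=OFF"]
subprocess.check_call(["cmake", "-S", str(src), "-B", str(build), *cmake_args])
subprocess.check_call(["cmake", "--build", str(build), "--parallel"])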
@@ -636,10 +647,12 @@ elif MUSA_HOME is not None:
     )
 elif torch.xpu.is_available(): #XPUExtension is not available now.
     ops_module = None
+elif KTRANSFORMERS_BUILD_NPU:
+    pass
 else:
     raise ValueError("Unsupported backend: CUDA_HOME ROCM_HOME MUSA_HOME are not set and XPU is not available.")
 
-if not torch.xpu.is_available():
+if not torch.xpu.is_available() and not KTRANSFORMERS_BUILD_NPU:
     ext_modules = [
         CMakeExtension("cpuinfer_ext", os.fspath(Path("").resolve() / "csrc" / "ktransformers_ext")),
         ops_module,
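The effect of the widened condition is that neither the XPU path nor the new NPU path ships the GPU ops_module; only the CPU-side cpuinfer_ext extension is built there. A condensed, runnable illustration of the gate, with plain strings standing in for the CMakeExtension objects (it assumes a torch build that exposes torch.xpu, just as setup.py itself does):

import torch

KTRANSFORMERS_BUILD_NPU = False           # would come from the torch_npu probe earlier in setup.py
cpuinfer_ext, ops_module = "cpuinfer_ext", "ops"   # placeholders for the real extension objects

if not torch.xpu.is_available() and not KTRANSFORMERS_BUILD_NPU:
    ext_modules = [cpuinfer_ext, ops_module]   # CUDA/MUSA/ROCm: keep the GPU ops kernels
else:
    ext_modules = [cpuinfer_ext]               # XPU/NPU: CPU extension only

print(ext_modules)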
@@ -660,10 +673,20 @@ if not torch.xpu.is_available():
         ext_modules.append(
             CMakeExtension("balance_serve", os.fspath(Path("").resolve()/ "csrc"/ "balance_serve"))
         )
-else:
+elif torch.xpu.is_available():
     ext_modules = [
         CMakeExtension("cpuinfer_ext", os.fspath(Path("").resolve() / "csrc" / "ktransformers_ext")),
     ]
+elif KTRANSFORMERS_BUILD_NPU:
+    ext_modules = [
+        CMakeExtension("cpuinfer_ext", os.fspath(Path("").resolve() / "csrc" / "ktransformers_ext")),
+    ]
+    if with_balance:
+        print("using balance_serve")
+        ext_modules.append(
+            CMakeExtension("balance_serve", os.fspath(Path("").resolve()/ "csrc"/ "balance_serve"))
+        )
+
 
 setup(
     name=VersionInfo.PACKAGE_NAME,
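For readers unfamiliar with the pattern: CMakeExtension entries like the ones added here are placeholder Extension objects with no sources; a custom build_ext command (CMakeBuild in the earlier hunk) runs CMake on their source directory instead of letting setuptools compile anything. A stripped-down sketch of that convention, following the common pybind11/PyTorch CMake-extension recipe rather than ktransformers' exact classes:

import os
from pathlib import Path
from setuptools import Extension

class CMakeExtension(Extension):
    def __init__(self, name: str, sourcedir: str = ""):
        # No Python/C sources: a custom build_ext invokes CMake in `sourcedir`
        # and copies the resulting shared library into the wheel.
        super().__init__(name, sources=[])
        self.sourcedir = os.path.abspath(sourcedir)

ext_modules = [
    CMakeExtension("cpuinfer_ext", os.fspath(Path("").resolve() / "csrc" / "ktransformers_ext")),
    CMakeExtension("balance_serve", os.fspath(Path("").resolve() / "csrc" / "balance_serve")),
]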