Mirror of https://github.com/kvcache-ai/ktransformers.git (synced 2025-09-05 20:19:51 +00:00)
fix: use 'cuda:0' by default if torch_device is 'cuda'
This commit is contained in:
parent ee24eb8dc3
commit c70b6f4d5b

1 changed file with 1 addition and 0 deletions
@@ -130,6 +130,7 @@ class KTransformersInterface(TransformersInterface):
         logger.debug(f"input_ids: {input_ids.shape}")
 
         device = self.device_map.get("blk.0.self_attn", {}).get("generate_device", "cuda:0")
+        device = "cuda:0" if device == "cuda" else device
 
         if is_new:
             self.ever_generated_ids.clear()
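
For context, the sketch below shows what the added line does in isolation. The device_map contents here are hypothetical stand-ins; in the real KTransformersInterface the map comes from the model's optimization rules, and before this fix a bare "cuda" value was passed through unchanged.

# Hypothetical device_map entry; real entries are produced elsewhere in ktransformers.
device_map = {"blk.0.self_attn": {"generate_device": "cuda"}}

# Look up the generation device, defaulting to "cuda:0" when the key is missing.
device = device_map.get("blk.0.self_attn", {}).get("generate_device", "cuda:0")

# A bare "cuda" is ambiguous on multi-GPU machines; pin it to the first GPU.
device = "cuda:0" if device == "cuda" else device

print(device)  # prints "cuda:0"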