Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-10-16 00:28:10 +08:00)
parent 72bbd5bdef
commit f1d7228a74
@@ -73,7 +73,7 @@ def get_current_device() -> str:
     if accelerate.utils.is_xpu_available():
         return "xpu:{}".format(os.environ.get("LOCAL_RANK", "0"))
     elif accelerate.utils.is_npu_available() or torch.cuda.is_available():
-        return os.environ.get("LOCAL_RANK", "0")
+        return "cuda:{}".format(os.environ.get("LOCAL_RANK", "0"))
     else:
         return "cpu"
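For reference, a minimal sketch of how the helper reads after this hunk, assuming only the imports implied by the diff (os, torch, accelerate.utils); anything outside the lines shown above is an assumption, not the repository's actual surrounding code. The change makes the CUDA branch return a full device string such as "cuda:0" instead of the bare LOCAL_RANK value.

import os

import accelerate.utils
import torch


def get_current_device() -> str:
    # Reconstructed from the hunk above; only the lines shown there are certain.
    if accelerate.utils.is_xpu_available():
        return "xpu:{}".format(os.environ.get("LOCAL_RANK", "0"))
    elif accelerate.utils.is_npu_available() or torch.cuda.is_available():
        # Before this commit the branch returned the bare rank (e.g. "0"),
        # which is not a standard torch device string; now it returns "cuda:<rank>".
        return "cuda:{}".format(os.environ.get("LOCAL_RANK", "0"))
    else:
        return "cpu"


# Hypothetical usage (not from the commit): the returned string can be passed
# directly to torch.device or Module.to().
device = torch.device(get_current_device())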