Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-04 04:32:50 +08:00)
parent c60e79c12e
commit a388af4adc
@@ -73,7 +73,7 @@ def get_current_device() -> str:
     if accelerate.utils.is_xpu_available():
         return "xpu:{}".format(os.environ.get("LOCAL_RANK", "0"))
     elif accelerate.utils.is_npu_available() or torch.cuda.is_available():
-        return os.environ.get("LOCAL_RANK", "0")
+        return "cuda:{}".format(os.environ.get("LOCAL_RANK", "0"))
     else:
         return "cpu"
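For context, below is a minimal sketch of the helper as it reads after this commit, plus one way the returned string might be consumed. The function body mirrors the diff above; the __main__ usage is an assumption about caller intent (not code from the repository): the CUDA/NPU branch now yields a fully qualified device specifier such as "cuda:0" instead of the bare LOCAL_RANK digits, so the result can be passed directly to torch.device().

import os

import accelerate.utils
import torch


def get_current_device() -> str:
    # Intel XPU detected by accelerate: return an explicit "xpu:<rank>" spec.
    if accelerate.utils.is_xpu_available():
        return "xpu:{}".format(os.environ.get("LOCAL_RANK", "0"))
    # Ascend NPU or CUDA: after this commit the branch returns "cuda:<rank>"
    # rather than just the rank digits (e.g. "cuda:0" instead of "0").
    elif accelerate.utils.is_npu_available() or torch.cuda.is_available():
        return "cuda:{}".format(os.environ.get("LOCAL_RANK", "0"))
    # No accelerator found: fall back to CPU.
    else:
        return "cpu"


if __name__ == "__main__":
    # Hypothetical usage: the returned string is now always a valid torch device spec.
    device = torch.device(get_current_device())
    print(torch.zeros(2, device=device))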