Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-10-14 23:58:11 +08:00)
_is_bf16_available check supports NPU
Former-commit-id: 50a1e892a1005b4cdd82dca1005f71db08ed89a2
parent 2e257d6af0
commit 57d4c4a4f8
@@ -37,7 +37,7 @@ from .logging import get_logger
 _is_fp16_available = is_torch_npu_available() or is_torch_cuda_available()
 try:
-    _is_bf16_available = is_torch_bf16_gpu_available()
+    _is_bf16_available = is_torch_bf16_gpu_available() or (is_torch_npu_available() and torch.npu.is_bf16_supported())
 except Exception:
     _is_bf16_available = False
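For context, below is a minimal sketch of how capability flags like these are typically consumed when picking a compute dtype. The flag definitions mirror the patched lines above; the helper infer_optim_dtype and its exact fallback order are assumptions for illustration, not necessarily the code in this repository.

import torch
from transformers.utils import (
    is_torch_bf16_gpu_available,
    is_torch_cuda_available,
    is_torch_npu_available,
)

# fp16 is usable on either CUDA or Ascend NPU devices.
_is_fp16_available = is_torch_npu_available() or is_torch_cuda_available()
try:
    # bf16 is usable on bf16-capable GPUs, or on NPUs that report bf16 support.
    # The NPU check short-circuits, so torch.npu is only touched when an NPU exists.
    _is_bf16_available = is_torch_bf16_gpu_available() or (
        is_torch_npu_available() and torch.npu.is_bf16_supported()
    )
except Exception:
    _is_bf16_available = False


def infer_optim_dtype(model_dtype: torch.dtype) -> torch.dtype:
    # Hypothetical helper: prefer bf16 only when both the checkpoint and the
    # device support it, fall back to fp16 on CUDA/NPU, otherwise use fp32.
    if _is_bf16_available and model_dtype == torch.bfloat16:
        return torch.bfloat16
    elif _is_fp16_available:
        return torch.float16
    else:
        return torch.float32

Guarding the bf16 probe with try/except keeps module import safe on machines where neither a bf16-capable GPU nor the torch_npu plugin is present.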