From 57d4c4a4f8a2afd0b927354d81a635a684d6da8b Mon Sep 17 00:00:00 2001
From: Ricardo
Date: Fri, 16 Aug 2024 02:58:22 +0000
Subject: [PATCH] _is_bf16_available check supports NPU

Former-commit-id: 50a1e892a1005b4cdd82dca1005f71db08ed89a2
---
 src/llamafactory/extras/misc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/llamafactory/extras/misc.py b/src/llamafactory/extras/misc.py
index c1395552..9d1cbd0b 100644
--- a/src/llamafactory/extras/misc.py
+++ b/src/llamafactory/extras/misc.py
@@ -37,7 +37,7 @@ from .logging import get_logger
 
 
 _is_fp16_available = is_torch_npu_available() or is_torch_cuda_available()
 try:
-    _is_bf16_available = is_torch_bf16_gpu_available()
+    _is_bf16_available = is_torch_bf16_gpu_available() or (is_torch_npu_available() and torch.npu.is_bf16_supported())
 except Exception:
     _is_bf16_available = False