From a9312387bc39a7677b0e4ddd3eb5bd25a4315dff Mon Sep 17 00:00:00 2001
From: Ricardo
Date: Fri, 16 Aug 2024 02:58:22 +0000
Subject: [PATCH] _is_bf16_available check supports NPU

Former-commit-id: 384ab8db84eef7d1f6a7643c15c565a7d4906a5c
---
 src/llamafactory/extras/misc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/llamafactory/extras/misc.py b/src/llamafactory/extras/misc.py
index c1395552..9d1cbd0b 100644
--- a/src/llamafactory/extras/misc.py
+++ b/src/llamafactory/extras/misc.py
@@ -37,7 +37,7 @@ from .logging import get_logger
 _is_fp16_available = is_torch_npu_available() or is_torch_cuda_available()
 try:
-    _is_bf16_available = is_torch_bf16_gpu_available()
+    _is_bf16_available = is_torch_bf16_gpu_available() or (is_torch_npu_available() and torch.npu.is_bf16_supported())
 except Exception:
     _is_bf16_available = False
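
Context for the hunk above: a minimal sketch of how the patched capability probe reads in isolation, assuming the availability helpers come from transformers.utils (as in similar LLaMA-Factory modules) and that torch_npu exposes torch.npu.is_bf16_supported() on Ascend builds. The pick_mixed_precision_dtype helper is hypothetical, added only to show how the flags might be consumed; it is not part of the patch.

    import torch
    from transformers.utils import (
        is_torch_bf16_gpu_available,
        is_torch_cuda_available,
        is_torch_npu_available,
    )

    # fp16 is treated as usable on any CUDA GPU or Ascend NPU device.
    _is_fp16_available = is_torch_npu_available() or is_torch_cuda_available()
    try:
        # bf16 is usable if the GPU reports support, or (after this patch) if an
        # NPU is present and its runtime reports bf16 support. Assumption: the
        # torch.npu.is_bf16_supported() helper exists in the installed torch_npu.
        _is_bf16_available = is_torch_bf16_gpu_available() or (
            is_torch_npu_available() and torch.npu.is_bf16_supported()
        )
    except Exception:
        # Older torch / torch_npu builds may lack these helpers; fall back to False.
        _is_bf16_available = False


    def pick_mixed_precision_dtype() -> torch.dtype:
        """Hypothetical helper: prefer bf16 when available, else fp16, else fp32."""
        if _is_bf16_available:
            return torch.bfloat16
        if _is_fp16_available:
            return torch.float16
        return torch.float32

Before this change, _is_bf16_available was False on NPU-only hosts even when the Ascend runtime supports bf16, because is_torch_bf16_gpu_available() only covers CUDA/ROCm devices; the added clause closes that gap while the surrounding try/except still guards against runtimes missing the probe.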