Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-12-17 12:20:37 +08:00
[misc] lint (#9593)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
@@ -17,13 +17,16 @@ import os
import pytest
from transformers.utils import is_flash_attn_2_available


# Compatible with Transformers v4 and Transformers v5
try:
    from transformers.utils import is_torch_sdpa_available
except ImportError:

    def is_torch_sdpa_available():
        return True


from llamafactory.extras.packages import is_transformers_version_greater_than
from llamafactory.train.test_utils import load_infer_model
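The try/except block above is a small compatibility shim: `is_torch_sdpa_available` can be imported from `transformers.utils` in Transformers v4, but the ImportError fallback returns True so newer releases, where the helper is no longer importable and SDPA support is assumed, are treated as always having SDPA. As a hedged illustration (not code from this commit), the sketch below shows how such availability helpers are commonly combined to decide which attention backends a test can exercise; the backend names "disabled", "sdpa", and "fa2" are assumed to match LLaMA-Factory's flash_attn options, and collect_attention_backends is a hypothetical helper.

# Illustrative sketch only, not the test body from this diff.
from transformers.utils import is_flash_attn_2_available

try:
    from transformers.utils import is_torch_sdpa_available
except ImportError:  # newer Transformers: helper removed, SDPA assumed present

    def is_torch_sdpa_available():
        return True


def collect_attention_backends() -> list[str]:
    # "disabled" (eager attention) is always usable; add SDPA and
    # FlashAttention-2 only when the current environment supports them.
    backends = ["disabled"]
    if is_torch_sdpa_available():
        backends.append("sdpa")

    if is_flash_attn_2_available():
        backends.append("fa2")

    return backends

A test such as test_attention in the second hunk can then presumably iterate over this list, loading the model once per backend with load_infer_model.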
@@ -36,7 +39,7 @@ INFER_ARGS = {
}


-@pytest.mark.runs_on(["cpu","npu"])
+@pytest.mark.runs_on(["cpu", "npu"])
@pytest.mark.xfail(is_transformers_version_greater_than("4.48"), reason="Attention refactor.")
def test_attention():
    attention_available = ["disabled"]
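Two markers gate this test: the custom runs_on marker declares which devices the test supports (the lint change only adds the missing space after the comma), and xfail marks the test as expected to fail on Transformers releases newer than 4.48 because of the attention refactor. The sketch below is a minimal, assumed conftest.py implementation of such a runs_on marker; the actual LLaMA-Factory conftest may work differently, and current_device is a hypothetical helper.

# Minimal sketch of registering and enforcing a custom "runs_on" marker in
# conftest.py. This is an assumption for illustration; the real LLaMA-Factory
# conftest may differ. `current_device` is a hypothetical helper.
import pytest


def current_device() -> str:
    # Hypothetical device probe: report "gpu" when CUDA is usable, else "cpu".
    try:
        import torch

        if torch.cuda.is_available():
            return "gpu"
    except ImportError:
        pass

    return "cpu"


def pytest_configure(config):
    # Register the marker so `pytest --strict-markers` does not reject it.
    config.addinivalue_line("markers", "runs_on(devices): devices this test can run on")


def pytest_runtest_setup(item):
    # Skip the test when the current device is not in the declared list.
    for marker in item.iter_markers(name="runs_on"):
        devices = marker.args[0]
        if current_device() not in devices:
            pytest.skip(f"requires one of {devices}, running on {current_device()}")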