Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-12-23 23:30:36 +08:00)
[test] add allreduce test on npu (#9619)
Co-authored-by: frozenleaves <frozen@Mac.local>
@@ -39,7 +39,7 @@ INFER_ARGS = {
 }
 
 
-@pytest.mark.runs_on(["cpu", "npu"])
+@pytest.mark.runs_on(["cpu", "npu", "cuda"])
 @pytest.mark.xfail(is_transformers_version_greater_than("4.48"), reason="Attention refactor.")
 def test_attention():
     attention_available = ["disabled"]
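The commit title mentions a new all-reduce test on NPU, which is not part of the hunk shown above. Below is a minimal sketch of how such a test might look when marked with the repository's `runs_on` marker seen in the diff; the single-process "gloo" process group, the TCP init address, the tensor shape, and the test body are illustrative assumptions, not the actual test added by this commit.

import pytest
import torch
import torch.distributed as dist


# Sketch only: `runs_on` is the custom marker used in this test suite (see the diff
# above); it must be registered in conftest for this to be collected without warnings.
@pytest.mark.runs_on(["cpu", "npu"])
def test_allreduce():
    # A single-rank "gloo" group is enough to exercise the all_reduce call path on CPU;
    # the backend and init address here are assumptions for the sketch.
    dist.init_process_group(
        backend="gloo", init_method="tcp://127.0.0.1:29500", rank=0, world_size=1
    )
    try:
        tensor = torch.ones(4)
        dist.all_reduce(tensor, op=dist.ReduceOp.SUM)
        # With world_size == 1, the summed tensor is unchanged.
        assert torch.equal(tensor, torch.ones(4))
    finally:
        dist.destroy_process_group()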