Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-23 06:12:50 +08:00)
Update test_attention.py
Former-commit-id: a9b3d91952dd5a51ff97fbb40a2dd88885d380b8
parent 9aa640f27b
commit a9f10a9abd
@@ -29,7 +29,7 @@ INFER_ARGS = {


 def test_attention():
-    attention_available = ["off"]
+    attention_available = ["disabled"]
     if is_torch_sdpa_available():
         attention_available.append("sdpa")

@@ -37,7 +37,7 @@ def test_attention():
         attention_available.append("fa2")

     llama_attention_classes = {
-        "off": "LlamaAttention",
+        "disabled": "LlamaAttention",
         "sdpa": "LlamaSdpaAttention",
         "fa2": "LlamaFlashAttention2",
     }
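For context, the commit renames the eager-attention key from "off" to "disabled" both where the list of testable backends is built and in the mapping to the expected Llama attention class names. The following is a minimal, self-contained sketch of that probing and lookup logic, not the committed test itself: it assumes the transformers availability helpers is_torch_sdpa_available and is_flash_attn_2_available (the first is imported in the visible hunk, the second is implied by the "fa2" branch), and the available_attention_backends helper plus the small demo at the bottom are illustrative additions.

    # sketch_attention_backends.py -- illustrative only, not the repository's test file
    from transformers.utils import is_flash_attn_2_available, is_torch_sdpa_available

    # Mapping from the requested backend name to the Llama attention class expected
    # to be instantiated; "disabled" (formerly "off") selects the eager implementation.
    LLAMA_ATTENTION_CLASSES = {
        "disabled": "LlamaAttention",
        "sdpa": "LlamaSdpaAttention",
        "fa2": "LlamaFlashAttention2",
    }

    def available_attention_backends() -> list[str]:
        """Return the backends that can actually be exercised on this machine."""
        backends = ["disabled"]  # the eager path is always available
        if is_torch_sdpa_available():
            backends.append("sdpa")
        if is_flash_attn_2_available():
            backends.append("fa2")
        return backends

    if __name__ == "__main__":
        for backend in available_attention_backends():
            print(backend, "->", LLAMA_ATTENTION_CLASSES[backend])

In the actual test, each available backend would presumably be passed to the model loader and the instantiated attention modules checked against this mapping; that assertion loop is outside the hunks shown above.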