From a9f10a9abd8f5fa54e7ac92f7502658777bb6e46 Mon Sep 17 00:00:00 2001
From: hoshi-hiyouga
Date: Mon, 24 Jun 2024 21:35:34 +0800
Subject: [PATCH] Update test_attention.py

Former-commit-id: a9b3d91952dd5a51ff97fbb40a2dd88885d380b8
---
 tests/model/model_utils/test_attention.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/model/model_utils/test_attention.py b/tests/model/model_utils/test_attention.py
index 97ac9dcc..4cae3d7c 100644
--- a/tests/model/model_utils/test_attention.py
+++ b/tests/model/model_utils/test_attention.py
@@ -29,7 +29,7 @@ INFER_ARGS = {
 
 
 def test_attention():
-    attention_available = ["off"]
+    attention_available = ["disabled"]
     if is_torch_sdpa_available():
         attention_available.append("sdpa")
 
@@ -37,7 +37,7 @@ def test_attention():
         attention_available.append("fa2")
 
     llama_attention_classes = {
-        "off": "LlamaAttention",
+        "disabled": "LlamaAttention",
         "sdpa": "LlamaSdpaAttention",
         "fa2": "LlamaFlashAttention2",
     }