Former-commit-id: c90223639790152fadd100cedb5f63d375d9c195
commit 755e3e49b4
parent d2ce9b879b
hiyouga 2023-09-28 00:53:29 +08:00

@@ -13,7 +13,7 @@ try:
     from flash_attn import flash_attn_func, flash_attn_varlen_func # type: ignore
     from flash_attn.bert_padding import pad_input, unpad_input # type: ignore
 except ImportError:
-    raise ImportError("Please install FlashAttention from https://github.com/Dao-AILab/flash-attention")
+    print("FlashAttention-2 is not installed, ignore this if you are not using FlashAttention.")
 
 
 logger = logging.get_logger(__name__)
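
With this change, flash_attn becomes an optional dependency: importing the module no longer fails when FlashAttention-2 is absent, so any later use of the imported symbols has to be guarded. A minimal sketch of that guard, assuming a hypothetical is_flash_attn_available flag (the flag and the attention_forward wrapper are illustrations, not code from this commit):

# Sketch of the optional-dependency pattern this commit enables.
# is_flash_attn_available and attention_forward are hypothetical names.
try:
    from flash_attn import flash_attn_func  # type: ignore
    is_flash_attn_available = True
except ImportError:
    print("FlashAttention-2 is not installed, ignore this if you are not using FlashAttention.")
    is_flash_attn_available = False

def attention_forward(q, k, v):
    # Dispatch to the fused kernel only when the import succeeded.
    if is_flash_attn_available:
        return flash_attn_func(q, k, v, causal=True)
    raise RuntimeError("This code path requires FlashAttention-2.")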