mirror of https://github.com/hiyouga/LLaMA-Factory.git
synced 2025-08-04 12:42:51 +08:00
parent d2ce9b879b
commit 755e3e49b4
@@ -13,7 +13,7 @@ try:
     from flash_attn import flash_attn_func, flash_attn_varlen_func  # type: ignore
     from flash_attn.bert_padding import pad_input, unpad_input  # type: ignore
 except ImportError:
-    raise ImportError("Please install FlashAttention from https://github.com/Dao-AILab/flash-attention")
+    print("FlashAttention-2 is not installed, ignore this if you are not using FlashAttention.")
 
 
 logger = logging.get_logger(__name__)
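For context, the change turns FlashAttention-2 into a soft dependency: a missing package is reported but no longer aborts the import. A minimal sketch of the resulting pattern, assuming the surrounding module layout; the `_flash_attn_available` flag is hypothetical and not part of the diff:

# Soft-dependency import: use FlashAttention-2 if present, otherwise warn and continue.
try:
    from flash_attn import flash_attn_func, flash_attn_varlen_func  # type: ignore
    from flash_attn.bert_padding import pad_input, unpad_input  # type: ignore

    _flash_attn_available = True  # hypothetical flag, not in the original diff
except ImportError:
    print("FlashAttention-2 is not installed, ignore this if you are not using FlashAttention.")
    _flash_attn_available = False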
|