Former-commit-id: e131bc03e05ccae3c6ad8bb42ccf2cdcc2cf3cea
hiyouga
2023-09-14 17:56:58 +08:00
parent 3e5555502a
commit 0ca36a0f8d
4 changed files with 4 additions and 3 deletions

@@ -103,7 +103,6 @@ def load_model_and_tokenizer(
    elif hasattr(config, "rope_scaling"): # for LLaMA and Falcon models
        require_version("transformers>=4.31.0", "RoPE scaling requires transformers>=4.31.0")
        if is_trainable:
            if model_args.rope_scaling == "dynamic":
                assert not model_args.flash_attn, "Flash attention does not support dynamic rope scaling."
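
For context, the rope_scaling attribute checked in this hunk is the standard Hugging Face config field introduced in transformers 4.31.0. A minimal sketch of how it is typically set (the model id and scaling factor below are illustrative placeholders, not taken from this commit):

    from transformers import AutoConfig

    # Illustrative model id; any LLaMA-style checkpoint with RoPE works the same way.
    config = AutoConfig.from_pretrained("meta-llama/Llama-2-7b-hf")

    # transformers>=4.31.0 accepts a dict with "type" ("linear" or "dynamic")
    # and a float "factor" > 1.0. The hunk above rejects "dynamic" scaling
    # when flash attention is enabled on a trainable model.
    config.rope_scaling = {"type": "linear", "factor": 2.0}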