hiyouga
2023-09-14 17:56:58 +08:00
parent 3202985087
commit 8857e45602
4 changed files with 4 additions and 3 deletions


@@ -103,7 +103,6 @@ def load_model_and_tokenizer(
    elif hasattr(config, "rope_scaling"):  # for LLaMA and Falcon models
        require_version("transformers>=4.31.0", "RoPE scaling requires transformers>=4.31.0")
        if is_trainable:
            if model_args.rope_scaling == "dynamic":
                assert not model_args.flash_attn, "Flash attention does not support dynamic rope scaling."
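
For context, the `rope_scaling` attribute this hunk guards is the standard transformers config field introduced in 4.31.0. Below is a minimal sketch of how that field is typically set before loading a model; the model name and scaling factor are placeholder assumptions, not values taken from this commit.

    # Minimal sketch: enabling RoPE scaling via the transformers config.
    # Assumes transformers>=4.31.0; the model name and factor are hypothetical.
    from transformers import AutoConfig, AutoModelForCausalLM

    config = AutoConfig.from_pretrained("meta-llama/Llama-2-7b-hf")

    # "linear" stretches position ids uniformly; "dynamic" rescales the RoPE
    # base at inference time. Per the assertion in the hunk above, this
    # codebase disallows combining "dynamic" with flash attention.
    config.rope_scaling = {"type": "linear", "factor": 2.0}

    model = AutoModelForCausalLM.from_pretrained(
        "meta-llama/Llama-2-7b-hf",
        config=config,
    )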