[config] Fix RoPE scaling patch for resuming from a scaled model (#9588)

DoubleWheat
2025-12-09 20:37:37 +08:00
committed by GitHub
parent 5d56817e2b
commit cff4483392


@@ -40,7 +40,10 @@ def configure_rope(config: "PretrainedConfig", model_args: "ModelArguments") ->
         logger.warning_rank0("Current model does not support RoPE scaling.")
         return
 
-    if hasattr(config, "max_position_embeddings"):
+    rope_scaling = getattr(config, "rope_scaling", None)
+    if isinstance(rope_scaling, dict) and "original_max_position_embeddings" in rope_scaling:
+        old_max_length = rope_scaling["original_max_position_embeddings"]
+    elif hasattr(config, "max_position_embeddings"):
         old_max_length = getattr(config, "max_position_embeddings", None)
     else:
         logger.warning_rank0("Cannot find the max position embeddings in the config.")
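
For context, below is a minimal, self-contained sketch of the lookup logic the patch introduces; it is not the project's actual module. SimpleNamespace stands in for a Hugging Face PretrainedConfig, and resolve_old_max_length is a hypothetical helper name; only the attribute and key names (rope_scaling, original_max_position_embeddings, max_position_embeddings) come from the diff above.

    # Hedged sketch of the patched behavior: prefer the pre-scaling length
    # stored in rope_scaling so that resuming from an already-scaled model
    # does not compute the scaling factor from an already-enlarged value.
    from types import SimpleNamespace


    def resolve_old_max_length(config):
        """Return the model's original (unscaled) max position embeddings."""
        rope_scaling = getattr(config, "rope_scaling", None)
        if isinstance(rope_scaling, dict) and "original_max_position_embeddings" in rope_scaling:
            # Resuming from a scaled model: use the recorded unscaled length.
            return rope_scaling["original_max_position_embeddings"]
        elif hasattr(config, "max_position_embeddings"):
            # Fresh model: max_position_embeddings is still the native length.
            return getattr(config, "max_position_embeddings", None)
        return None


    # Fresh base model: the native context length is used directly.
    fresh = SimpleNamespace(max_position_embeddings=4096)
    print(resolve_old_max_length(fresh))  # 4096

    # Model scaled in a previous run: max_position_embeddings was bumped to
    # 16384, but the factor must still be derived from the original 4096.
    resumed = SimpleNamespace(
        max_position_embeddings=16384,
        rope_scaling={"rope_type": "linear", "factor": 4.0,
                      "original_max_position_embeddings": 4096},
    )
    print(resolve_old_max_length(resumed))  # 4096

Without the patch, the second case would read 16384 from max_position_embeddings and compound the scaling factor on every resume; the dictionary lookup keeps the reference length stable.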