mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-05 13:12:53 +08:00)
fix log level
Former-commit-id: 7fbe8add8f449358c9815c5ba8a2052a2d874dab
parent ce490c65ae
commit 9a21785396
@@ -30,7 +30,7 @@ def configure_rope(config: "PretrainedConfig", model_args: "ModelArguments", is_
         current_max_length = getattr(config, "max_position_embeddings", None)
         if current_max_length and model_args.model_max_length > current_max_length:
-            logger.warning(
+            logger.info(
                 "Enlarge max model length from {} to {}.".format(current_max_length, model_args.model_max_length)
             )
             setattr(config, "max_position_embeddings", model_args.model_max_length)
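The hunk itself only swaps the log level: enlarging max_position_embeddings to the requested model_max_length is expected behavior, so it is reported with logger.info rather than logger.warning. To see the enlargement logic in isolation, here is a minimal, self-contained sketch; the enlarge_max_length helper and the SimpleNamespace stand-ins for PretrainedConfig and ModelArguments are hypothetical illustrations, not LLaMA-Factory APIs.

import logging
from types import SimpleNamespace

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def enlarge_max_length(config, model_args) -> None:
    # Mirrors the patched hunk: read the configured context window and
    # grow it when the requested model_max_length exceeds it.
    current_max_length = getattr(config, "max_position_embeddings", None)
    if current_max_length and model_args.model_max_length > current_max_length:
        # This is the line the commit touches: warning -> info.
        logger.info("Enlarge max model length from {} to {}.".format(current_max_length, model_args.model_max_length))
        setattr(config, "max_position_embeddings", model_args.model_max_length)

# Hypothetical stand-ins for the real config and argument objects.
config = SimpleNamespace(max_position_embeddings=4096)
model_args = SimpleNamespace(model_max_length=8192)
enlarge_max_length(config, model_args)
assert config.max_position_embeddings == 8192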