Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-10-16 00:28:10 +08:00
fix value head model resuming
Former-commit-id: ccf0b65d886c09c7c49977c43b0544fe1bfcc258
commit bcd661afa6 (parent adf2730d1d)
@@ -202,6 +202,7 @@ def load_model_and_tokenizer(
    if stage in ["rm", "ppo"]:
        model: "AutoModelForCausalLMWithValueHead" = AutoModelForCausalLMWithValueHead.from_pretrained(model)
        setattr(model, "_keys_to_ignore_on_save", [name for name, _ in model.named_parameters() if "pretrained_model" in name])
        setattr(model, "tie_weights", MethodType(lambda _: None, model))
        vhead_path = (
            model_args.checkpoint_dir[-1] if model_args.checkpoint_dir is not None else model_args.model_name_or_path
        )
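For context, vhead_path selects where the value-head weights should be read from when resuming: the last entry of checkpoint_dir if one was given, otherwise the base model_name_or_path. Below is a minimal sketch of how such weights could then be restored onto the AutoModelForCausalLMWithValueHead wrapper; the helper name load_value_head and the file name value_head.bin are assumptions for illustration, not necessarily the repository's actual API.

import os
import torch

def load_value_head(model, vhead_path: str, filename: str = "value_head.bin") -> bool:
    # Hypothetical helper: restore value-head weights saved next to a checkpoint.
    # The file name is an assumption; the project may use a different one.
    weights_file = os.path.join(vhead_path, filename)
    if not os.path.isfile(weights_file):
        return False
    state_dict = torch.load(weights_file, map_location="cpu")
    # strict=False because the file holds only the value-head parameters,
    # not the wrapped pretrained_model weights.
    model.load_state_dict(state_dict, strict=False)
    return True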