Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-10-16 00:28:10 +08:00)
set use_cache before saving model
Former-commit-id: a6a0161f32f600f3001188ff4c7929c5f13c2a03
parent 6df5c4ccef
commit e4e36a2d74
@@ -105,11 +105,13 @@ class PeftTrainer(Seq2SeqTrainer):
         if self.finetuning_args.finetuning_type == "lora":
             backbone_model.save_pretrained(output_dir, state_dict=get_state_dict(backbone_model))
         else: # freeze/full tuning
+            backbone_model.config.use_cache = True
             backbone_model.save_pretrained(
                 output_dir,
                 state_dict=get_state_dict(backbone_model),
                 safe_serialization=self.args.save_safetensors
             )
+            backbone_model.config.use_cache = False
             if self.tokenizer is not None:
                 self.tokenizer.save_pretrained(output_dir)

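Context for the change, as a minimal standalone sketch (not the repository's own code): models trained with gradient checkpointing run with config.use_cache = False, but the config.json that save_pretrained writes should advertise use_cache = True so the exported checkpoint caches key/value states at inference. The model name "gpt2" and the path "output_dir" below are placeholders, assuming a Hugging Face Transformers causal LM.

# Sketch: toggle use_cache around save_pretrained so the saved config
# enables KV caching at inference while training keeps it disabled.
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("gpt2")  # placeholder model
model.gradient_checkpointing_enable()  # checkpointing requires use_cache=False
model.config.use_cache = False

# ... training steps would run here ...

model.config.use_cache = True        # saved config.json now reads use_cache: true
model.save_pretrained("output_dir")  # placeholder output path
model.config.use_cache = False       # restore the training-time setting

Flipping the flag back to False after saving keeps any subsequent training steps consistent with gradient checkpointing, which is why the diff sets it to True only for the duration of the save.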