Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-03 20:22:49 +08:00)
fix Llama lora merge crash
Former-commit-id: 51349ea1ccbf3e53b408037986abd850a0963468
parent c083708433
commit 645c27e5e2
@@ -71,7 +71,7 @@ def export_model(args: Optional[Dict[str, Any]] = None):
         (config.top_p is not None and config.top_p != 1.0) or
         (config.typical_p is not None and config.typical_p != 1.0)
     ):
-        config.do_sample = False
+        config.do_sample = True
 
     model.save_pretrained(
         save_directory=model_args.export_dir,
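For context on the one-line change: when the merged LoRA model is written out, transformers validates its generation config, and a config that has do_sample=False alongside non-default sampling parameters such as top_p or typical_p fails that validation, which aborts the export. Setting do_sample = True instead keeps the config self-consistent. The hunk shows only the tail of the surrounding condition, so the following is a minimal, self-contained sketch of that logic; the leading `not config.do_sample` guard and the temperature check are assumptions for illustration, not lines confirmed by this commit.

# Minimal sketch of the fix, assuming the surrounding guard shape.
from transformers import GenerationConfig

# A config like the one a chat-tuned checkpoint may ship with:
# sampling parameters set, but do_sample left False.
config = GenerationConfig(do_sample=False, temperature=0.95, top_p=0.7)

# transformers validates the generation config when the model is saved;
# do_sample=False combined with non-default sampling parameters trips that
# validation and aborts the LoRA merge export, hence the flip to True.
if not config.do_sample and (                                      # assumed guard
    (config.temperature is not None and config.temperature != 1.0) or  # assumed check
    (config.top_p is not None and config.top_p != 1.0) or
    (config.typical_p is not None and config.typical_p != 1.0)
):
    config.do_sample = True

Saving a model whose generation config looks like the unpatched example above raises an error in recent transformers releases, which is consistent with the "lora merge crash" in the commit title; with do_sample flipped to True, model.save_pretrained(save_directory=model_args.export_dir, ...) completes.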