Fix Llama LoRA merge crash

Former-commit-id: a8bd8e9149ff79a2707fec9c6d006761cfdd0dee
marko1616 2024-03-24 02:55:23 +08:00
parent edeed55664
commit 2eba1c6851


@@ -71,7 +71,7 @@ def export_model(args: Optional[Dict[str, Any]] = None):
         (config.top_p is not None and config.top_p != 1.0) or
         (config.typical_p is not None and config.typical_p != 1.0)
     ):
-        config.do_sample = False
+        config.do_sample = True
     model.save_pretrained(
         save_directory=model_args.export_dir,
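
For context on the crash: recent transformers releases validate a GenerationConfig before writing it to disk, and a config with do_sample=False combined with non-default sampling parameters (as Llama checkpoints often ship) can fail that validation during export. The sketch below shows the idea behind flipping do_sample to True; it is an illustration under those assumptions, not the project's code, and the output directory name is made up.

```python
from transformers import GenerationConfig

# A Llama-style generation config: greedy decoding flagged, but sampling
# parameters set to non-default values -- the inconsistent combination
# that strict validation on save can reject.
config = GenerationConfig(do_sample=False, temperature=0.9, top_p=0.6)

# Same guard as in the diff: if any sampling knob differs from its default,
# force do_sample=True so the config is self-consistent.
if (
    (config.temperature is not None and config.temperature != 1.0)
    or (config.top_p is not None and config.top_p != 1.0)
    or (config.typical_p is not None and config.typical_p != 1.0)
):
    config.do_sample = True

# Writes generation_config.json without tripping the validation error.
# "exported_model" is an illustrative directory name.
config.save_pretrained("exported_model")
```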