Merge pull request #2266 from yhyu13/fix_export_model_dtype

Remove manually set use_cache; torch_dtype is not str, save model as b… Former-commit-id: 8c0827ba92a458e18c3b68af0330af3a65149f96
This commit is contained in: 013ded4bac
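The "torch_dtype is not str" half of the commit message is easiest to see in isolation. A minimal sketch using plain torch (no LLaMA-Factory code assumed): a string stored in the config never compares equal to a real torch.dtype, so dtype checks against it silently fall through.

import torch

# A config attribute set to the string "float16" never matches a torch.dtype,
# so a check like `config.torch_dtype == torch.bfloat16` cannot behave as intended.
print("float16" == torch.float16)        # False: str vs torch.dtype
print(torch.float16 == torch.float16)    # True: real dtypes compare as expected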
@@ -56,12 +56,11 @@ def export_model(args: Optional[Dict[str, Any]] = None):
     if not isinstance(model, PreTrainedModel):
         raise ValueError("The model is not a `PreTrainedModel`, export aborted.")
 
-    setattr(model.config, "use_cache", True)
-    if getattr(model.config, "torch_dtype", None) == torch.bfloat16:
-        model = model.to(torch.bfloat16).to("cpu")
+    if hasattr(model.config, "torch_dtype"):
+        model = model.to(getattr(model.config, "torch_dtype")).to("cpu")
     else:
         model = model.to(torch.float16).to("cpu")
-        setattr(model.config, "torch_dtype", "float16")
+        setattr(model.config, "torch_dtype", torch.float16)
 
     model.save_pretrained(
         save_directory=model_args.export_dir,
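Putting the patched logic together, here is a minimal, runnable sketch of the export path as it reads after this change. `cast_and_export` and `export_dir` are hypothetical names introduced for illustration; in LLaMA-Factory the logic lives inside `export_model` and writes to `model_args.export_dir`.

import torch
from transformers import PreTrainedModel


def cast_and_export(model, export_dir: str) -> None:
    if not isinstance(model, PreTrainedModel):
        raise ValueError("The model is not a `PreTrainedModel`, export aborted.")

    if hasattr(model.config, "torch_dtype"):
        # Honor whatever dtype the config declares (e.g. torch.bfloat16)
        # instead of special-casing bfloat16 as the old code did.
        model = model.to(getattr(model.config, "torch_dtype")).to("cpu")
    else:
        # Fall back to float16, and record the real torch.dtype in the config
        # rather than the string "float16".
        model = model.to(torch.float16).to("cpu")
        setattr(model.config, "torch_dtype", torch.float16)

    model.save_pretrained(save_directory=export_dir)

Dropping the unconditional `setattr(model.config, "use_cache", True)` leaves the model's own cache setting untouched at export time, which is the other half of the commit message.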