fix export

Former-commit-id: 6bc2c23b6d26b52f54ac37fa6149e6eb3cc18ee6
hiyouga 2024-03-15 15:06:30 +08:00
parent 7ef49586be
commit 06860e8f0f


@@ -60,8 +60,9 @@ def export_model(args: Optional[Dict[str, Any]] = None):
     if getattr(model, "quantization_method", None) is None:  # cannot convert dtype of a quantized model
         output_dtype = getattr(model.config, "torch_dtype", torch.float16)
-        model = model.to(output_dtype)
         setattr(model.config, "torch_dtype", output_dtype)
+        for param in model.parameters():
+            param.data = param.data.to(output_dtype)
 
     model.save_pretrained(
         save_directory=model_args.export_dir,
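
For context, a minimal sketch of the patched export path as it reads after this commit. Only the dtype handling and the identifiers visible in the diff (model, model.config, quantization_method, torch_dtype, save_pretrained, model_args.export_dir) come from the source; the wrapper function and its arguments are illustrative, not the project's actual API.

    import torch

    def cast_and_save(model, export_dir: str):
        # Skip the dtype conversion for quantized models, whose weights cannot be recast.
        if getattr(model, "quantization_method", None) is None:
            output_dtype = getattr(model.config, "torch_dtype", torch.float16)
            setattr(model.config, "torch_dtype", output_dtype)
            # Cast each parameter tensor to the export dtype; this replaces the
            # previous whole-model `model = model.to(output_dtype)` call removed by the commit.
            for param in model.parameters():
                param.data = param.data.to(output_dtype)
        model.save_pretrained(save_directory=export_dir)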