Commit df4aec7e72 (parent 62ddab4b3a) in a mirror of https://github.com/hiyouga/LLaMA-Factory.git
@@ -69,7 +69,7 @@ def autocast_projector_dtype(
     ) -> "torch.Tensor":
         return output.to(model_args.compute_dtype)
 
-    if hasattr(model, mm_projector_name) and getattr(model.config, "quantization_method", None):
+    if hasattr(model, mm_projector_name) and getattr(model, "quantization_method", None):
         logger.info("Casting multimodal projector outputs in {}.".format(model_args.compute_dtype))
         mm_projector: "torch.nn.Module" = getattr(model, mm_projector_name)
         mm_projector.register_forward_hook(_mm_projector_forward_post_hook)
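The hunk leaves the casting mechanism untouched and only changes where `quantization_method` is looked up: on the `model` object instead of `model.config`, presumably because that is where the attribute is set when a quantized checkpoint is loaded. The casting itself relies on PyTorch forward post-hooks, where a hook that returns a value replaces the module's output. Below is a minimal, self-contained sketch of that pattern, using a toy `torch.nn.Linear` projector and a stand-in `compute_dtype`; it is an illustration of the hook mechanism, not LLaMA-Factory's actual loader code.

    import torch

    compute_dtype = torch.bfloat16  # stand-in for model_args.compute_dtype

    def cast_output_hook(module: torch.nn.Module, args, output: torch.Tensor) -> torch.Tensor:
        # A forward post-hook that returns a tensor replaces the module's output.
        return output.to(compute_dtype)

    projector = torch.nn.Linear(4, 4)  # toy stand-in for the multimodal projector
    projector.register_forward_hook(cast_output_hook)

    out = projector(torch.randn(2, 4))  # forward pass runs in float32
    print(out.dtype)                    # torch.bfloat16: the hook recast the output

In the patched function, this hook is only registered when the model actually has the projector submodule and reports a quantization method, which is exactly the condition the commit corrects.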