mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-23 14:22:51 +08:00)

parent a8318723a4
commit bad35d1730
@@ -239,7 +239,7 @@ def init_adapter(
             )
             model = get_peft_model(model, lora_config)

-    if cast_trainable_params_to_fp32:
+    if is_trainable and cast_trainable_params_to_fp32:
         for param in filter(lambda p: p.requires_grad, model.parameters()):
             param.data = param.data.to(torch.float32)

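For context, a minimal sketch of the behavior after this change: the trainable parameters are upcast to float32 only when the model is loaded for training (`is_trainable`). The helper name `maybe_cast_trainable_params` below is hypothetical and not part of LLaMA-Factory's API; its body simply mirrors the guarded loop in the hunk above.

import torch
from torch import nn


def maybe_cast_trainable_params(model: nn.Module, is_trainable: bool, cast_to_fp32: bool) -> None:
    # Hypothetical helper mirroring the guarded cast in init_adapter.
    # Skipping the cast when is_trainable is False leaves adapter weights in their
    # original (often half-precision) dtype for inference-only loading.
    if is_trainable and cast_to_fp32:
        for param in filter(lambda p: p.requires_grad, model.parameters()):
            param.data = param.data.to(torch.float32)

Guarding on `is_trainable` means an inference-only load no longer pays the memory cost of float32 copies of the trainable parameters; the upcast is only needed for stable gradient updates during training.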