Fix LLaVA config

This commit is contained in:
hiyouga
2024-05-12 00:02:49 +08:00
parent 5da097f406
commit b033232aea
5 changed files with 15 additions and 15 deletions

View File

@@ -46,6 +46,9 @@ def init_adapter(
if (not finetuning_args.pure_bf16) and (not finetuning_args.use_badam):
model = model.float()
if model_args.visual_inputs and hasattr(model, "vision_tower"): # freeze vision model
model.vision_tower.requires_grad_(False)
if finetuning_args.finetuning_type == "freeze" and is_trainable:
logger.info("Fine-tuning method: Freeze")
num_layers = (