fix lora target

Former-commit-id: d822e41e7ac7e310ee49e347fc45754284ce30b8
Author: hiyouga
Date:   2023-09-09 17:04:45 +08:00
Parent: 7143c551ab
Commit: f91c5f2638
7 changed files with 63 additions and 43 deletions

@@ -82,7 +82,7 @@ def init_adapter(
                 model = PeftModel.from_pretrained(model, latest_checkpoint, is_trainable=is_trainable)
 
         if is_trainable and latest_checkpoint is None: # create new lora weights while training
-            if len(finetuning_args.lora_target) == 1 and finetuning_args.lora_target == "all":
+            if len(finetuning_args.lora_target) == 1 and finetuning_args.lora_target[0] == "all":
                target_modules = find_all_linear_modules(model, model_args.quantization_bit)
            else:
                target_modules = finetuning_args.lora_target
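
For context on the one-character fix: `finetuning_args.lora_target` is parsed into a list of module names, so the old condition compared a list against the string "all" and could never be true, which made the `find_all_linear_modules` branch unreachable. A minimal sketch of the before/after semantics, where `lora_target` is a stand-in for the parsed argument:

    # Stand-in for finetuning_args.lora_target, which is parsed from a
    # comma-separated string into a list of module names.
    lora_target = ["all"]

    # Before the fix: a list never equals a string, so the branch that
    # calls find_all_linear_modules was never taken.
    assert not (len(lora_target) == 1 and lora_target == "all")

    # After the fix: compare the single element, which matches as intended.
    assert len(lora_target) == 1 and lora_target[0] == "all"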