Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-04 12:42:51 +08:00)

commit 8673abbe5e
parent a74426df0f
@@ -107,14 +107,18 @@ def init_adapter(
                 adapter_to_merge = model_args.adapter_name_or_path

             for adapter in adapter_to_merge:
-                model: "LoraModel" = PeftModel.from_pretrained(model, adapter)
+                model: "LoraModel" = PeftModel.from_pretrained(
+                    model, adapter, offload_folder=model_args.offload_folder
+                )
                 model = model.merge_and_unload()

             if len(adapter_to_merge) > 0:
                 logger.info("Merged {} adapter(s).".format(len(adapter_to_merge)))

             if adapter_to_resume is not None:  # resume lora training
-                model = PeftModel.from_pretrained(model, adapter_to_resume, is_trainable=is_trainable)
+                model = PeftModel.from_pretrained(
+                    model, adapter_to_resume, is_trainable=is_trainable, offload_folder=model_args.offload_folder
+                )

         if is_trainable and adapter_to_resume is None:  # create new lora weights while training
             if len(finetuning_args.lora_target) == 1 and finetuning_args.lora_target[0] == "all":
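The commit threads model_args.offload_folder through both PeftModel.from_pretrained calls, so adapters loaded onto a partially disk-offloaded base model have a directory to offload to. A minimal standalone sketch of the same call pattern follows; base_model_path, adapter_path, and ./offload are placeholder values, not names from the repository:

    from peft import PeftModel
    from transformers import AutoModelForCausalLM

    # Placeholder identifiers: substitute a real base model and adapter checkpoint.
    base = AutoModelForCausalLM.from_pretrained("base_model_path", device_map="auto")

    # offload_folder gives PEFT/accelerate a directory for weights that do not fit
    # on the available devices while the adapter is attached.
    model = PeftModel.from_pretrained(base, "adapter_path", offload_folder="./offload")
    model = model.merge_and_unload()  # fold the LoRA weights back into the base model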