fix checkpoint loading

commit c0e5df92d6
parent ce71cc8b6d
Author: hiyouga
Date: 2023-05-29 17:43:16 +08:00

4 changed files with 56 additions and 23 deletions


@@ -194,7 +194,8 @@ class FinetuningArguments:
         if self.name_module_trainable == "mlp":
             self.trainable_layers = ["layers.{:d}.mlp".format(idx) for idx in trainable_layer_ids]
         elif self.name_module_trainable == "qkv":
-            self.trainable_layers = ["layers.{:d}.attention.query_key_value".format(idx) for idx in trainable_layer_ids]
+            self.trainable_layers = ["layers.{:d}.self_attn.{}".format(idx, proj) \
+                for proj in ["k_proj", "q_proj", "v_proj", "o_proj"] for idx in trainable_layer_ids]
         assert self.finetuning_type in ["none", "freeze", "lora", "full"], "Invalid fine-tuning method."
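For context: the "qkv" branch previously emitted the pattern "attention.query_key_value", the fused-projection module name used by BLOOM/ChatGLM-style models, which does not appear in LLaMA checkpoints; LLaMA exposes separate "self_attn.k_proj" / "q_proj" / "v_proj" / "o_proj" modules, which the new patterns target. A minimal sketch of how such name patterns are typically matched against model parameters under the "freeze" method (the helper name apply_freeze_tuning and the matching logic are illustrative, not the repository's actual code):

# Minimal sketch (illustrative, not the repository's code): freeze every
# parameter, then re-enable those whose names contain one of the patterns
# in trainable_layers. A LLaMA parameter name such as
# "model.layers.30.self_attn.q_proj.weight" contains the pattern
# "layers.30.self_attn.q_proj", so substring matching suffices.
def apply_freeze_tuning(model, trainable_layers):
    for name, param in model.named_parameters():
        param.requires_grad = any(pattern in name for pattern in trainable_layers)

With the old fused pattern, no LLaMA parameter name would match, so no layer would be marked trainable; the per-projection patterns above restore the intended behavior.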