Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-12-16 11:50:35 +08:00)
remove loftq
@@ -55,10 +55,6 @@ class LoraArguments:
                   Phi-1.5 choices: [\"Wqkv\", \"out_proj\", \"fc1\", \"fc2\"], \
                   Others choices: the same as LLaMA."}
     )
-    loftq_init: Optional[bool] = field(
-        default=False,
-        metadata={"help": "Use LoftQ initialization for quantized LoRA fine-tuning."}
-    )
     resume_lora_training: Optional[bool] = field(
         default=True,
         metadata={"help": "Whether to resume training from the last LoRA weights or create new weights after merging them."}
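For context, the deleted loftq_init flag toggled LoftQ initialization for quantized LoRA fine-tuning. The sketch below is a minimal illustration of how LoftQ initialization can instead be configured directly through Hugging Face PEFT (LoftQConfig plus LoraConfig); it is not part of this commit, and the model name, rank, bit width, and target modules are illustrative assumptions.

    # Minimal sketch (not from this commit): configuring LoftQ initialization
    # via Hugging Face PEFT rather than a custom loftq_init flag.
    # Model name, rank, bit width, and target modules are placeholder assumptions.
    from transformers import AutoModelForCausalLM
    from peft import LoftQConfig, LoraConfig, get_peft_model

    model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")

    loftq_config = LoftQConfig(loftq_bits=4)  # quantize base weights to 4 bits when computing the init
    lora_config = LoraConfig(
        r=8,
        lora_alpha=16,
        target_modules=["q_proj", "v_proj"],
        init_lora_weights="loftq",            # initialize LoRA matrices with LoftQ
        loftq_config=loftq_config,
    )

    model = get_peft_model(model, lora_config)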