Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-01 11:12:50 +08:00)
[model] add lora dropout to unsloth (#8548)

Co-authored-by: viyer <vivek_iyer2@apple.com>

Commit: 62c6943699
Parent: 8e7727f4ee
@@ -40,6 +40,7 @@ def _get_unsloth_kwargs(
         "load_in_4bit": model_args.quantization_bit == 4,
         "token": model_args.hf_hub_token,
         "full_finetuning": finetuning_args.finetuning_type == "full",
+        "lora_dropout": finetuning_args.lora_dropout,
         "device_map": {"": get_current_device()},
         "rope_scaling": getattr(config, "rope_scaling", None),
         "fix_tokenizer": False,
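For context, below is a minimal, self-contained sketch of how a kwargs dict like the one in this hunk might be assembled. The ModelArguments/FinetuningArguments dataclasses, the get_current_device helper, and the build_unsloth_kwargs function name here are simplified stand-ins for illustration, not LLaMA-Factory's actual implementation; only the key names mirror the diff above.

# Sketch only: hypothetical stand-ins for the argument dataclasses, used to
# illustrate how the kwargs dict shown in the hunk is built, including the
# "lora_dropout" entry added by this commit.
from dataclasses import dataclass
from typing import Any, Optional


@dataclass
class ModelArguments:  # hypothetical stand-in
    quantization_bit: Optional[int] = None
    hf_hub_token: Optional[str] = None


@dataclass
class FinetuningArguments:  # hypothetical stand-in
    finetuning_type: str = "lora"
    lora_dropout: float = 0.0  # field surfaced to unsloth by this commit


def get_current_device() -> str:
    # Placeholder for the real helper; returns a device string such as "cuda:0".
    return "cuda:0"


def build_unsloth_kwargs(
    model_args: ModelArguments,
    finetuning_args: FinetuningArguments,
    rope_scaling: Optional[dict] = None,
) -> dict[str, Any]:
    # Mirrors the key/value pairs visible in the diff above.
    return {
        "load_in_4bit": model_args.quantization_bit == 4,
        "token": model_args.hf_hub_token,
        "full_finetuning": finetuning_args.finetuning_type == "full",
        "lora_dropout": finetuning_args.lora_dropout,
        "device_map": {"": get_current_device()},
        "rope_scaling": rope_scaling,
        "fix_tokenizer": False,
    }


if __name__ == "__main__":
    kwargs = build_unsloth_kwargs(
        ModelArguments(quantization_bit=4),
        FinetuningArguments(lora_dropout=0.05),
    )
    print(kwargs["lora_dropout"])  # 0.05

In the sketch, the dropout value simply flows from the fine-tuning arguments into the kwargs dict; the point of the commit is that this value is no longer dropped when the unsloth backend is used.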