Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-01 11:12:50 +08:00)
Revert "[model] add lora dropout to unsloth" - requested feature already exists (#8554)
Co-authored-by: viyer <vivek_iyer2@apple.com>
This commit is contained in:
parent d30cbcdfa5
commit c6290db118
@@ -40,7 +40,6 @@ def _get_unsloth_kwargs(
         "load_in_4bit": model_args.quantization_bit == 4,
         "token": model_args.hf_hub_token,
         "full_finetuning": finetuning_args.finetuning_type == "full",
-        "lora_dropout": finetuning_args.lora_dropout,
         "device_map": {"": get_current_device()},
         "rope_scaling": getattr(config, "rope_scaling", None),
         "fix_tokenizer": False,
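For context: the kwargs built here go to unsloth's FastLanguageModel.from_pretrained, while LoRA dropout is configured later, when the adapter is attached via get_peft_model. That is why the reverted kwarg was redundant ("requested feature already exists"). Below is a minimal sketch of that split; the helper name, the model_args/finetuning_args fields, and get_current_device are stand-ins for LLaMA-Factory internals, not the repo's exact code.

# Illustrative sketch only; field names on the *_args objects are assumptions.
from unsloth import FastLanguageModel

def build_unsloth_kwargs(config, model_args, finetuning_args):
    # Mirrors the hunk above: no "lora_dropout" key, since
    # from_pretrained does not take one.
    return {
        "model_name": model_args.model_name_or_path,
        "load_in_4bit": model_args.quantization_bit == 4,
        "token": model_args.hf_hub_token,
        "full_finetuning": finetuning_args.finetuning_type == "full",
        "device_map": {"": get_current_device()},  # pin to the active device
        "rope_scaling": getattr(config, "rope_scaling", None),
        "fix_tokenizer": False,
    }

model, tokenizer = FastLanguageModel.from_pretrained(
    **build_unsloth_kwargs(config, model_args, finetuning_args)
)

# Dropout already has a home in the adapter step, hence the revert:
model = FastLanguageModel.get_peft_model(
    model,
    r=finetuning_args.lora_rank,
    lora_alpha=finetuning_args.lora_alpha,
    lora_dropout=finetuning_args.lora_dropout,  # the pre-existing feature
)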