From c6290db11812bcb191d0ba45b4305cf9d44cd44a Mon Sep 17 00:00:00 2001
From: Vivek Iyer
Date: Fri, 4 Jul 2025 23:25:31 -0400
Subject: [PATCH] Revert "[model] add lora dropout to unsloth" - requested
 feature already exists (#8554)

Co-authored-by: viyer
---
 src/llamafactory/model/model_utils/unsloth.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/llamafactory/model/model_utils/unsloth.py b/src/llamafactory/model/model_utils/unsloth.py
index d262ab0e..91e18dac 100644
--- a/src/llamafactory/model/model_utils/unsloth.py
+++ b/src/llamafactory/model/model_utils/unsloth.py
@@ -40,7 +40,6 @@ def _get_unsloth_kwargs(
         "load_in_4bit": model_args.quantization_bit == 4,
         "token": model_args.hf_hub_token,
         "full_finetuning": finetuning_args.finetuning_type == "full",
-        "lora_dropout": finetuning_args.lora_dropout,
         "device_map": {"": get_current_device()},
         "rope_scaling": getattr(config, "rope_scaling", None),
         "fix_tokenizer": False,