From 9ff7c99eb18107be39214843fa2879ae8c50a22d Mon Sep 17 00:00:00 2001
From: hiyouga
Date: Wed, 13 Mar 2024 23:43:42 +0800
Subject: [PATCH] fix bug

Former-commit-id: 714d936dfbe022c4f2cfa6ff643e3482a3f96012
---
 src/llmtuner/train/utils.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/llmtuner/train/utils.py b/src/llmtuner/train/utils.py
index 42294164..144af244 100644
--- a/src/llmtuner/train/utils.py
+++ b/src/llmtuner/train/utils.py
@@ -294,6 +294,7 @@ def _create_loraplus_optimizer(
         dict(params=param_dict["embedding"], lr=finetuning_args.loraplus_lr_embedding, **decay_args),
     ]
     optimizer = optim_class(param_groups, **optim_kwargs)
+    logger.info("Using LoRA+ optimizer with loraplus lr ratio {:.2f}.".format(finetuning_args.loraplus_lr_ratio))
     return optimizer
 
 
@@ -303,7 +304,7 @@ def create_custom_optimzer(
     training_args: "Seq2SeqTrainingArguments",
     finetuning_args: "FinetuningArguments",
 ) -> Optional["torch.optim.Optimizer"]:
-    if not finetuning_args.use_galore:
+    if finetuning_args.use_galore:
         return _create_galore_optimizer(model, dataset, training_args, finetuning_args)
 
     if finetuning_args.loraplus_lr_ratio is not None: