From 002f58ef8e861a20df8bff1fc64db1b9f295d46b Mon Sep 17 00:00:00 2001
From: hoshi-hiyouga
Date: Thu, 6 Mar 2025 11:58:36 +0800
Subject: [PATCH] [model] add QwQ 32b (#7179)

Former-commit-id: 64a6fb9b5056166265abc5acbddffb64cd8b5256
---
 src/llamafactory/extras/constants.py    | 4 ++++
 src/llamafactory/train/trainer_utils.py | 5 ++---
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/llamafactory/extras/constants.py b/src/llamafactory/extras/constants.py
index 9f605532..2095d1a4 100644
--- a/src/llamafactory/extras/constants.py
+++ b/src/llamafactory/extras/constants.py
@@ -2174,6 +2174,10 @@ register_model_group(
             DownloadSource.DEFAULT: "Qwen/QwQ-32B-Preview",
             DownloadSource.MODELSCOPE: "Qwen/QwQ-32B-Preview",
         },
+        "QwQ-32B-Instruct": {
+            DownloadSource.DEFAULT: "Qwen/QwQ-32B",
+            DownloadSource.MODELSCOPE: "Qwen/QwQ-32B",
+        },
     },
     template="qwen",
 )
diff --git a/src/llamafactory/train/trainer_utils.py b/src/llamafactory/train/trainer_utils.py
index 45494649..15ed0df4 100644
--- a/src/llamafactory/train/trainer_utils.py
+++ b/src/llamafactory/train/trainer_utils.py
@@ -600,15 +600,14 @@ def get_swanlab_callback(finetuning_args: "FinetuningArguments") -> "TrainerCall
                 return super().setup(args, state, model, **kwargs)
 
-
             try:
                 if hasattr(self, "_swanlab"):
                     swanlab_public_config = self._swanlab.get_run().public.json()
                 else:  # swanlab <= 0.4.9
                     swanlab_public_config = self._experiment.get_run().public.json()
-            except Exception as e:
+            except Exception:
                 swanlab_public_config = {}
-
+
             with open(os.path.join(args.output_dir, SWANLAB_CONFIG), "w") as f:
                 f.write(json.dumps(swanlab_public_config, indent=2))