Support distributed BAdam.

Former-commit-id: 0f72aac8c9
Jonery
2024-06-18 12:27:47 +08:00
parent 5d59f6562a
commit 3a5eacb4cf
7 changed files with 46 additions and 30 deletions

@@ -209,24 +209,20 @@ def get_train_args(args: Optional[Dict[str, Any]] = None) -> _TRAIN_CLS:
    ):
        raise ValueError("Distributed training does not support layer-wise GaLore.")
    if finetuning_args.use_badam and training_args.parallel_mode.value == "distributed":
        if finetuning_args.badam_mode == "ratio":
            raise ValueError(
                "Ratio-wise BAdam does not yet support distributed training, use layer-wise BAdam: --badam_mode layer"
            )
        if (
            finetuning_args.badam_mode == "layer"
            and training_args.deepspeed_plugin is not None
            and training_args.deepspeed_plugin.zero_stage < 3
        ):
            raise ValueError(
                f"Layer-wise BAdam only supports DeepSpeed ZeRO stage 3, got stage {training_args.deepspeed_plugin.zero_stage}"
            )
    if finetuning_args.use_galore and training_args.deepspeed is not None:
        raise ValueError("GaLore is incompatible with DeepSpeed yet.")
    if model_args.infer_backend == "vllm":
        raise ValueError("vLLM backend is only available for API, CLI and Web.")