Add DeepSpeed compatibility check in PPO training

This commit is contained in:
hiyouga
2023-09-07 19:12:40 +08:00
parent e2bf7c3bad
commit ed1c2c5557
2 changed files with 6 additions and 0 deletions

View File

@@ -39,6 +39,9 @@ class PPOPeftTrainer(PPOTrainer, PeftTrainer):
**kwargs
):
PPOTrainer.__init__(self, **kwargs)
if getattr(self.accelerator.state, "deepspeed_plugin", None) is not None:
raise ValueError("PPOTrainer is incompatible with DeepSpeed.")
self.args = training_args
self.finetuning_args = finetuning_args
self.generating_args = generating_args