Add DeepSpeed incompatibility check in PPO training

Former-commit-id: ed1c2c5557
This commit is contained in:
hiyouga
2023-09-07 19:12:40 +08:00
parent e6fa0229f4
commit 5030f05126
2 changed files with 6 additions and 0 deletions

View File

@@ -39,6 +39,9 @@ class PPOPeftTrainer(PPOTrainer, PeftTrainer):
**kwargs
):
PPOTrainer.__init__(self, **kwargs)
if getattr(self.accelerator.state, "deepspeed_plugin", None) is not None:
raise ValueError("PPOTrainer is incompatible with DeepSpeed.")
self.args = training_args
self.finetuning_args = finetuning_args
self.generating_args = generating_args