Fix ds optimizer

Former-commit-id: 3bcd41b639899e72bcabc51d59bac8967af19899
This commit is contained in:
hoshi-hiyouga 2024-03-26 23:39:56 +08:00
parent 300437a5e9
commit ae9ad13f2a
4 changed files with 8 additions and 8 deletions

View File

@@ -64,10 +64,10 @@ class CustomDPOTrainer(DPOTrainer):
self.ref_model = self.accelerator.prepare_model(self.ref_model, evaluation_mode=True)
def create_optimizer_and_scheduler(self, num_training_steps: int) -> None:
self.optimizer = create_custom_optimzer(self.model, self.args, self.finetuning_args, num_training_steps)
if self.optimizer is None:
self.create_optimizer()
self.optimizer = create_custom_optimzer(self.model, self.args, self.finetuning_args, num_training_steps)
self.create_optimizer()
self.create_scheduler(num_training_steps=num_training_steps, optimizer=self.optimizer)
def sft_loss(self, chosen_logits: torch.FloatTensor, chosen_labels: torch.LongTensor) -> torch.Tensor:

View File

@@ -23,8 +23,8 @@ class CustomTrainer(Trainer):
self.finetuning_args = finetuning_args
def create_optimizer_and_scheduler(self, num_training_steps: int) -> None:
self.optimizer = create_custom_optimzer(self.model, self.args, self.finetuning_args, num_training_steps)
if self.optimizer is None:
self.create_optimizer()
self.optimizer = create_custom_optimzer(self.model, self.args, self.finetuning_args, num_training_steps)
self.create_optimizer()
self.create_scheduler(num_training_steps=num_training_steps, optimizer=self.optimizer)

View File

@@ -30,10 +30,10 @@ class PairwiseTrainer(Trainer):
self.can_return_loss = True # override property to return eval_loss
def create_optimizer_and_scheduler(self, num_training_steps: int) -> None:
self.optimizer = create_custom_optimzer(self.model, self.args, self.finetuning_args, num_training_steps)
if self.optimizer is None:
self.create_optimizer()
self.optimizer = create_custom_optimzer(self.model, self.args, self.finetuning_args, num_training_steps)
self.create_optimizer()
self.create_scheduler(num_training_steps=num_training_steps, optimizer=self.optimizer)
def compute_loss(

View File

@@ -30,10 +30,10 @@ class CustomSeq2SeqTrainer(Seq2SeqTrainer):
self.finetuning_args = finetuning_args
def create_optimizer_and_scheduler(self, num_training_steps: int) -> None:
self.optimizer = create_custom_optimzer(self.model, self.args, self.finetuning_args, num_training_steps)
if self.optimizer is None:
self.create_optimizer()
self.optimizer = create_custom_optimzer(self.model, self.args, self.finetuning_args, num_training_steps)
self.create_optimizer()
self.create_scheduler(num_training_steps=num_training_steps, optimizer=self.optimizer)
def prediction_step(