Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-22 22:02:51 +08:00)
Update trainer.py

commit 5d96cf146e
parent e58aca0602
Former-commit-id: 24499f40dc1d9db448a3328d2a75c60eec27feb9
@@ -4,7 +4,6 @@ from types import MethodType
 from typing import TYPE_CHECKING, Dict, Literal, Optional, Tuple, Union

 import torch
-from torch.utils.data import RandomSampler
 from transformers import Trainer
 from trl import KTOTrainer
 from trl.trainer import disable_dropout_in_model
@@ -14,6 +13,7 @@ from ..utils import create_custom_optimzer, create_custom_scheduler


 if TYPE_CHECKING:
+    import torch.utils.data
     from transformers import PreTrainedModel, ProcessorMixin

     from ...hparams import FinetuningArguments
@@ -85,6 +85,12 @@ class CustomKTOTrainer(KTOTrainer):
             create_custom_scheduler(self.args, num_training_steps, optimizer)
         return super().create_scheduler(num_training_steps, optimizer)

+    def _get_train_sampler(self) -> Optional["torch.utils.data.Sampler"]:
+        r"""
+        Replaces the sequential sampler of KTO Trainer created by trl with the random sampler.
+        """
+        return Trainer._get_train_sampler(self)
+
     def _save(self, output_dir: Optional[str] = None, state_dict: Optional[Dict[str, "torch.Tensor"]] = None) -> None:
         super()._save(output_dir, state_dict)
         if self.processor is not None:
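Note: per the docstring above, trl's KTOTrainer returns a sequential sampler, while the base transformers.Trainer builds a RandomSampler over a sized train dataset, so delegating back to Trainer restores shuffled batches. A minimal sketch of the behavior this hunk relies on; the two helper functions below are illustrative stand-ins for the library methods, not real APIs, and exact internals vary across library versions:

# Illustrative sketch only: stand-ins for Trainer._get_train_sampler and
# trl's sequential sampler; exact internals vary across library versions.
from torch.utils.data import Dataset, RandomSampler, SequentialSampler

class ToyDataset(Dataset):
    def __len__(self):
        return 8

    def __getitem__(self, idx):
        return idx

def base_trainer_sampler(dataset):
    # transformers.Trainer builds a shuffling sampler for sized datasets
    return RandomSampler(dataset)

def kto_trainer_sampler(dataset):
    # trl's KTOTrainer returns a fixed-order sampler instead
    return SequentialSampler(dataset)

ds = ToyDataset()
print(list(kto_trainer_sampler(ds)))   # always [0, 1, ..., 7]
print(list(base_trainer_sampler(ds)))  # a fresh permutation each pass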
@@ -174,21 +180,6 @@ class CustomKTOTrainer(KTOTrainer):

         return reference_chosen_logps, reference_rejected_logps, reference_kl_logps

-    def has_length(self,dataset):
-        """
-        Checks if the dataset implements __len__() and it doesn't raise an error
-        """
-        try:
-            return len(dataset) is not None
-        except TypeError:
-            # TypeError: len() of unsized object
-            return False
-
-    def _get_train_sampler(self) -> Optional[torch.utils.data.Sampler]:
-        if self.train_dataset is None or not self.has_length(self.train_dataset):
-            return None
-        return RandomSampler(self.train_dataset)
-
     def get_batch_loss_metrics(
         self,
         model: "PreTrainedModel",
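The deleted helpers duplicated upstream logic: the has_length body matches the utility of the same name in transformers.trainer_utils, and Trainer._get_train_sampler already guards against unsized or missing datasets before constructing a sampler. A short check of that equivalence, assuming has_length is importable from transformers.trainer_utils in your installed version:

# Sketch, assuming transformers.trainer_utils.has_length exists in your version.
from transformers.trainer_utils import has_length

class Sized:
    def __len__(self):
        return 4

class Unsized:
    pass  # len() raises TypeError here, matching the removed except branch

print(has_length(Sized()))    # True
print(has_length(Unsized()))  # False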