Support reporting custom args

This commit is contained in:
hiyouga
2024-12-19 14:57:09 +00:00
parent 84cd1188ac
commit 5111cac6f8
20 changed files with 164 additions and 124 deletions

View File

@@ -30,7 +30,7 @@ from typing_extensions import override
from ...extras.constants import IGNORE_INDEX
from ...extras.packages import is_transformers_version_equal_to_4_46
from ..callbacks import SaveProcessorCallback
from ..trainer_utils import create_custom_optimizer, create_custom_scheduler, get_batch_logps, get_swanlab_callback
from ..trainer_utils import create_custom_optimizer, create_custom_scheduler, get_batch_logps
if TYPE_CHECKING:
@@ -101,9 +101,6 @@ class CustomKTOTrainer(KTOTrainer):
self.accelerator.clip_grad_norm_ = MethodType(clip_grad_norm_old_version, self.accelerator)
self.add_callback(BAdamCallback)
if finetuning_args.use_swanlab:
self.add_callback(get_swanlab_callback(finetuning_args))
@override
def create_optimizer(self) -> "torch.optim.Optimizer":
if self.optimizer is None: