Fix bug in data loader; support DPO evaluation

This commit is contained in:
hiyouga
2023-11-03 00:34:26 +08:00
parent 2b5e33c338
commit b355f6cac9
3 changed files with 9 additions and 2 deletions

View File

@@ -58,3 +58,9 @@ def run_dpo(
trainer.save_model()
if trainer.is_world_process_zero() and model_args.plot_loss:
plot_loss(training_args.output_dir, keys=["loss", "eval_loss"])
# Evaluation
if training_args.do_eval:
metrics = trainer.evaluate(metric_key_prefix="eval")
trainer.log_metrics("eval", metrics)
trainer.save_metrics("eval", metrics)