1. add custom eval dataset support

2. merge load dataset and split dataset functions
codingma
2024-07-05 15:52:10 +08:00
parent 9f33f1edf5
commit 76f3bbcfc0
16 changed files with 104 additions and 43 deletions


@@ -41,7 +41,7 @@ def run_dpo(
 ):
     tokenizer_module = load_tokenizer(model_args)
     tokenizer = tokenizer_module["tokenizer"]
-    dataset = get_dataset(model_args, data_args, training_args, stage="rm", **tokenizer_module)
+    dataset_module = get_dataset(model_args, data_args, training_args, stage="rm", **tokenizer_module)
     model = load_model(tokenizer, model_args, finetuning_args, training_args.do_train)

     data_collator = PairwiseDataCollatorWithPadding(
@@ -71,7 +71,7 @@ def run_dpo(
         data_collator=data_collator,
         callbacks=callbacks,
         **tokenizer_module,
-        **split_dataset(dataset, data_args, training_args),
+        **dataset_module,
     )

     # Training
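For context, a minimal sketch of the contract this refactor appears to establish: get_dataset now performs the train/eval split itself and returns a dict of trainer keyword arguments, so the call site can unpack **dataset_module directly instead of calling split_dataset afterwards. The helper name get_dataset_module, the eval_dataset and val_size parameters, and the split logic below are illustrative assumptions, not the repository's actual code.

    # Sketch only: assumed shape of the dict returned by the merged loader.
    from typing import Any, Dict, Optional

    def get_dataset_module(
        dataset: Any,
        eval_dataset: Optional[Any] = None,  # assumed custom eval dataset hook
        val_size: float = 0.0,
        seed: int = 42,
    ) -> Dict[str, Any]:
        """Return {"train_dataset": ..., "eval_dataset": ...} ready for **-unpacking into a Trainer."""
        if eval_dataset is not None:
            # A user-supplied eval dataset is used as-is; no splitting needed.
            return {"train_dataset": dataset, "eval_dataset": eval_dataset}

        if val_size > 0:
            # Otherwise carve a validation split out of the training data
            # (datasets.Dataset.train_test_split from HuggingFace datasets).
            split = dataset.train_test_split(test_size=val_size, seed=seed)
            return {"train_dataset": split["train"], "eval_dataset": split["test"]}

        return {"train_dataset": dataset}

With a return value shaped like this, the trainer construction in the second hunk reduces to passing **dataset_module, and a custom eval dataset (point 1 of the commit message) flows through without a separate split step.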