Fix bug in pretraining: guard against `template` being None before checking `efficient_eos` with `sft_packing`

Former-commit-id: 18a2d90bd6e7c3e1e3513e6f9d895e4048b35b04
This commit is contained in:
hiyouga 2023-09-28 00:45:20 +08:00
parent 1c150995ae
commit fa53fd2db2

View File

@ -22,7 +22,7 @@ def preprocess_dataset(
column_names = list(next(iter(dataset)).keys())
template = get_template_and_fix_tokenizer(data_args.template, tokenizer)
if template.efficient_eos and data_args.sft_packing:
if template is not None and template.efficient_eos and data_args.sft_packing:
raise ValueError("Current template is incompatible with packing.")
def construct_example(examples: Dict[str, List[Any]]) -> Generator[Any, None, None]: