From d2ce9b879b127d659f9662a73f1627f6ad84ebc1 Mon Sep 17 00:00:00 2001
From: hiyouga
Date: Thu, 28 Sep 2023 00:45:20 +0800
Subject: [PATCH] fix bug in pretraining

Former-commit-id: b3fbba57eb8be33a018b5904bdf08d1c95412005
---
 src/llmtuner/dsets/preprocess.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/llmtuner/dsets/preprocess.py b/src/llmtuner/dsets/preprocess.py
index 5031a817..6ee2ee1c 100644
--- a/src/llmtuner/dsets/preprocess.py
+++ b/src/llmtuner/dsets/preprocess.py
@@ -22,7 +22,7 @@ def preprocess_dataset(
     column_names = list(next(iter(dataset)).keys())
     template = get_template_and_fix_tokenizer(data_args.template, tokenizer)
 
-    if template.efficient_eos and data_args.sft_packing:
+    if template is not None and template.efficient_eos and data_args.sft_packing:
         raise ValueError("Current template is incompatible with packing.")
 
     def construct_example(examples: Dict[str, List[Any]]) -> Generator[Any, None, None]:
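
The one-line change adds a None guard before dereferencing `template`. Below is a minimal, self-contained sketch of the guarded check; the simplified `Template` dataclass and the stand-in `get_template_and_fix_tokenizer` are illustrative assumptions, not the library's real implementations, assuming the real function can return None when no template name is supplied (the pretraining path), in which case `template.efficient_eos` previously raised AttributeError.

from dataclasses import dataclass
from typing import Optional

@dataclass
class Template:
    # Simplified stand-in for the library's template object (assumption).
    efficient_eos: bool = False

def get_template_and_fix_tokenizer(name: Optional[str]) -> Optional[Template]:
    # Sketch only: pretraining supplies no template name, so nothing is returned.
    return Template(efficient_eos=True) if name is not None else None

def check_packing(template: Optional[Template], sft_packing: bool) -> None:
    # Before the fix, `template.efficient_eos` raised AttributeError when
    # template was None. The added `template is not None` check short-circuits
    # the expression, so pretraining data passes through untouched.
    if template is not None and template.efficient_eos and sft_packing:
        raise ValueError("Current template is incompatible with packing.")

check_packing(get_template_and_fix_tokenizer(None), sft_packing=True)       # ok: no template
check_packing(get_template_and_fix_tokenizer("llama2"), sft_packing=False)  # ok: no packing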