Mirror of https://github.com/hiyouga/LLaMA-Factory.git
fix bug in pretraining
Former-commit-id: b3fbba57eb8be33a018b5904bdf08d1c95412005
parent deb17942ab
commit d2ce9b879b
@@ -22,7 +22,7 @@ def preprocess_dataset(
     column_names = list(next(iter(dataset)).keys())
     template = get_template_and_fix_tokenizer(data_args.template, tokenizer)
 
-    if template.efficient_eos and data_args.sft_packing:
+    if template is not None and template.efficient_eos and data_args.sft_packing:
         raise ValueError("Current template is incompatible with packing.")
 
     def construct_example(examples: Dict[str, List[Any]]) -> Generator[Any, None, None]:
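
For context, the sketch below illustrates the failure mode this guard addresses. It is a minimal stand-in, not the repository's code: the helper, template name, and flag values are hypothetical substitutes for the real get_template_and_fix_tokenizer and data_args, under the assumption that no template is configured during pretraining and the helper therefore returns None.

from typing import Optional


class Template:
    def __init__(self, efficient_eos: bool) -> None:
        self.efficient_eos = efficient_eos


def get_template_and_fix_tokenizer(name: Optional[str]) -> Optional[Template]:
    # Hypothetical stand-in for the real helper: assume it returns None when no
    # template name is configured, which is the case in the pretraining stage.
    return Template(efficient_eos=True) if name is not None else None


def check_packing(template_name: Optional[str], sft_packing: bool) -> None:
    template = get_template_and_fix_tokenizer(template_name)

    # Before the fix: `template.efficient_eos` raises AttributeError when template is None.
    # After the fix: the `template is not None` guard short-circuits during pretraining.
    if template is not None and template.efficient_eos and sft_packing:
        raise ValueError("Current template is incompatible with packing.")


check_packing(template_name=None, sft_packing=True)  # pretraining: no error after the fix
try:
    check_packing(template_name="llama2", sft_packing=True)
except ValueError as err:
    print(err)  # an EOS-efficient template combined with packing still raises, as intended

The guard only changes behavior when template is None; supervised fine-tuning paths that pass a real template keep the original incompatibility check.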