mirror of
https://github.com/hiyouga/LLaMA-Factory.git
synced 2025-08-23 22:32:54 +08:00
fix bug in packed sft dataset
Former-commit-id: de196143064772db770a45235424b3c911b2e147
This commit is contained in:
parent
f61a000e73
commit
f88088c43d
@@ -116,7 +116,7 @@ def preprocess_dataset(
         # split by chunks of cutoff_len
         for i in range(0, total_length, block_size):
             model_inputs["input_ids"].append(input_ids[i: i + block_size])
-            model_inputs["attention_mask"].append([1] * len(block_size))
+            model_inputs["attention_mask"].append([1] * block_size)
             model_inputs["labels"].append(labels[i: i + block_size])
         return model_inputs
Loading…
x
Reference in New Issue
Block a user