Mirror of https://github.com/hiyouga/LLaMA-Factory.git
Commit e87a17464f (parent ba48084086)
```diff
@@ -112,9 +112,9 @@ def preprocess_packed_supervised_dataset(
             input_ids += source_ids + target_ids
             labels += source_mask + target_ids
 
-    if template.efficient_eos:
-        input_ids += [tokenizer.eos_token_id]
-        labels += [tokenizer.eos_token_id]
+        if template.efficient_eos:
+            input_ids += [tokenizer.eos_token_id]
+            labels += [tokenizer.eos_token_id]
 
     total_length = len(input_ids)
     block_size = data_args.cutoff_len
```
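After this change, the `template.efficient_eos` branch appears to run once per example inside the packing loop, so every example in the packed stream ends with an EOS token before the stream is cut into `cutoff_len`-sized blocks. The sketch below is a minimal, self-contained illustration of that packing scheme, not the repository's actual function; `pack_examples`, `EOS_ID`, and the toy token ids are made up for the example.

```python
from typing import Dict, List

IGNORE_INDEX = -100  # label value ignored by the loss, as in LLaMA-Factory
EOS_ID = 2           # placeholder eos_token_id, purely illustrative


def pack_examples(examples: List[Dict[str, List[int]]], cutoff_len: int,
                  efficient_eos: bool = True) -> List[Dict[str, List[int]]]:
    """Concatenate tokenized examples, appending EOS after each one,
    then split the stream into fixed-size blocks of ``cutoff_len`` tokens."""
    input_ids, labels = [], []
    for example in examples:
        input_ids += example["source_ids"] + example["target_ids"]
        labels += [IGNORE_INDEX] * len(example["source_ids"]) + example["target_ids"]
        if efficient_eos:
            # EOS now marks the end of every packed example,
            # not just the end of the whole stream.
            input_ids += [EOS_ID]
            labels += [EOS_ID]

    total_length = (len(input_ids) // cutoff_len) * cutoff_len  # drop the remainder
    return [
        {"input_ids": input_ids[i: i + cutoff_len], "labels": labels[i: i + cutoff_len]}
        for i in range(0, total_length, cutoff_len)
    ]


# Two tiny "tokenized" examples packed into one block of 8 tokens.
packed = pack_examples(
    [
        {"source_ids": [11, 12], "target_ids": [13, 14]},
        {"source_ids": [21, 22, 23], "target_ids": [24]},
    ],
    cutoff_len=8,
)
print(packed)
```

Dropping the tail shorter than `cutoff_len` mirrors how the surrounding function chunks the stream using `block_size = data_args.cutoff_len`, as seen in the context lines above.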
```diff
@@ -477,7 +477,7 @@ LOCALES = {
         },
         "zh": {
             "label": "序列打包",
-            "info": "在指令监督微调阶段将序列打包为相同长度的样本。",
+            "info": "在指令监督微调时将序列打包为等长样本。",
         },
     },
     "upcast_layernorm": {
```
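Both versions of the `zh` description mean roughly "pack sequences into samples of equal length during supervised fine-tuning"; the commit only tightens the wording. For readers unfamiliar with the `LOCALES` table, the snippet below sketches how such a nested locale dictionary can be queried; the `get_text` helper and the English entry shown here are illustrative, not code from the repository.

```python
# A trimmed-down stand-in for the LOCALES table touched by this hunk.
LOCALES = {
    "packing": {
        "en": {
            "label": "Pack sequences",
            "info": "Pack sequences into samples of equal length during supervised fine-tuning.",
        },
        "zh": {
            "label": "序列打包",
            "info": "在指令监督微调时将序列打包为等长样本。",
        },
    },
}


def get_text(component: str, lang: str, key: str = "info") -> str:
    """Illustrative lookup: return the localized string for a UI component,
    falling back to English when the requested language is missing."""
    entry = LOCALES[component]
    return entry.get(lang, entry["en"])[key]


print(get_text("packing", "zh"))  # 在指令监督微调时将序列打包为等长样本。
print(get_text("packing", "fr"))  # falls back to the English description
```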