Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-04 04:32:50 +08:00)
patch for gemma cpt
Former-commit-id: 70a3052dd8a2d1322fa01ab19e369e465842d416
This commit is contained in:
parent c28818c39f
commit 096c31bfb6
@@ -37,6 +37,10 @@ def preprocess_pretrain_dataset(
         k: [t[i : i + block_size] for i in range(0, total_length, block_size)]
         for k, t in concatenated_examples.items()
     }
+    if data_args.template == "gemma":
+        for i in range(len(result["input_ids"])):
+            result["input_ids"][i][0] = tokenizer.bos_token_id
+
     return result
 
 
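For context on what the added lines do: preprocess_pretrain_dataset concatenates the tokenized corpus and slices it into fixed-size blocks, so only the very first block keeps the BOS token emitted by the tokenizer. Gemma is pretrained with a <bos> at the start of every sequence, so when the gemma template is selected the patch overwrites the first token of each block with tokenizer.bos_token_id. Below is a minimal, self-contained sketch of that behaviour; the helper name split_into_blocks, the hard-coded BOS id, and the toy token ids are illustrative assumptions, not code from the repository.

# Illustrative sketch only: split_into_blocks, BOS_ID and the toy data below are
# assumptions for demonstration; the real logic lives in preprocess_pretrain_dataset.
from typing import Dict, List

BOS_ID = 2  # assumed BOS token id for this sketch


def split_into_blocks(
    concatenated: Dict[str, List[int]], block_size: int, is_gemma: bool
) -> Dict[str, List[List[int]]]:
    total_length = len(next(iter(concatenated.values())))
    total_length = (total_length // block_size) * block_size  # drop the ragged tail
    result = {
        k: [t[i : i + block_size] for i in range(0, total_length, block_size)]
        for k, t in concatenated.items()
    }
    if is_gemma:
        # Gemma expects <bos> at the start of every training sequence, not just the
        # first chunk of the concatenated stream, so force it onto every block.
        for i in range(len(result["input_ids"])):
            result["input_ids"][i][0] = BOS_ID
    return result


if __name__ == "__main__":
    examples = {
        "input_ids": [BOS_ID, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20],
        "attention_mask": [1] * 12,
    }
    blocks = split_into_blocks(examples, block_size=4, is_gemma=True)
    print(blocks["input_ids"])  # [[2, 10, 11, 12], [2, 14, 15, 16], [2, 18, 19, 20]]

Without the patch, the second and later blocks would start with ordinary content tokens (13 and 17 in this toy example), which does not match how Gemma sequences are formatted during its own pretraining.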
@@ -503,7 +503,7 @@ register_model_group(
         },
         "OLMo-7B-Chat": {
             DownloadSource.DEFAULT: "allenai/OLMo-7B-Instruct",
-        }
+        },
     },
     module="att_proj",
     template="olmo",
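The second hunk only adds a trailing comma after the "OLMo-7B-Chat" entry so it matches the style of the surrounding items in the models dict; the registration call is otherwise unchanged. For readers unfamiliar with the registry, here is a simplified, assumed stand-in for register_model_group that shows the shape of the data it receives (DownloadSource is replaced by a plain string key, and the real function records more metadata):

# Simplified stand-in for the model registry; names and fields are assumptions for
# illustration, not the actual LLaMA-Factory implementation.
from typing import Dict, Optional

MODEL_REGISTRY: Dict[str, Dict] = {}


def register_model_group(
    models: Dict[str, Dict[str, str]],
    module: Optional[str] = None,
    template: Optional[str] = None,
) -> None:
    # Record every model in the group together with its shared LoRA module and template.
    for name, sources in models.items():
        MODEL_REGISTRY[name] = {"sources": sources, "module": module, "template": template}


register_model_group(
    models={
        "OLMo-7B-Chat": {
            "default": "allenai/OLMo-7B-Instruct",
        },  # trailing comma added by this commit, matching the surrounding entries
    },
    module="att_proj",
    template="olmo",
)

print(MODEL_REGISTRY["OLMo-7B-Chat"])
# {'sources': {'default': 'allenai/OLMo-7B-Instruct'}, 'module': 'att_proj', 'template': 'olmo'}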