Fix shift short attention

This commit is contained in:
hiyouga
2023-10-09 17:07:46 +08:00
parent b8dbec086e
commit ab65c3063b
6 changed files with 46 additions and 52 deletions

View File

@@ -33,6 +33,7 @@ def run_sft(
data_collator = DataCollatorForSeq2Seq(
tokenizer=tokenizer,
pad_to_multiple_of=4, # for shift short attention
label_pad_token_id=IGNORE_INDEX if data_args.ignore_pad_token_for_loss else tokenizer.pad_token_id
)