Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-10-15 16:18:10 +08:00)
fix sft trainer
Former-commit-id: 08cc888b1569572d0cd20bcf3f07e20072a0311a
parent a3a7465f00
commit 990eeccf45
@@ -79,7 +79,7 @@ class Seq2SeqPeftTrainer(PeftTrainer):
         padded_tensor = pad_token_id * torch.ones_like(tgt_tensor)
         padded_tensor[:, -src_tensor.shape[-1]:] = src_tensor # adopt left-padding
-        return padded_tensor
+        return padded_tensor.contiguous()
 
     def save_predictions(
         self,
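For context, the hunk above touches a padding helper inside Seq2SeqPeftTrainer: it fills a tensor shaped like tgt_tensor with pad_token_id, copies src_tensor into the right-most positions (left-padding), and, with this commit, returns the result as a contiguous tensor. The sketch below restates that logic as a standalone function under stated assumptions; the name pad_tensors_to_target_len and its signature are illustrative, not the repository's exact API.

import torch


def pad_tensors_to_target_len(
    src_tensor: torch.Tensor,
    tgt_tensor: torch.Tensor,
    pad_token_id: int,
) -> torch.Tensor:
    """Left-pad src_tensor with pad_token_id so it matches tgt_tensor's shape."""
    # Start from a tensor shaped like the target, filled with the pad token.
    padded_tensor = pad_token_id * torch.ones_like(tgt_tensor)
    # Copy the source into the right-most columns, i.e. adopt left-padding.
    padded_tensor[:, -src_tensor.shape[-1]:] = src_tensor
    # Return a contiguous tensor (the change this commit makes) so downstream
    # ops that require contiguous memory, such as .view(), cannot fail on it.
    return padded_tensor.contiguous()


# Usage example with a hypothetical pad id of 0:
# src = torch.tensor([[5, 6, 7], [8, 9, 10]])   # generated ids, length 3
# tgt = torch.zeros(2, 5, dtype=torch.long)     # target/label ids, length 5
# pad_tensors_to_target_len(src, tgt, pad_token_id=0)
# -> tensor([[0, 0, 5, 6, 7],
#            [0, 0, 8, 9, 10]])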