mirror of https://github.com/hiyouga/LLaMA-Factory.git
update packing to work with the sdpa and eager attention modes
Former-commit-id: 238f5c3d99
@@ -34,7 +34,7 @@ def run_sft(
     model = load_model(tokenizer, model_args, finetuning_args, training_args.do_train)
 
     if data_args.efficient_packing:
-        configure_packing(model.config)
+        configure_packing(model.config, model_args)
 
     if training_args.predict_with_generate:
         tokenizer.padding_side = "left"  # use left-padding in generation