Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-08-02 03:32:50 +08:00.
6 lines · 183 B · Bash
#!/bin/bash
# Launch single-node, multi-GPU LoRA SFT training for LLaMA-3 via
# Hugging Face Accelerate.
#
# - CUDA_VISIBLE_DEVICES=0,1,2,3 restricts the run to the first four GPUs.
# - The Accelerate runtime config and the training recipe are resolved
#   relative to the repository root, so run this script from there.

CUDA_VISIBLE_DEVICES=0,1,2,3 accelerate launch \
    --config_file examples/accelerate/single_config.yaml \
    src/train.py examples/lora_multi_gpu/llama3_lora_sft.yaml