#!/bin/bash
# Run the prediction stage of src/train.py across four GPUs with Accelerate,
# using the single-node multi-GPU config in examples/accelerate/single_config.yaml
# and the arguments in examples/full_multi_gpu/llama3_full_predict.yaml.

CUDA_VISIBLE_DEVICES=0,1,2,3 accelerate launch \
    --config_file examples/accelerate/single_config.yaml \
    src/train.py examples/full_multi_gpu/llama3_full_predict.yaml
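
# The launch above relies on an Accelerate config describing a single-node,
# multi-GPU setup. The sketch below is an assumption about what such a config
# typically contains, not the actual contents of the repository's
# examples/accelerate/single_config.yaml; the output path
# single_config_example.yaml and the values (notably num_processes: 4 to match
# the four visible GPUs, and mixed_precision: fp16) are illustrative only.
cat > single_config_example.yaml <<'EOF'
compute_environment: LOCAL_MACHINE
distributed_type: MULTI_GPU
downcast_bf16: 'no'
machine_rank: 0
main_training_function: main
mixed_precision: fp16
num_machines: 1
num_processes: 4        # one process per GPU listed in CUDA_VISIBLE_DEVICES
rdzv_backend: static
same_network: true
use_cpu: false
EOF

# A config like this can also be generated interactively and then passed to
# accelerate launch via --config_file:
#   accelerate config --config_file single_config_example.yaml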