Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-08-02 03:32:50 +08:00.
10 lines · 231 B · Bash
#!/bin/bash
# Launch multi-GPU full-parameter SFT for LLaMA-3 with torch.distributed.run.
# Runs 4 worker processes on a single node in --standalone mode (no external
# rendezvous backend needed), training from the YAML recipe below.

# Number of worker processes to spawn — one per GPU.
NPROC_PER_NODE=4

# NOTE(review): assumes GPUs 0-3 exist on this host — adjust
# CUDA_VISIBLE_DEVICES and NPROC_PER_NODE together if not.
CUDA_VISIBLE_DEVICES=0,1,2,3 python -m torch.distributed.run \
    --nproc_per_node "$NPROC_PER_NODE" \
    --nnodes 1 \
    --standalone \
    src/train.py examples/full_multi_gpu/llama3_full_sft.yaml