Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-12-15 03:10:35 +08:00)

Commit: fix examples
@@ -14,5 +14,15 @@
   },
   "bf16": {
     "enabled": "auto"
+  },
+  "zero_optimization": {
+    "stage": 0,
+    "allgather_partitions": true,
+    "allgather_bucket_size": 5e8,
+    "overlap_comm": true,
+    "reduce_scatter": true,
+    "reduce_bucket_size": 5e8,
+    "contiguous_gradients": true,
+    "round_robin_gradients": true
   }
 }
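The added "zero_optimization" block sets DeepSpeed ZeRO stage 0, i.e. ZeRO partitioning disabled and plain data parallelism, while tuning communication: 5e8-byte (500 MB) allgather and reduce buckets, overlapped communication, and reduce-scatter. Note that "5e8" is a legal JSON number, so the edited file still parses. A minimal sanity-check sketch follows; the config path is an assumption, since this rendering omits the file names:

    # Sketch: confirm the edited DeepSpeed config still parses and carries the
    # new block. The path is an assumption; use the file this commit touches.
    CFG=examples/deepspeed/ds_z0_config.json
    python -m json.tool "$CFG" > /dev/null && echo "valid JSON"
    # Prints the configured ZeRO stage (expected: 0).
    python -c 'import json,sys; print(json.load(open(sys.argv[1]))["zero_optimization"]["stage"])' "$CFG"

At stage 0 every device keeps full optimizer states, so the bucket sizes above only control the granularity of gradient all-reduce; raising the stage would shard states as well.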
@@ -12,4 +12,4 @@ ASCEND_RT_VISIBLE_DEVICES=0,1,2,3 torchrun \
     --node_rank $RANK \
     --master_addr $MASTER_ADDR \
     --master_port $MASTER_PORT \
-    src/train.py examples/lora_multi_gpu/llama3_lora_sft_ds.yaml
+    src/train.py examples/lora_multi_npu/llama3_lora_sft_ds.yaml
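The second hunk is a one-line path fix: the Ascend NPU launch script now runs the NPU example YAML (lora_multi_npu) instead of the GPU one. A usage sketch, assuming the script reads the rendezvous variables shown above from the environment; the script filename and the concrete values are illustrative assumptions:

    # Sketch: per-node environment for the corrected multi-NPU launch.
    # RANK, MASTER_ADDR and MASTER_PORT appear in the hunk above; the script
    # name and the values here are assumptions, not taken from this commit.
    export RANK=0                    # this node's rank; 0 on the master node
    export MASTER_ADDR=192.168.0.1   # reachable address of the rank-0 node
    export MASTER_PORT=29500         # free TCP port on the rank-0 node
    bash examples/lora_multi_npu/multi_npu.sh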