# Mirror of https://github.com/hiyouga/LLaMA-Factory.git
# Synced 2025-12-15 11:20:35 +08:00
# Docker Compose services for running LLaMA-Factory on Ascend NPU hardware.
# Both services build from the same NPU Dockerfile; they differ in the base
# CANN image (a3 overrides BASE_IMAGE), host ports, and compose profile.
services:
  # Default service (no profile): Ascend A2-series NPU image.
  llamafactory-a2:
    build:
      dockerfile: ./docker/docker-npu/Dockerfile
      context: ../..
      args:
        PIP_INDEX: https://pypi.org/simple
        EXTRAS: torch-npu,metrics
    container_name: llamafactory-a2
    image: llamafactory:npu-a2
    # Host driver/tooling mounts required for NPU access inside the container.
    volumes:
      - /usr/local/dcmi:/usr/local/dcmi
      - /usr/local/bin/npu-smi:/usr/local/bin/npu-smi
      - /usr/local/Ascend/driver:/usr/local/Ascend/driver
      - /etc/ascend_install.info:/etc/ascend_install.info
    ports:
      - "7860:7860"  # web UI
      - "8000:8000"  # API server
    ipc: host
    tty: true
    # shm_size: "16gb"  # ipc: host is set
    stdin_open: true
    command: bash
    # Ascend device nodes passed through from the host.
    devices:
      - /dev/davinci0
      - /dev/davinci_manager
      - /dev/devmm_svm
      - /dev/hisi_hdc
    restart: unless-stopped

  # A3-series variant; opt in with `docker compose --profile a3 up`.
  llamafactory-a3:
    profiles: ["a3"]
    build:
      dockerfile: ./docker/docker-npu/Dockerfile
      context: ../..
      args:
        # A3 requires a different CANN base image than the Dockerfile default.
        BASE_IMAGE: quay.io/ascend/cann:8.3.rc2-a3-ubuntu22.04-py3.11
        PIP_INDEX: https://pypi.org/simple
        EXTRAS: torch-npu,metrics
    container_name: llamafactory-a3
    image: llamafactory:npu-a3
    # Host driver/tooling mounts required for NPU access inside the container.
    volumes:
      - /usr/local/dcmi:/usr/local/dcmi
      - /usr/local/bin/npu-smi:/usr/local/bin/npu-smi
      - /usr/local/Ascend/driver:/usr/local/Ascend/driver
      - /etc/ascend_install.info:/etc/ascend_install.info
    # Shifted host ports so a2 and a3 can run side by side.
    ports:
      - "7861:7860"  # web UI
      - "8001:8000"  # API server
    ipc: host
    tty: true
    # shm_size: "16gb"  # ipc: host is set
    stdin_open: true
    command: bash
    # Ascend device nodes passed through from the host.
    devices:
      - /dev/davinci0
      - /dev/davinci_manager
      - /dev/devmm_svm
      - /dev/hisi_hdc
    restart: unless-stopped