mirror of
				https://github.com/hiyouga/LLaMA-Factory.git
				synced 2025-11-04 18:02:19 +08:00 
			
		
		
		
	Add ROCm support
Former-commit-id: cf9df10a24936efd420b0fdac541fd6c0808a327
This commit is contained in:
		
							parent
							
								
									21adb09730
								
							
						
					
					
						commit
						5b67ccd1c6
					
				
							
								
								
									
										2
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							@ -160,6 +160,8 @@ cython_debug/
 | 
			
		||||
.idea/
 | 
			
		||||
 | 
			
		||||
# custom .gitignore
 | 
			
		||||
ms_cache/
 | 
			
		||||
hf_cache/
 | 
			
		||||
cache/
 | 
			
		||||
config/
 | 
			
		||||
saves/
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										44
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										44
									
								
								README.md
									
									
									
									
									
								
							@ -422,16 +422,24 @@ For CUDA users:
 | 
			
		||||
 | 
			
		||||
```bash
 | 
			
		||||
cd docker/docker-cuda/
 | 
			
		||||
docker-compose up -d
 | 
			
		||||
docker-compose exec llamafactory bash
 | 
			
		||||
docker compose up -d
 | 
			
		||||
docker compose exec llamafactory bash
 | 
			
		||||
```
 | 
			
		||||
 | 
			
		||||
For Ascend NPU users:
 | 
			
		||||
 | 
			
		||||
```bash
 | 
			
		||||
cd docker/docker-npu/
 | 
			
		||||
docker-compose up -d
 | 
			
		||||
docker-compose exec llamafactory bash
 | 
			
		||||
docker compose up -d
 | 
			
		||||
docker compose exec llamafactory bash
 | 
			
		||||
```
 | 
			
		||||
 | 
			
		||||
For ROCm users:
 | 
			
		||||
 | 
			
		||||
```bash
 | 
			
		||||
cd docker/docker-rocm/
 | 
			
		||||
docker compose up -d
 | 
			
		||||
docker compose exec llamafactory bash
 | 
			
		||||
```
 | 
			
		||||
 | 
			
		||||
<details><summary>Build without Docker Compose</summary>
 | 
			
		||||
@ -493,6 +501,34 @@ docker run -dit \
 | 
			
		||||
docker exec -it llamafactory bash
 | 
			
		||||
```
 | 
			
		||||
 | 
			
		||||
For ROCm users:
 | 
			
		||||
 | 
			
		||||
```bash
 | 
			
		||||
docker build -f ./docker/docker-rocm/Dockerfile \
 | 
			
		||||
    --build-arg INSTALL_BNB=false \
 | 
			
		||||
    --build-arg INSTALL_VLLM=false \
 | 
			
		||||
    --build-arg INSTALL_DEEPSPEED=false \
 | 
			
		||||
    --build-arg INSTALL_FLASHATTN=false \
 | 
			
		||||
    --build-arg PIP_INDEX=https://pypi.org/simple \
 | 
			
		||||
    -t llamafactory:latest .
 | 
			
		||||
 | 
			
		||||
docker run -dit \
 | 
			
		||||
    -v ./hf_cache:/root/.cache/huggingface \
 | 
			
		||||
    -v ./ms_cache:/root/.cache/modelscope \
 | 
			
		||||
    -v ./data:/app/data \
 | 
			
		||||
    -v ./output:/app/output \
 | 
			
		||||
    -v ./saves:/app/saves \
 | 
			
		||||
    -p 7860:7860 \
 | 
			
		||||
    -p 8000:8000 \
 | 
			
		||||
    --device=/dev/kfd \
 | 
			
		||||
	--device=/dev/dri \
 | 
			
		||||
    --shm-size 16G \
 | 
			
		||||
    --name llamafactory \
 | 
			
		||||
    llamafactory:latest
 | 
			
		||||
 | 
			
		||||
docker exec -it llamafactory bash
 | 
			
		||||
```
 | 
			
		||||
 | 
			
		||||
</details>
 | 
			
		||||
 | 
			
		||||
<details><summary>Details about volume</summary>
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										57
									
								
								docker/docker-rocm/Dockerfile
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								docker/docker-rocm/Dockerfile
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,57 @@
 | 
			
		||||
# syntax=docker/dockerfile:1
# ROCm build of LLaMA Factory, based on a transformers-enabled ROCm image.
FROM hardandheavy/transformers-rocm:2.1.0

# Define environments
# MAX_JOBS limits parallel compile jobs when building flash-attn from source.
ENV MAX_JOBS=4
ENV FLASH_ATTENTION_FORCE_BUILD=TRUE
ENV VLLM_WORKER_MULTIPROC_METHOD=spawn

# Define installation arguments (build-time toggles for optional extras)
ARG INSTALL_BNB=false
ARG INSTALL_VLLM=false
ARG INSTALL_DEEPSPEED=false
ARG INSTALL_FLASHATTN=false
ARG PIP_INDEX=https://pypi.org/simple

# Set the working directory
WORKDIR /app

# Install the requirements first so this layer is cached until
# requirements.txt itself changes.
COPY requirements.txt /app
RUN pip config set global.index-url "$PIP_INDEX" && \
    pip config set global.extra-index-url "$PIP_INDEX" && \
    python -m pip install --no-cache-dir --upgrade pip && \
    python -m pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application into the image
COPY . /app

# Install the LLaMA Factory with the requested optional extras.
# NOTE: RUN executes under /bin/sh (dash on Debian/Ubuntu bases), so the
# POSIX test operator `=` must be used — the bash-only `==` fails there.
RUN EXTRA_PACKAGES="metrics"; \
    if [ "$INSTALL_BNB" = "true" ]; then \
        EXTRA_PACKAGES="${EXTRA_PACKAGES},bitsandbytes"; \
    fi; \
    if [ "$INSTALL_VLLM" = "true" ]; then \
        EXTRA_PACKAGES="${EXTRA_PACKAGES},vllm"; \
    fi; \
    if [ "$INSTALL_DEEPSPEED" = "true" ]; then \
        EXTRA_PACKAGES="${EXTRA_PACKAGES},deepspeed"; \
    fi; \
    pip install --no-cache-dir -e ".[$EXTRA_PACKAGES]"

# Rebuild flash attention from source for ROCm (the wheels shipped with the
# base image target CUDA, so they are removed unconditionally).
RUN pip uninstall -y transformer-engine flash-attn && \
    if [ "$INSTALL_FLASHATTN" = "true" ]; then \
        pip uninstall -y ninja && pip install --no-cache-dir ninja && \
        pip install --no-cache-dir flash-attn --no-build-isolation; \
    fi

# Set up volumes (declared after all writes to these paths)
VOLUME [ "/root/.cache/huggingface", "/root/.cache/modelscope", "/app/data", "/app/output" ]

# Expose port 7860 for the LLaMA Board
ENV GRADIO_SERVER_PORT=7860
EXPOSE 7860

# Expose port 8000 for the API service
ENV API_PORT=8000
EXPOSE 8000
 | 
			
		||||
							
								
								
									
										29
									
								
								docker/docker-rocm/docker-compose.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								docker/docker-rocm/docker-compose.yml
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,29 @@
 | 
			
		||||
services:
  llamafactory:
    build:
      # Build from the repository root so requirements.txt and the
      # application sources are inside the build context.
      context: ../..
      dockerfile: ./docker/docker-rocm/Dockerfile
      args:
        INSTALL_BNB: false
        INSTALL_VLLM: false
        INSTALL_DEEPSPEED: false
        INSTALL_FLASHATTN: false
        PIP_INDEX: https://pypi.org/simple
    container_name: llamafactory
    volumes:
      # Persist model/dataset caches and training artifacts on the host.
      - ../../hf_cache:/root/.cache/huggingface
      - ../../ms_cache:/root/.cache/modelscope
      - ../../data:/app/data
      - ../../output:/app/output
      - ../../saves:/app/saves
    ports:
      - "7860:7860"   # LLaMA Board web UI
      - "8000:8000"   # API service
    ipc: host
    tty: true
    stdin_open: true
    command: bash
    devices:
      # AMD GPU device nodes required for ROCm inside the container.
      - /dev/kfd:/dev/kfd
      - /dev/dri:/dev/dri
    restart: unless-stopped
 | 
			
		||||
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user