# Mirror of https://github.com/hiyouga/LLaMA-Factory.git
# Synced 2025-11-04 18:02:19 +08:00
# File: docker-compose.yml (26 lines, 565 B, YAML)
# Compose service for LLaMA-Factory with NVIDIA GPU access (CUDA image).
services:
  llamafactory:
    build:
      # Build from the repository root so the CUDA Dockerfile can reach
      # the whole source tree.
      dockerfile: ./docker/docker-cuda/Dockerfile
      context: ../..
      args:
        PIP_INDEX: https://pypi.org/simple
        EXTRAS: metrics
    container_name: llamafactory
    ports:
      - "7860:7860"  # Gradio web UI
      - "8000:8000"  # OpenAI-style API server
    # Share the host IPC namespace so shared-memory limits do not apply.
    ipc: host
    tty: true
    # shm_size: "16gb"  # ipc: host is set
    stdin_open: true
    command: bash
    deploy:
      resources:
        reservations:
          # Reserve all NVIDIA GPUs for this container.
          devices:
            - driver: nvidia
              count: "all"
              capabilities: [gpu]
    restart: unless-stopped