Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-11-04 18:02:19 +08:00)

commit 875f9078d1
parent d3bfcbd3af
@@ -23,13 +23,6 @@ RUN pip config set global.index-url "$PIP_INDEX" && \
     python -m pip install --upgrade pip && \
     python -m pip install -r requirements.txt
 
-# Rebuild flash attention
-RUN pip uninstall -y transformer-engine flash-attn && \
-    if [ "$INSTALL_FLASHATTN" == "true" ]; then \
-        pip uninstall -y ninja && pip install ninja && \
-        pip install --no-cache-dir flash-attn --no-build-isolation; \
-    fi
-
 # Copy the rest of the application into the image
 COPY . /app
 
@@ -46,6 +39,13 @@ RUN EXTRA_PACKAGES="metrics"; \
     fi; \
     pip install -e ".[$EXTRA_PACKAGES]"
 
+# Rebuild flash attention
+RUN pip uninstall -y transformer-engine flash-attn && \
+    if [ "$INSTALL_FLASHATTN" == "true" ]; then \
+        pip uninstall -y ninja && pip install ninja && \
+        pip install --no-cache-dir flash-attn --no-build-isolation; \
+    fi
+
 # Set up volumes
 VOLUME [ "/root/.cache/huggingface", "/root/.cache/modelscope", "/app/data", "/app/output" ]
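The net effect of the commit is to move the flash-attn rebuild step to after the editable install of the project's extras, presumably so that resolving those extras cannot pull in a dependency that overwrites the freshly rebuilt flash-attn. As a usage sketch only: the rebuild is gated on a build argument, so assuming INSTALL_FLASHATTN is declared as an ARG earlier in this Dockerfile (not shown in the hunks above), and with an illustrative image tag and host paths, the image could be built and run like this:

    # Minimal sketch; image tag "llamafactory:latest" and host paths are
    # illustrative, and INSTALL_FLASHATTN is assumed to be a build ARG.
    docker build --build-arg INSTALL_FLASHATTN=true -t llamafactory:latest .

    # The VOLUME instruction only declares mount points; bind-mount them
    # to host directories at run time, e.g.:
    docker run --gpus all -it \
        -v "$HOME/.cache/huggingface:/root/.cache/huggingface" \
        -v "$PWD/data:/app/data" \
        -v "$PWD/output:/app/output" \
        llamafactory:latest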