Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-11-04 18:02:19 +08:00)
	support flash-attn in Dockerfile
Former-commit-id: 0dba000aa178f915cea7d75bf0c9d47e671a21d2
This commit is contained in:
parent 98f382fda3
commit daea86e047
@@ -35,6 +35,11 @@ RUN EXTRA_PACKAGES="metrics"; \
     pip install -e .[$EXTRA_PACKAGES] && \
     pip uninstall -y transformer-engine flash-attn
 
+# Rebuild flash-attn
+RUN ninja --version || \
+    (pip uninstall -y ninja && pip install ninja) && \
+    MAX_JOBS=4 pip install --no-cache-dir flash-attn --no-build-isolation
+
 # Set up volumes
 VOLUME [ "/root/.cache/huggingface", "/root/.cache/modelscope", "/app/data", "/app/output" ]
 
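The added RUN step ensures a working ninja build backend (reinstalling it if the existing one is broken) and then compiles flash-attn from source with MAX_JOBS=4 to cap parallel compile jobs and limit peak memory use. A quick smoke test for the rebuilt wheel is shown below; the image tag llamafactory:latest is illustrative and not defined by this commit:

# Build the image from the patched Dockerfile (tag is hypothetical).
docker build -t llamafactory:latest .

# Verify that the rebuilt flash-attn package imports and print its version.
# --gpus all assumes the NVIDIA Container Toolkit is available on the host.
docker run --rm --gpus all llamafactory:latest \
    python -c "import flash_attn; print(flash_attn.__version__)"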