mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-01 11:12:50 +08:00)
support flash-attn in Dockerfile
Former-commit-id: c88b1be9f3dfaf5fe65448dea20fc697b4f257bd
commit 08a221443c
parent f3f25ae3b7
@@ -35,6 +35,11 @@ RUN EXTRA_PACKAGES="metrics"; \
     pip install -e .[$EXTRA_PACKAGES] && \
     pip uninstall -y transformer-engine flash-attn
 
+# Rebuild flash-attn
+RUN ninja --version || \
+    (pip uninstall -y ninja && pip install ninja) && \
+    MAX_JOBS=4 pip install --no-cache-dir flash-attn --no-build-isolation
+
 # Set up volumes
 VOLUME [ "/root/.cache/huggingface", "/root/.cache/modelscope", "/app/data", "/app/output" ]
 
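For context, the added RUN step follows the usual flash-attn source-install recipe: it probes ninja first (a broken ninja silently falls back to a very slow non-parallel build) and reinstalls it if the probe fails, then compiles flash-attn with --no-build-isolation so the build links against the torch already present in the image, while MAX_JOBS=4 caps parallel compile jobs to bound memory use. Below is a minimal smoke test one might run inside the built container to confirm the rebuilt wheel actually works; the file name and tensor shapes are illustrative assumptions, not part of this commit.

# verify_flash_attn.py -- hypothetical smoke test, not part of this commit.
# Confirms the rebuilt flash-attn imports and runs a tiny forward pass.
import torch

import flash_attn
from flash_attn import flash_attn_func

print(f"flash-attn version: {flash_attn.__version__}")

# flash-attn kernels require a CUDA device and fp16/bf16 inputs.
assert torch.cuda.is_available(), "CUDA device required for flash-attn"

# Tiny (batch, seqlen, nheads, head_dim) tensors in half precision.
q = torch.randn(1, 8, 2, 64, dtype=torch.float16, device="cuda")
k = torch.randn(1, 8, 2, 64, dtype=torch.float16, device="cuda")
v = torch.randn(1, 8, 2, 64, dtype=torch.float16, device="cuda")

out = flash_attn_func(q, k, v, causal=True)
print("flash-attn forward OK:", tuple(out.shape))

Run with something like `docker run --gpus all <image> python verify_flash_attn.py` (image name assumed); an import error or a CUDA kernel failure here would indicate the rebuild did not match the installed torch/CUDA version.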