From daea86e0470b8b4c0cf31265798e670bac2f6a49 Mon Sep 17 00:00:00 2001
From: hzhaoy
Date: Tue, 25 Jun 2024 15:13:07 +0800
Subject: [PATCH] support flash-attn in Dockerfile

Former-commit-id: 0dba000aa178f915cea7d75bf0c9d47e671a21d2
---
 docker/docker-cuda/Dockerfile | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/docker/docker-cuda/Dockerfile b/docker/docker-cuda/Dockerfile
index 827b7b3c..06a172f0 100644
--- a/docker/docker-cuda/Dockerfile
+++ b/docker/docker-cuda/Dockerfile
@@ -35,6 +35,11 @@ RUN EXTRA_PACKAGES="metrics"; \
     pip install -e .[$EXTRA_PACKAGES] && \
     pip uninstall -y transformer-engine flash-attn
 
+# Rebuild flash-attn
+RUN ninja --version || \
+    (pip uninstall -y ninja && pip install ninja) && \
+    MAX_JOBS=4 pip install --no-cache-dir flash-attn --no-build-isolation
+
 # Set up volumes
 VOLUME [ "/root/.cache/huggingface", "/root/.cache/modelscope", "/app/data", "/app/output" ]
 