[assets] fix docker image (#8180)

This commit is contained in:
hoshi-hiyouga 2025-05-27 19:01:31 +08:00 committed by GitHub
parent 2bf8e993ab
commit 4d3ffa2ec4
4 changed files with 26 additions and 21 deletions

View File

@@ -1,13 +1,13 @@
-# Installation arguments
+# https://hub.docker.com/r/hiyouga/pytorch/tags
 ARG BASE_IMAGE=hiyouga/pytorch:th2.6.0-cu124-flashattn2.7.4-cxx11abi0
+FROM ${BASE_IMAGE}
+
+# Installation arguments
 ARG PIP_INDEX=https://pypi.org/simple
 ARG EXTRAS=metrics
 ARG INSTALL_FLASHATTN=false
 ARG HTTP_PROXY=""
-
-# https://hub.docker.com/r/hiyouga/pytorch/tags
-FROM "${BASE_IMAGE}"
 
 # Define environments
 ENV MAX_JOBS=16
 ENV FLASH_ATTENTION_FORCE_BUILD=TRUE
@@ -37,7 +37,7 @@ COPY . /app
 RUN pip install --no-cache-dir -e ".[${EXTRAS}]" --no-build-isolation
 
 # Rebuild flash attention
-RUN if [ "$INSTALL_FLASHATTN" == "true" ]; then \
+RUN if [ "${INSTALL_FLASHATTN}" == "true" ]; then \
     pip uninstall -y ninja && \
     pip install --no-cache-dir ninja && \
     pip install --no-cache-dir flash-attn --no-build-isolation; \
@@ -47,11 +47,11 @@ RUN if [ "$INSTALL_FLASHATTN" == "true" ]; then \
 VOLUME [ "/root/.cache/huggingface", "/root/.cache/modelscope", "/root/.cache/openmind", "/app/data", "/app/output" ]
 
 # Expose port 7860 for LLaMA Board
-ENV GRADIO_SERVER_PORT 7860
+ENV GRADIO_SERVER_PORT=7860
 EXPOSE 7860
 
 # Expose port 8000 for API service
-ENV API_PORT 8000
+ENV API_PORT=8000
 EXPOSE 8000
 
 # unset proxy

View File

@@ -1,13 +1,12 @@
-# Installation arguments
+# https://hub.docker.com/r/ascendai/cann/tags
 ARG BASE_IMAGE=ascendai/cann:8.0.0-910b-ubuntu22.04-py3.11
+FROM ${BASE_IMAGE}
+
+# Installation arguments
 ARG PIP_INDEX=https://pypi.org/simple
 ARG EXTRAS=metrics
-ARG INSTALL_FLASHATTN=false
 ARG HTTP_PROXY=""
-
-# https://hub.docker.com/r/ascendai/cann/tags
-FROM "${BASE_IMAGE}"
 
 # Define environments
 ENV MAX_JOBS=16
 ENV FLASH_ATTENTION_FORCE_BUILD=TRUE
@@ -40,11 +39,11 @@ RUN pip install --no-cache-dir -e ".[${EXTRAS}]" --no-build-isolation
 VOLUME [ "/root/.cache/huggingface", "/root/.cache/modelscope", "/root/.cache/openmind", "/app/data", "/app/output" ]
 
 # Expose port 7860 for LLaMA Board
-ENV GRADIO_SERVER_PORT 7860
+ENV GRADIO_SERVER_PORT=7860
 EXPOSE 7860
 
 # Expose port 8000 for API service
-ENV API_PORT 8000
+ENV API_PORT=8000
 EXPOSE 8000
 
 # unset proxy

View File

@@ -1,14 +1,14 @@
-# Installation arguments
+# https://hub.docker.com/r/rocm/pytorch/tags
 ARG BASE_IMAGE=rocm/pytorch:rocm6.4.1_ubuntu22.04_py3.10_pytorch_release_2.6.0
+FROM ${BASE_IMAGE}
+
+# Installation arguments
 ARG PIP_INDEX=https://pypi.org/simple
 ARG EXTRAS=metrics
 ARG INSTALL_FLASHATTN=false
 ARG HTTP_PROXY=""
 ARG PYTORCH_INDEX=https://download.pytorch.org/whl/rocm6.3
-
-# https://hub.docker.com/r/rocm/pytorch/tags
-FROM "${BASE_IMAGE}"
 
 # Define environments
 ENV MAX_JOBS=16
 ENV FLASH_ATTENTION_FORCE_BUILD=TRUE
@@ -45,7 +45,7 @@ COPY . /app
 RUN pip install --no-cache-dir -e ".[${EXTRAS}]" --no-build-isolation
 
 # Rebuild flash attention
-RUN if [ "$INSTALL_FLASHATTN" == "true" ]; then \
+RUN if [ "${INSTALL_FLASHATTN}" == "true" ]; then \
     pip uninstall -y ninja && \
     pip install --no-cache-dir ninja && \
     pip install --no-cache-dir flash-attn --no-build-isolation; \
@@ -55,11 +55,11 @@ RUN if [ "$INSTALL_FLASHATTN" == "true" ]; then \
 VOLUME [ "/root/.cache/huggingface", "/root/.cache/modelscope", "/root/.cache/openmind", "/app/data", "/app/output" ]
 
 # Expose port 7860 for LLaMA Board
-ENV GRADIO_SERVER_PORT 7860
+ENV GRADIO_SERVER_PORT=7860
 EXPOSE 7860
 
 # Expose port 8000 for API service
-ENV API_PORT 8000
+ENV API_PORT=8000
 EXPOSE 8000
 
 # unset proxy

View File

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
 import platform
 
 import accelerate
@@ -83,4 +84,9 @@ def print_env() -> None:
     except Exception:
         pass
 
+    if os.path.exists("data"):
+        info["Default data directory"] = "detected"
+    else:
+        info["Default data directory"] = "not detected"
+
     print("\n" + "\n".join([f"- {key}: {value}" for key, value in info.items()]) + "\n")