```dockerfile
FROM rocm/pytorch:rocm5.7_ubuntu22.04_py3.10_pytorch_2.0.1

# Install some basic utilities
RUN apt-get update && apt-get install python3 python3-pip -y

# Install additional utilities and build dependencies
RUN apt-get update && apt-get install -y \
    curl \
    ca-certificates \
    sudo \
    git \
    bzip2 \
    libx11-6 \
    build-essential \
    wget \
    unzip \
    nvidia-cuda-toolkit \
    tmux \
 && rm -rf /var/lib/apt/lists/*

### Mount Point ###
# When launching the container, mount the code directory to /app
ARG APP_MOUNT=/app
VOLUME [ ${APP_MOUNT} ]
WORKDIR ${APP_MOUNT}

RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install --no-cache-dir fastapi ninja tokenizers pandas

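# Make the ROCm toolchain and the libtorch headers/libraries visible to the compiler, linker, and loader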
ENV LLVM_SYMBOLIZER_PATH=/opt/rocm/llvm/bin/llvm-symbolizer
ENV PATH=$PATH:/opt/rocm/bin:/libtorch/bin:
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/rocm/lib/:/libtorch/lib:
ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/libtorch/include:/libtorch/include/torch/csrc/api/include/:/opt/rocm/include/:

# Install ROCm flash-attention
RUN mkdir libs \
    && cd libs \
    && git clone https://github.com/ROCmSoftwarePlatform/flash-attention.git \
    && cd flash-attention \
    && git checkout 3d2b6f5 \
    && git submodule update --init \
    && export GPU_ARCHS=$(/opt/rocm/llvm/bin/amdgpu-offload-arch) \
    && patch /opt/conda/envs/py_3.10/lib/python3.10/site-packages/torch/utils/hipify/hipify_python.py hipify_patch.patch \
    && python3 setup.py install \
    && cd ..

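# Copy the Aphrodite Engine source into the image and build it against ROCm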
COPY ./ /app/aphrodite-engine

RUN python3 -m pip install --upgrade pip
RUN pip install xformers==0.0.22.post7 --no-deps

RUN cd /app \
    && cd aphrodite-engine \
    && pip install -U -r requirements-rocm.txt \
    && bash patch_xformers-0.0.22.post7.rocm.sh \
    && python3 setup.py install \
    && cd ..

RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install --no-cache-dir ray[all]

CMD ["/bin/bash"]
```
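A minimal sketch of how the image might be built and launched, assuming the file above is saved as `Dockerfile.rocm` at the root of the aphrodite-engine checkout; the filename and the `aphrodite-rocm` tag are illustrative, `--device=/dev/kfd --device=/dev/dri --group-add video` is the usual way to expose AMD GPUs to a ROCm container, and the bind mount follows the Dockerfile's note about mounting the code directory to /app:

```bash
# Build the image from the repository root (filename and tag are assumptions)
docker build -f Dockerfile.rocm -t aphrodite-rocm .

# Run it with the AMD GPU devices passed through and the source mounted at /app
docker run -it --device=/dev/kfd --device=/dev/dri --group-add video \
    -v "$(pwd)":/app aphrodite-rocm
```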