# preemo_text-generation-infe.../Dockerfile
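# Build the text-generation-router binary in a dedicated Rust build stage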
FROM rust:1.65 as router-builder
WORKDIR /usr/src
COPY rust-toolchain.toml rust-toolchain.toml
COPY proto proto
COPY router router
WORKDIR /usr/src/router
RUN cargo install --path .
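
# Build the text-generation-launcher binary in a separate Rust build stage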
FROM rust:1.65 as launcher-builder
WORKDIR /usr/src
COPY rust-toolchain.toml rust-toolchain.toml
COPY launcher launcher
WORKDIR /usr/src/launcher
RUN cargo install --path .
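
# Runtime image based on CUDA 11.8 (devel) / Ubuntu 22.04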
FROM nvidia/cuda:11.8.0-devel-ubuntu22.04
ENV LANG=C.UTF-8 \
    LC_ALL=C.UTF-8 \
    DEBIAN_FRONTEND=noninteractive \
    MODEL_BASE_PATH=/data \
    MODEL_ID=bigscience/bloom-560m \
    QUANTIZE=false \
    NUM_GPUS=1 \
    SAFETENSORS_FAST_GPU=1 \
    PORT=80 \
    CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 \
    NCCL_ASYNC_ERROR_HANDLING=1 \
    CUDA_HOME=/usr/local/cuda \
    LD_LIBRARY_PATH="/opt/miniconda/envs/text-generation/lib:/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH" \
    CONDA_DEFAULT_ENV=text-generation \
    PATH=$PATH:/opt/miniconda/envs/text-generation/bin:/opt/miniconda/bin:/usr/local/cuda/bin
SHELL ["/bin/bash", "-c"]
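
# Base system dependencies; curl is used below to download the Miniconda installer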
RUN apt-get update && apt-get install -y unzip curl libssl-dev && rm -rf /var/lib/apt/lists/*
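
# Install Miniconda and create a Python 3.9 environment for the server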
RUN cd ~ && \
    curl -L -O https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \
    chmod +x Miniconda3-latest-Linux-x86_64.sh && \
    bash ./Miniconda3-latest-Linux-x86_64.sh -bf -p /opt/miniconda && \
    conda create -n text-generation python=3.9 -y
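
# Build the Python model server inside the conda environment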
WORKDIR /usr/src
COPY server/Makefile server/Makefile
# Install specific version of torch
RUN cd server && make install-torch
# Install specific version of transformers
RUN cd server && BUILD_EXTENSIONS="True" make install-transformers
# Install server
COPY proto proto
COPY server server
RUN cd server && \
    make gen-server && \
    /opt/miniconda/envs/text-generation/bin/pip install ".[bnb]" --no-cache-dir
# Install router
COPY --from=router-builder /usr/local/cargo/bin/text-generation-router /usr/local/bin/text-generation-router
# Install launcher
COPY --from=launcher-builder /usr/local/cargo/bin/text-generation-launcher /usr/local/bin/text-generation-launcher
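
# Point the Hugging Face hub cache at MODEL_BASE_PATH and start the launcher,
# sharding the model across NUM_GPUS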
CMD HUGGINGFACE_HUB_CACHE=$MODEL_BASE_PATH text-generation-launcher --num-shard $NUM_GPUS --model-name $MODEL_ID --json-output
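
# Example usage (illustrative only, not part of the original file; assumes the
# NVIDIA container toolkit is installed and maps a host ./data directory as the
# model cache; the container listens on PORT, which defaults to 80 above):
#   docker build -t text-generation-inference .
#   docker run --gpus all -p 8080:80 -v $PWD/data:/data text-generation-inference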