# syntax=docker/dockerfile:1
# text-generation-inference — multi-stage build:
#   1. cargo-chef planner/builder stages compile the Rust router + launcher
#   2. CUDA runtime stage installs the Python inference server
# cargo-chef computes a dependency "recipe" so that dependency compilation
# can be cached independently of application source changes.
FROM lukemathwalker/cargo-chef:latest-rust-1.67 AS chef
WORKDIR /usr/src

# Planner stage: only produces recipe.json from the manifests and sources.
FROM chef AS planner
COPY Cargo.toml Cargo.toml
COPY rust-toolchain.toml rust-toolchain.toml
COPY proto proto
COPY router router
COPY launcher launcher
RUN cargo chef prepare --recipe-path recipe.json
FROM chef AS builder

# protoc is required to compile the gRPC protobuf definitions in proto/.
# -f makes curl fail on HTTP errors instead of saving an error page that
# unzip would then choke on.
RUN PROTOC_ZIP=protoc-21.12-linux-x86_64.zip && \
    curl -fOL https://github.com/protocolbuffers/protobuf/releases/download/v21.12/$PROTOC_ZIP && \
    unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \
    unzip -o $PROTOC_ZIP -d /usr/local 'include/*' && \
    rm -f $PROTOC_ZIP

# Build only the dependencies from the planner's recipe; this layer stays
# cached until the dependency set changes.
COPY --from=planner /usr/src/recipe.json recipe.json
RUN cargo chef cook --release --recipe-path recipe.json
# Copy the real sources and build the release binaries (router + launcher).
# Dependencies were already compiled by `cargo chef cook` above.
COPY Cargo.toml Cargo.toml
COPY rust-toolchain.toml rust-toolchain.toml
COPY proto proto
COPY router router
COPY launcher launcher
RUN cargo build --release
# Runtime image: CUDA 11.8 devel is needed to build the Python extensions
# (flash-attention, transformers custom kernels) further down.
FROM nvidia/cuda:11.8.0-devel-ubuntu22.04

# NOTE(review): DEBIAN_FRONTEND=noninteractive is strictly a build-time
# setting; baking it into the runtime env is a known anti-pattern, kept
# here to preserve existing behavior.
ENV LANG=C.UTF-8 \
    LC_ALL=C.UTF-8 \
    DEBIAN_FRONTEND=noninteractive \
    HUGGINGFACE_HUB_CACHE=/data \
    HF_HUB_ENABLE_HF_TRANSFER=1 \
    MODEL_ID=bigscience/bloom-560m \
    QUANTIZE=false \
    NUM_SHARD=1 \
    PORT=80 \
    CUDA_HOME=/usr/local/cuda \
    LD_LIBRARY_PATH="/opt/miniconda/envs/text-generation/lib:/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH" \
    CONDA_DEFAULT_ENV=text-generation \
    PATH=$PATH:/opt/miniconda/envs/text-generation/bin:/opt/miniconda/bin:/usr/local/cuda/bin
# System dependencies; apt lists are removed in the same layer (DL3009) and
# recommended-but-unneeded packages are skipped (DL3015).
RUN apt-get update && \
    apt-get install -y --no-install-recommends curl git libssl-dev && \
    rm -rf /var/lib/apt/lists/*

# Install Miniconda and create the python 3.9 environment the server runs in.
# The installer script is deleted in the same layer so it does not persist
# in the image; -f makes curl fail loudly on HTTP errors.
RUN cd ~ && \
    curl -fL -O https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \
    chmod +x Miniconda3-latest-Linux-x86_64.sh && \
    bash ./Miniconda3-latest-Linux-x86_64.sh -bf -p /opt/miniconda && \
    rm -f Miniconda3-latest-Linux-x86_64.sh && \
    conda create -n text-generation python=3.9 -y
WORKDIR /usr/src

# Install torch with CUDA 11.8 wheels.
# NOTE(review): torch is unpinned here — builds are not reproducible;
# consider pinning to a specific version.
RUN pip install torch --extra-index-url https://download.pytorch.org/whl/cu118 --no-cache-dir

# Copy only the Makefile first so the flash-attention / transformers layers
# stay cached when other server sources change.
COPY server/Makefile server/Makefile

# Install specific version of flash attention
RUN cd server && make install-flash-attention

# Install specific version of transformers
RUN cd server && BUILD_EXTENSIONS="True" make install-transformers

# Install the inference server itself (with bitsandbytes extras) into the
# conda environment; the explicit pip path ensures the right interpreter.
COPY proto proto
COPY server server
RUN cd server && \
    make gen-server && \
    /opt/miniconda/envs/text-generation/bin/pip install ".[bnb]" --no-cache-dir
# Copy the Rust binaries produced by the builder stage onto PATH.
COPY --from=builder /usr/src/target/release/text-generation-router /usr/local/bin/text-generation-router
COPY --from=builder /usr/src/target/release/text-generation-launcher /usr/local/bin/text-generation-launcher

# Exec-form entrypoint: launcher is PID 1 and receives SIGTERM directly;
# CMD supplies default args that `docker run` can override.
ENTRYPOINT ["text-generation-launcher"]
CMD ["--json-output"]