nix: experimental support for building a Docker container (#2470)

* nix: experimental support for building a Docker image

Run using something like:

```
docker run \
  --device nvidia.com/gpu=all \
  -it --rm -p 8080:80 \
  -v $PWD/data:/data \
  -v $PWD/tmp:/tmp \
  tgi-docker:latest \
  --model-id <model_id>
```
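
Once the container is up, a quick way to check that the server is responding (assuming the `-p 8080:80` mapping above and that the model has finished loading) is:

```
curl 127.0.0.1:8080/generate \
    -X POST \
    -H 'Content-Type: application/json' \
    -d '{"inputs": "What is deep learning?", "parameters": {"max_new_tokens": 20}}'
```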

* Example of building the Docker image using Nix inside Docker

* Stream to make the builder image smaller

This avoids storing a Docker image tarball inside the builder image. Instead,
the layers are streamed while doing `docker run`.
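
For reference, the difference also shows up in how the two new flake outputs are loaded; this is standard nixpkgs `dockerTools` behaviour rather than anything specific to this change:

```
# dockerImage: buildLayeredImage stores an image tarball in the Nix store.
nix build .#dockerImage
docker load < result

# dockerImageStreamed: streamLayeredImage builds a script that writes the
# layers to stdout, so no tarball is ever stored.
nix build .#dockerImageStreamed
./result | docker load
```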

* Don't spam journalctl on Linux

* Other dockerfile.

---------

Co-authored-by: Nicolas Patry <patry.nicolas@protonmail.com>

Daniël de Kok, 2024-10-01 18:02:06 +02:00, committed by GitHub
commit 584b4d7a68 (parent 1c84a30fe6)
4 changed files with 68 additions and 9 deletions

.dockerignore

@@ -4,3 +4,4 @@ server/transformers
 server/flash-attention
 cmake-build-debug/
 cmake-build-release/
+Dockerfile*

Dockerfile.nix (new file)

@@ -0,0 +1,24 @@
# Build the image and get out the docker file:
#
# docker build -t tgi-nix-builder -f Dockerfile.nix
# docker run --log-driver=none tgi-nix-builder | docker load
FROM nixos/nix:2.18.8 AS builder
RUN echo "experimental-features = nix-command flakes" >> /etc/nix/nix.conf
RUN nix profile install nixpkgs#cachix
RUN cachix use text-generation-inference
WORKDIR /root
ADD . .
RUN nix build .
RUN mkdir /tmp/nix-store-closure
RUN cp -R $(nix-store -qR result/) /tmp/nix-store-closure
FROM ubuntu:24.04
WORKDIR /app
# Copy /nix/store
COPY --from=builder /tmp/nix-store-closure /nix/store
COPY --from=builder /root/result /app
RUN ldconfig
CMD ["ldconfig", "/app/bin/text-generation-launcher"]

flake.nix

@@ -141,15 +141,26 @@
           };
         };

-        packages.default = pkgs.writeShellApplication {
-          name = "text-generation-inference";
-          runtimeInputs = [
-            server
-            router
-          ];
-          text = ''
-            ${launcher}/bin/text-generation-launcher "$@"
-          '';
+        packages = rec {
+          default = pkgs.writeShellApplication {
+            name = "text-generation-inference";
+            runtimeInputs = [
+              server
+              router
+            ];
+            text = ''
+              ${launcher}/bin/text-generation-launcher "$@"
+            '';
+          };
+
+          dockerImage = pkgs.callPackage nix/docker.nix {
+            text-generation-inference = default;
+          };
+
+          dockerImageStreamed = pkgs.callPackage nix/docker.nix {
+            text-generation-inference = default;
+            stream = true;
+          };
         };
       }
     );

nix/docker.nix (new file)

@@ -0,0 +1,23 @@
{
dockerTools,
cacert,
text-generation-inference,
stream ? false,
}:
let
build = if stream then dockerTools.streamLayeredImage else dockerTools.buildLayeredImage;
in
build {
name = "tgi-docker";
tag = "latest";
config = {
EntryPoint = [ "${text-generation-inference}/bin/text-generation-inference" ];
Env = [
"HF_HOME=/data"
"PORT=80"
];
};
contents = [ cacert ];
}