hf_text-generation-inference/.github/workflows/build.yaml

name: Build and push docker image to internal registry
on:
  workflow_call:
    inputs:
      hardware:
        type: string
        description: Hardware
        # options:
        # - cuda
        # - rocm
        # - xpu
        required: true
      release-tests:
        description: "Run release integration tests"
        required: true
        default: false
        type: boolean
jobs:
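  # Builds the TGI image for the requested hardware flavor and exposes the pushed image,
  # device mounts, runner label and cache volume to the integration-test jobs below.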
  build-and-push:
    outputs:
      docker_image: ${{ steps.final.outputs.docker_image }}
      base_docker_image: ${{ steps.final.outputs.base_docker_image }}
      docker_devices: ${{ steps.final.outputs.docker_devices }}
      docker_volume: ${{ steps.final.outputs.docker_volume }}
      runs_on: ${{ steps.final.outputs.runs_on }}
      label: ${{ steps.final.outputs.label }}
    concurrency:
      group: ${{ github.workflow }}-build-and-push-image-${{ inputs.hardware }}-${{ github.head_ref || github.run_id }}
      cancel-in-progress: true
    # TODO see with @Glegendre to get CPU runner here instead
    runs-on: [self-hosted, intel-cpu, 32-cpu, 256-ram, ci]
    permissions:
      contents: write
      packages: write
      # This is used to complete the identity challenge
      # with sigstore/fulcio when running outside of PRs.
      id-token: write
      security-events: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Inject slug/short variables
        uses: rlespinasse/github-slug-action@v4.4.1
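      # Map the requested hardware flavor to the Dockerfile, image tag suffix, GPU device
      # mounts and runner label that every job below relies on.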
      - name: Construct hardware variables
        shell: bash
        run: |
          case ${{ inputs.hardware }} in
            cuda)
              export dockerfile="Dockerfile"
              export label_extension=""
              export docker_devices=""
              export runs_on="nvidia-gpu"
              ;;
            rocm)
              export dockerfile="Dockerfile_amd"
              export label_extension="-rocm"
              export docker_devices="/dev/kfd,/dev/dri"
              export runs_on="amd-gpu-tgi"
              ;;
            xpu)
              export dockerfile="Dockerfile_intel"
              export label_extension="-intel"
              export docker_devices=""
              export runs_on="ubuntu-latest"
              ;;
          esac
          echo $dockerfile
          echo "Dockerfile=${dockerfile}"
          echo $label_extension
          echo $docker_devices
          echo $runs_on
          echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
          echo "LABEL=${label_extension}" >> $GITHUB_ENV
          echo "DOCKER_DEVICES=${docker_devices}" >> $GITHUB_ENV
          echo "RUNS_ON=${runs_on}" >> $GITHUB_ENV
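      # Buildx is configured with a docker.io registry mirror, presumably to avoid Docker Hub
      # pulls and rate limits on the CI runners.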
      - name: Initialize Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          install: true
          config-inline: |
            [registry."docker.io"]
              mirrors = ["registry.github-runners.huggingface.tech"]

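      # Registry logins are skipped on pull requests: PR builds only push to the internal
      # registry (see the metadata steps below).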
      - name: Login to GitHub Container Registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Azure Container Registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.AZURE_DOCKER_USERNAME }}
          password: ${{ secrets.AZURE_DOCKER_PASSWORD }}
          registry: db4c2190dd824d1f950f5d1555fbadf0.azurecr.io

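      # Tagging: PR builds get a single sha-<short-sha><label> tag; other builds also get
      # semver tags on releases and a latest<label> tag on the default branch, across all
      # three registries.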
      # If pull request
      - name: Extract metadata (tags, labels) for Docker
        if: ${{ github.event_name == 'pull_request' }}
        id: meta-pr
        uses: docker/metadata-action@v5
        with:
          images: |
            registry-push.github-runners.huggingface.tech/api-inference/community/text-generation-inference
          tags: |
            type=raw,value=sha-${{ env.GITHUB_SHA_SHORT }}${{ env.LABEL }}
      # If main, release or tag
      - name: Extract metadata (tags, labels) for Docker
        if: ${{ github.event_name != 'pull_request' }}
        id: meta
        uses: docker/metadata-action@v4.3.0
        with:
          flavor: |
            latest=auto
          images: |
            registry-push.github-runners.huggingface.tech/api-inference/community/text-generation-inference
            ghcr.io/huggingface/text-generation-inference
            db4c2190dd824d1f950f5d1555fbadf0.azurecr.io/text-generation-inference
          tags: |
            type=semver,pattern={{version}}${{ env.LABEL }}
            type=semver,pattern={{major}}.{{minor}}${{ env.LABEL }}
            type=raw,value=latest${{ env.LABEL }},enable=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }}
            type=raw,value=sha-${{ env.GITHUB_SHA_SHORT }}${{ env.LABEL }}
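      # The buildx layer cache is stored in S3, keyed per hardware flavor through the LABEL suffix.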
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ${{ env.DOCKERFILE }}
          push: true
          platforms: 'linux/amd64'
          build-args: |
            GIT_SHA=${{ env.GITHUB_SHA }}
            DOCKER_LABEL=sha-${{ env.GITHUB_SHA_SHORT }}${{ env.LABEL }}
          tags: ${{ steps.meta.outputs.tags || steps.meta-pr.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels || steps.meta-pr.outputs.labels }}
          cache-from: type=s3,region=us-east-1,bucket=ci-docker-buildx-cache,name=text-generation-inference-cache${{ env.LABEL }},mode=min,access_key_id=${{ secrets.S3_CI_DOCKER_BUILDX_CACHE_ACCESS_KEY_ID }},secret_access_key=${{ secrets.S3_CI_DOCKER_BUILDX_CACHE_SECRET_ACCESS_KEY }}
          cache-to: type=s3,region=us-east-1,bucket=ci-docker-buildx-cache,name=text-generation-inference-cache${{ env.LABEL }},mode=min,access_key_id=${{ secrets.S3_CI_DOCKER_BUILDX_CACHE_ACCESS_KEY_ID }},secret_access_key=${{ secrets.S3_CI_DOCKER_BUILDX_CACHE_SECRET_ACCESS_KEY }}
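      # Expose everything the downstream jobs need: the pushed image, device mounts, runner
      # label, hub-cache volume, and a per-hardware base image used by the preparation job.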
      - name: Final
        id: final
        run: |
          echo "docker_image=registry-push.github-runners.huggingface.tech/api-inference/community/text-generation-inference:sha-${{ env.GITHUB_SHA_SHORT }}${{ env.LABEL }}" >> "$GITHUB_OUTPUT"
          echo "docker_devices=${{ env.DOCKER_DEVICES }}" >> "$GITHUB_OUTPUT"
          echo "runs_on=${{ env.RUNS_ON }}" >> "$GITHUB_OUTPUT"
          echo "label=${{ env.LABEL }}" >> "$GITHUB_OUTPUT"

          if [[ ${{ inputs.hardware }} == "rocm" ]]
          then
            echo "base_docker_image=rocm/dev-ubuntu-22.04:6.1.1_hip_update" >> "$GITHUB_OUTPUT"
          elif [[ ${{ inputs.hardware }} == "cuda" ]]
          then
            echo "base_docker_image=nvidia/cuda:12.1.0-base-ubuntu22.04" >> "$GITHUB_OUTPUT"
          elif [[ ${{ inputs.hardware }} == "xpu" ]]
          then
            echo "base_docker_image=intel/intel-extension-for-pytorch:2.1.30-xpu" >> "$GITHUB_OUTPUT"
          else
            exit 1
          fi

          if [[ ${{ inputs.hardware }} == "rocm" ]]
          then
            echo "docker_volume=/data/cache/.cache/huggingface/hub" >> "$GITHUB_OUTPUT"
          else
            echo "docker_volume=/mnt/cache" >> "$GITHUB_OUTPUT"
          fi
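
  # Prepares the self-hosted GPU runner inside the bare per-hardware base image; for ROCm it
  # cleans and refreshes the Hugging Face hub cache via integration-tests/clean_cache_and_download.py.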
  prepare_integration_tests:
    runs-on: ["self-hosted", "${{ needs.build-and-push.outputs.runs_on }}", "multi-gpu"]
    needs: [build-and-push]
    concurrency:
      group: ${{ github.workflow }}-${{ github.job }}-${{ needs.build-and-push.outputs.label }}-${{ github.head_ref || github.run_id }}
      cancel-in-progress: true
    if: needs.build-and-push.outputs.runs_on != 'ubuntu-latest'
    # Ideally, we would use the image from registry.internal.huggingface.tech, but we cannot log in to the private registry outside of tailscale,
    # and even adding a previous job with a tailscale login still results in `Docker login for 'registry.internal.huggingface.tech' failed with exit code 1`.
    container:
      image: ${{ needs.build-and-push.outputs.base_docker_image }}
      options: --shm-size "16gb" --ipc host -v ${{ needs.build-and-push.outputs.docker_volume }}:/data
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Clean Hugging Face cache
        shell: bash
        run: |
          if [[ ${{ inputs.hardware }} == "rocm" ]]
          then
            echo "pwd:"
            pwd
            echo "ls:"
            ls

            pip3 install -U huggingface_hub
            python3 integration-tests/clean_cache_and_download.py --token ${{ secrets.HF_TOKEN }} --cache-dir /data

            # Avoid permission issues in the next step, which does not run within docker
            # (File was unable to be removed Error: EACCES).
            if [[ $PWD == *"text-generation-inference"* ]]; then
              rm -rf -- ..?* .[!.]* *
            fi
          fi
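
  # Runs the pytest integration suite against the freshly built image on the target GPU
  # runner; skipped when runs_on is 'ubuntu-latest' (the xpu flavor).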
  integration_tests:
    concurrency:
      group: ${{ github.workflow }}-${{ github.job }}-${{ needs.build-and-push.outputs.label }}-${{ github.head_ref || github.run_id }}
      cancel-in-progress: true
    needs: [build-and-push, prepare_integration_tests]
    runs-on: ["self-hosted", "${{ needs.build-and-push.outputs.runs_on }}", "multi-gpu"]
    if: needs.build-and-push.outputs.runs_on != 'ubuntu-latest'
    env:
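      # --release is added on main, on release tags, or when the release-tests input is set,
      # enabling the slower release-only tests.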
      PYTEST_FLAGS: ${{ (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || inputs.release-tests == true) && '--release' || '' }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Inject slug/short variables
        uses: rlespinasse/github-slug-action@v4.4.1
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install
        run: |
          make install-integration-tests
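      # The integration-test harness reads these variables from the environment; they are
      # exported below so pytest can launch the built image with the right devices and hub cache.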
      - name: Run tests
        run: |
          export DOCKER_DEVICES=${{ needs.build-and-push.outputs.docker_devices }}
          export HF_TOKEN=${{ secrets.HF_TOKEN }}
          export DOCKER_IMAGE=${{ needs.build-and-push.outputs.docker_image }}
          echo "DOCKER_IMAGE:"
          echo $DOCKER_IMAGE
          export SYSTEM=${{ inputs.hardware }}
          echo "SYSTEM:"
          echo $SYSTEM
          export DOCKER_VOLUME=${{ needs.build-and-push.outputs.docker_volume }}
          echo "DOCKER_VOLUME:"
          echo $DOCKER_VOLUME
          # TunableOp warmup is rather slow; only run it for a few sequence lengths.
          if [[ ${{ inputs.hardware }} == "rocm" ]]
          then
            export PYTORCH_TUNABLEOP_SEQLENS=2,4
          fi
          pytest -s -vvvvv integration-tests ${PYTEST_FLAGS}