name: Build and push docker image to internal registry

on:
  workflow_call:
    inputs:
      hardware:
        type: string
        description: Hardware
        # options:
        # - cuda
        # - rocm
        # - intel
        required: true

jobs:
  build-and-push:
    outputs:
      docker_image: ${{ steps.final.outputs.docker_image }}
      docker_devices: ${{ steps.final.outputs.docker_devices }}
      docker_volume: ${{ steps.final.outputs.docker_volume }}
      runs_on: ${{ steps.final.outputs.runs_on }}
      label: ${{ steps.final.outputs.label }}
    concurrency:
      group: ${{ github.workflow }}-build-and-push-image-${{ inputs.hardware }}-${{ github.head_ref || github.run_id }}
      cancel-in-progress: true
    # TODO see with @Glegendre to get CPU runner here instead
    runs-on: [self-hosted, nvidia-gpu, multi-gpu, 4-a10, ci]
    permissions:
      contents: write
      packages: write
      # This is used to complete the identity challenge
      # with sigstore/fulcio when running outside of PRs.
      id-token: write
      security-events: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Inject slug/short variables
        uses: rlespinasse/github-slug-action@v4.4.1

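      # Resolve the Dockerfile, image-tag suffix, device flags and runner label for the requested hardware.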
      - name: Construct hardware variables
        shell: bash
        run: |
          case ${{ inputs.hardware }} in
            cuda)
              export dockerfile="Dockerfile"
              export label_extension=""
              export docker_devices=""
              export runs_on="nvidia-gpu"
              ;;
            rocm)
              export dockerfile="Dockerfile_amd"
              export label_extension="-rocm"
              export docker_devices="/dev/kfd,/dev/dri"
              export runs_on="amd-gpu-tgi"
              ;;
            xpu)
              export dockerfile="Dockerfile_intel"
              export label_extension="-intel"
              export docker_devices=""
              export runs_on="ubuntu-latest"
              ;;
          esac
          echo $dockerfile
          echo "Dockerfile=${dockerfile}"
          echo $label_extension
          echo $docker_devices
          echo $runs_on
          echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
          echo "LABEL=${label_extension}" >> $GITHUB_ENV
          echo "DOCKER_DEVICES=${docker_devices}" >> $GITHUB_ENV
          echo "RUNS_ON=${runs_on}" >> $GITHUB_ENV

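      # Tailscale connection (presumably needed so this runner can reach registry.internal.huggingface.tech).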
      - name: Tailscale
        uses: huggingface/tailscale-action@main
        with:
          authkey: ${{ secrets.TAILSCALE_AUTHKEY }}
          slackChannel: ${{ secrets.SLACK_CIFEEDBACK_CHANNEL }}
          slackToken: ${{ secrets.SLACK_CIFEEDBACK_BOT_TOKEN }}

      - name: Initialize Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          install: true

      - name: Login to GitHub Container Registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to internal Container Registry
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.TAILSCALE_DOCKER_USERNAME }}
          password: ${{ secrets.TAILSCALE_DOCKER_PASSWORD }}
          registry: registry.internal.huggingface.tech

      - name: Login to Azure Container Registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.AZURE_DOCKER_USERNAME }}
          password: ${{ secrets.AZURE_DOCKER_PASSWORD }}
          registry: db4c2190dd824d1f950f5d1555fbadf0.azurecr.io

      # If pull request
      - name: Extract metadata (tags, labels) for Docker
        if: ${{ github.event_name == 'pull_request' }}
        id: meta-pr
        uses: docker/metadata-action@v5
        with:
          images: |
            registry.internal.huggingface.tech/api-inference/community/text-generation-inference
          tags: |
            type=raw,value=sha-${{ env.GITHUB_SHA_SHORT }}${{ env.LABEL }}

      # If main, release or tag
      - name: Extract metadata (tags, labels) for Docker
        if: ${{ github.event_name != 'pull_request' }}
        id: meta
        uses: docker/metadata-action@v4.3.0
        with:
          flavor: |
            latest=auto
          images: |
            registry.internal.huggingface.tech/api-inference/community/text-generation-inference
            ghcr.io/huggingface/text-generation-inference
            db4c2190dd824d1f950f5d1555fbadf0.azurecr.io/text-generation-inference
          tags: |
            type=semver,pattern={{version}}${{ env.LABEL }}
            type=semver,pattern={{major}}.{{minor}}${{ env.LABEL }}
            type=raw,value=latest${{ env.LABEL }},enable=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }}
            type=raw,value=sha-${{ env.GITHUB_SHA_SHORT }}${{ env.LABEL }}

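      # linux/amd64 only; the BuildKit layer cache is pushed to and pulled from the internal registry (cache<label> tag).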
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ${{ env.DOCKERFILE }}
          push: true
          platforms: 'linux/amd64'
          build-args: |
            GIT_SHA=${{ env.GITHUB_SHA }}
            DOCKER_LABEL=sha-${{ env.GITHUB_SHA_SHORT }}${{ env.LABEL }}
          tags: ${{ steps.meta.outputs.tags || steps.meta-pr.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels || steps.meta-pr.outputs.labels }}
          cache-from: type=registry,ref=registry.internal.huggingface.tech/api-inference/community/text-generation-inference:cache${{ env.LABEL }},mode=min
          cache-to: type=registry,ref=registry.internal.huggingface.tech/api-inference/community/text-generation-inference:cache${{ env.LABEL }},mode=min

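      # Expose the resolved image, devices, cache volume, runner label and tag suffix as job outputs for the jobs below.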
      - name: Final
        id: final
        run: |
          echo "docker_image=registry.internal.huggingface.tech/api-inference/community/text-generation-inference:sha-${{ env.GITHUB_SHA_SHORT }}${{ env.LABEL }}" >> "$GITHUB_OUTPUT"
          echo "docker_devices=${{ env.DOCKER_DEVICES }}" >> "$GITHUB_OUTPUT"
          echo "runs_on=${{ env.RUNS_ON }}" >> "$GITHUB_OUTPUT"
          echo "label=${{ env.LABEL }}" >> "$GITHUB_OUTPUT"

          if [[ ${{ inputs.hardware }} == "rocm" ]]
          then
            echo "docker_volume=/data/cache/.cache/huggingface/hub" >> "$GITHUB_OUTPUT"
          else
            echo "docker_volume=/mnt/cache" >> "$GITHUB_OUTPUT"
          fi

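  # Only runs for the ROCm build (runs_on == 'amd-gpu-tgi'); presumably cleans the Hugging Face cache and pre-downloads models before the integration tests.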
  prepare_integration_tests:
    runs-on: ["self-hosted", "${{ needs.build-and-push.outputs.runs_on }}", "multi-gpu"]
    needs: build-and-push
    concurrency:
      group: ${{ github.workflow }}-${{ github.job }}-${{ needs.build-and-push.outputs.label }}-${{ github.head_ref || github.run_id }}
      cancel-in-progress: true
    if: needs.build-and-push.outputs.runs_on == 'amd-gpu-tgi'
    container:
      image: ${{ needs.build-and-push.outputs.docker_image }}
      options: --shm-size "16gb" --ipc host -v ${{ needs.build-and-push.outputs.docker_volume }}:/data
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Clean Hugging Face cache
        shell: bash
        run: |
          if [[ ${{ inputs.hardware }} == "rocm" ]]
          then
            echo "pwd:"
            pwd
            echo "ls:"
            ls
            python integration-tests/clean_cache_and_download.py --token ${{ secrets.HF_TOKEN }}
          fi

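  # Integration tests run on the hardware-matched self-hosted runners; the Intel build (runs_on == 'ubuntu-latest') is skipped.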
  integration_tests:
    concurrency:
      group: ${{ github.workflow }}-${{ github.job }}-${{ needs.build-and-push.outputs.label }}-${{ github.head_ref || github.run_id }}
      cancel-in-progress: true
    needs: [build-and-push, prepare_integration_tests]
    runs-on: ["self-hosted", "${{ needs.build-and-push.outputs.runs_on }}", "multi-gpu"]
    if: needs.build-and-push.outputs.runs_on != 'ubuntu-latest'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Inject slug/short variables
        uses: rlespinasse/github-slug-action@v4.4.1

      # - name: Login to internal Container Registry
      #   uses: docker/login-action@v3
      #   with:
      #     username: ${{ secrets.TAILSCALE_DOCKER_USERNAME }}
      #     password: ${{ secrets.TAILSCALE_DOCKER_PASSWORD }}
      #     registry: registry.internal.huggingface.tech

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Install
        run: |
          make install-integration-tests

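      # Tailscale again (skipped on the AMD runners), presumably to pull the image from the internal registry.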
      - name: Tailscale
        uses: huggingface/tailscale-action@main
        if: needs.build-and-push.outputs.runs_on != 'amd-gpu-tgi'
        with:
          authkey: ${{ secrets.TAILSCALE_AUTHKEY }}

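      # DOCKER_IMAGE, DOCKER_DEVICES, SYSTEM and HUGGING_FACE_HUB_TOKEN are presumably read by the integration-tests harness.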
      - name: Run tests
        run: |
          export DOCKER_DEVICES=${{ needs.build-and-push.outputs.docker_devices }}
          export HUGGING_FACE_HUB_TOKEN=${{ secrets.HF_TOKEN }}

          export DOCKER_IMAGE=${{ needs.build-and-push.outputs.docker_image }}
          echo "DOCKER_IMAGE:"
          echo $DOCKER_IMAGE

          export SYSTEM=${{ inputs.hardware }}
          echo "SYSTEM:"
          echo $SYSTEM

          pytest -s -vvvvv integration-tests