name: docker-test-gpu

on:
  workflow_dispatch:

permissions: read-all

jobs:
  test:
    runs-on: self-hosted
    timeout-minutes: 240
    strategy:
      fail-fast: true
      max-parallel: 1
      matrix:
        backend:
          # - llama-cuda
          # - exllama # https://github.com/sozercan/aikit/issues/94
          - exllama2-gptq
          - exllama2-exl2
          - mamba
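
    # note: each enabled backend is built and smoke-tested sequentially
    # (max-parallel: 1) on the shared self-hosted GPU runner; llama-cuda and
    # exllama are commented out above, the latter tracked in issue #94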
    steps:
      - name: cleanup workspace
        run: |
          rm -rf ./* || true
          rm -rf ./.??* || true

      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4

      # use the default docker driver builder with the containerd image store for the local aikit image
      # these must be set up before running this test
      - run: docker buildx use default
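
      # build the aikit image from the repository Dockerfile and load it into
      # the local image store, presumably so the test model build below can
      # reference it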
      - name: build aikit
        run: |
          docker buildx build . -t aikit:test \
            --load --provenance=false --progress plain
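
      # build a model image for the current matrix backend from its aikitfile;
      # the aikitfile is assumed to use the locally loaded aikit:test image as
      # its BuildKit frontend, hence the build above must run first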
      - name: build test model
        run: |
          docker buildx build . -t testmodel:test \
            -f test/aikitfile-${{ matrix.backend }}.yaml \
            --load --provenance=false --progress plain

      - name: list images
        run: docker images
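
      # start the model container in the background with access to all GPUs;
      # it serves an OpenAI-compatible API on port 8080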
      - name: run test model
        run: docker run --name testmodel -d --rm -p 8080:8080 --gpus all testmodel:test
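
      # smoke tests: curl retries until the API is reachable, then the response
      # must contain a "choices" array; only the step matching the current
      # backend runs, so the gguf test stays inert while llama-cuda is
      # commented out in the matrix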
      - name: run test (gguf)
        if: matrix.backend == 'llama-cuda'
        run: |
          result=$(curl --fail --retry 10 --retry-all-errors http://127.0.0.1:8080/v1/chat/completions -H "Content-Type: application/json" -d '{
            "model": "llama-3-8b-instruct",
            "messages": [{"role": "user", "content": "explain kubernetes in a sentence"}]
          }')
          echo "$result"
          choices=$(echo "$result" | jq '.choices')
          if [ -z "$choices" ] || [ "$choices" = "null" ]; then
            exit 1
          fi

      - name: run test (exl2/gptq)
        if: matrix.backend == 'exllama2-gptq' || matrix.backend == 'exllama2-exl2'
        run: |
          result=$(curl --fail --retry 10 --retry-all-errors http://127.0.0.1:8080/v1/chat/completions -H "Content-Type: application/json" -d '{
            "model": "llama-2-7b-chat",
            "messages": [{"role": "user", "content": "explain kubernetes in a sentence"}]
          }')
          echo "$result"
          choices=$(echo "$result" | jq '.choices')
          if [ -z "$choices" ] || [ "$choices" = "null" ]; then
            exit 1
          fi

      - name: run test (mamba)
        if: matrix.backend == 'mamba'
        run: |
          result=$(curl --fail --retry 10 --retry-all-errors http://127.0.0.1:8080/v1/chat/completions -H "Content-Type: application/json" -d '{
            "model": "mamba-chat",
            "messages": [{"role": "user", "content": "explain kubernetes in a sentence"}]
          }')
          echo "$result"
          choices=$(echo "$result" | jq '.choices')
          if [ -z "$choices" ] || [ "$choices" = "null" ]; then
            exit 1
          fi
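
      # always capture the container logs, even on failure, so they can be
      # uploaded as artifacts below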
      - name: save logs
        if: always()
        run: docker logs testmodel > /tmp/docker-${{ matrix.backend }}.log

      - run: docker stop testmodel
        if: always()
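
      # prune images and volumes to keep the self-hosted runner's disk clean
      # between runs; errors are ignored so cleanup never fails the job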
      - run: docker system prune -a -f --volumes || true
        if: always()
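
      # upload the saved logs for this backend even when the job fails, so
      # runs can be inspected after the workspace is cleaned up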
      - name: publish test artifacts
        if: always()
        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
        with:
          name: test-${{ matrix.backend }}
          path: |
            /tmp/*.log