# Running C tests after build #572
# Workflow file for this run
# (Captured from the GitHub web UI; hidden/bidirectional Unicode notice omitted —
# review in an editor that reveals hidden Unicode characters if in doubt.)
# CI workflow: builds onnxruntime-genai for Linux x64 with CUDA inside a
# prebuilt docker image and runs the native unit tests on a self-hosted T4 GPU pool.
name: "Linux GPU x64 Build"

# Runs on manual dispatch and on every pull request.
on: [ workflow_dispatch, pull_request ]

# Cancel an in-flight run for the same branch when a newer one starts;
# falls back to run_id so dispatch runs never share a group.
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

env:
  # Prebuilt OnnxRuntime GPU release archive (directory name, archive name, download URL).
  ort_dir: "onnxruntime-linux-x64-gpu-1.17.0"
  ort_zip: "onnxruntime-linux-x64-gpu-1.17.0.tgz"
  ort_url: "https://github.com/microsoft/onnxruntime/releases/download/v1.17.0/onnxruntime-linux-x64-gpu-1.17.0.tgz"
  # Alternative CUDA 12 package — kept for easy switching:
  # ort_dir: "onnxruntime-linux-x64-cuda-1.17.0"
  # ort_zip: "onnxruntime-linux-x64-cuda12-1.17.0.tgz"
  # ort_url: "https://github.com/microsoft/onnxruntime/releases/download/v1.17.0/onnxruntime-linux-x64-cuda12-1.17.0.tgz"

jobs:
  linux-gpu-x64-build:
    # Self-hosted 1ES pool with NVIDIA T4 GPUs (Ubuntu 20.04).
    runs-on: [ "self-hosted", "1ES.Pool=onnxruntime-genai-Ubuntu2004-T4" ]
    steps:
      - name: Checkout OnnxRuntime GenAI repo
        uses: actions/checkout@v4
        with:
          submodules: true

      # Kept for reference — the pool images already ship the toolkit:
      # - name: Install Nvidia Container Toolkit
      #   run: |
      #     curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
      #       && curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | \
      #       sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | \
      #       sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list
      #     sudo apt-get update
      #     sudo apt-get install -y nvidia-container-toolkit

      - name: Download OnnxRuntime
        run: |
          curl -L -o ${{ env.ort_zip }} ${{ env.ort_url }}

      - name: Unzip OnnxRuntime
        run: |
          tar -xzf ${{ env.ort_zip }}
          rm ${{ env.ort_zip }}

      # The build expects the OnnxRuntime package under a fixed "ort" directory.
      - name: Rename OnnxRuntime to ort
        run: |
          mv ${{ env.ort_dir }} ort

      # Sanity check: confirm the runner actually sees a GPU before building.
      - name: Print nvidia-smi
        run: |
          nvidia-smi

      # Pulls (or builds and caches) the CUDA build image from the shared registry.
      - name: Download Docker Image
        run: |
          set -e -x
          python3 tools/ci_build/get_docker_image.py --dockerfile tools/ci_build/github/linux/docker/inference/x64/default/gpu/Dockerfile \
            --context tools/ci_build/github/linux/docker/inference/x64/default/gpu \
            --docker-build-args "--build-arg BUILD_UID=$( id -u )" \
            --container-registry onnxruntimebuildcache \
            --repository onnxruntimegpubuild

      - name: Print Docker Image Environment Variables
        run: |
          echo "Printing docker image environment variables"
          docker run --rm onnxruntimegpubuild env

      - name: Check CUDA Info
        run: |
          echo "Finding CUDA version"
          docker run --rm onnxruntimegpubuild bash -c "nvcc --version && which nvcc"

      # CMake configure targeting SM 8.6 (T4-compatible build config); PHI2 tests disabled.
      - name: Docker -- Configure with CMake and GCC
        run: |
          echo "Running docker image onnxruntimegpubuild"
          docker run \
            --gpus all \
            --rm \
            --volume $GITHUB_WORKSPACE:/onnxruntime_src \
            -w /onnxruntime_src onnxruntimegpubuild bash -c "/usr/bin/cmake -DCMAKE_CUDA_ARCHITECTURES=86 --preset linux_gcc_cuda_release -DTEST_PHI2=False"

      - name: Docker -- Build with CMake and GCC
        run: |
          echo "Running docker image onnxruntimegpubuild"
          docker run \
            --gpus all \
            --rm \
            --volume $GITHUB_WORKSPACE:/onnxruntime_src \
            -w /onnxruntime_src onnxruntimegpubuild bash -c "/usr/bin/cmake --build --preset linux_gcc_cuda_release"

      - name: Docker -- check test directory
        run: |
          echo "Running docker image onnxruntimegpubuild"
          docker run \
            --gpus all \
            --rm \
            --volume $GITHUB_WORKSPACE:/onnxruntime_src \
            -w /onnxruntime_src onnxruntimegpubuild bash -c "ls -l /onnxruntime_src/build/gcc_cuda/release/test/"

      - name: Docker -- Run tests
        run: |
          echo "Running docker image onnxruntimegpubuild"
          docker run \
            --gpus all \
            --rm \
            --volume $GITHUB_WORKSPACE:/onnxruntime_src \
            -w /onnxruntime_src onnxruntimegpubuild bash -c "/onnxruntime_src/build/gcc_cuda/release/test/unit_tests"

      # TODO: Re-enable these tests when python version is updated
      # - name: Install the onnxruntime-genai Python wheel and run Python tests
      #   run: |
      #     echo "Installing the onnxruntime-genai Python wheel and running the Python tests"
      #     docker run \
      #       --gpus all \
      #       --rm \
      #       --volume $GITHUB_WORKSPACE:/onnxruntime_src \
      #       -w /onnxruntime_src onnxruntimegpubuild bash -c "python3 -m pip install /onnxruntime_src/build/gcc_cuda/release/wheel/onnxruntime_genai*.whl && python3 -m pip install -r test/python/requirements.txt && python3 test/python/test_onnxruntime_genai.py --cwd test/python --test_models test/test_models"