diff --git a/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu-cuda12.yml b/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu-cuda12.yml
index f68847afff379..bb79356169f1d 100644
--- a/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu-cuda12.yml
+++ b/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu-cuda12.yml
@@ -53,8 +53,8 @@ jobs:
     inputs:
       targetType: filePath
       filePath: tools/ci_build/github/linux/run_python_dockerbuild.sh
-      # please check ONNXRUNTIME_CUDA_VERSION in tools/ci_build/github/linux/build_linux_arm64_python_package.sh
-      arguments: -i onnxruntimecuda118xtrt86build${{ parameters.arch }} -d "GPU" -c ${{ parameters.cmake_build_type }} -x "${{ parameters.extra_build_arg }}"
+      # The CUDA version is passed to tools/ci_build/github/linux/build_linux_python_package.sh via the -u option.
+      arguments: -i onnxruntimecuda118xtrt86build${{ parameters.arch }} -d "GPU" -c ${{ parameters.cmake_build_type }} -x "${{ parameters.extra_build_arg }}" -u "12.2"
 
   - task: PublishBuildArtifacts@1
     displayName: 'Publish Artifact: ONNXRuntime python wheel'
diff --git a/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu.yml b/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu.yml
index 0f988039339af..412a02e628e2f 100644
--- a/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu.yml
+++ b/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu.yml
@@ -64,8 +64,8 @@ jobs:
     inputs:
       targetType: filePath
       filePath: tools/ci_build/github/linux/run_python_dockerbuild.sh
-      # please check ONNXRUNTIME_CUDA_VERSION in tools/ci_build/github/linux/build_linux_arm64_python_package.sh
-      arguments: -i onnxruntimecuda118xtrt86build${{ parameters.arch }} -d "GPU" -c ${{ parameters.cmake_build_type }} -x "${{ parameters.extra_build_arg }}"
+      # The CUDA version is passed to tools/ci_build/github/linux/build_linux_python_package.sh via the -u option.
+      arguments: -i onnxruntimecuda118xtrt86build${{ parameters.arch }} -d "GPU" -c ${{ parameters.cmake_build_type }} -x "${{ parameters.extra_build_arg }}" -u "11.8"
 
   - task: PublishBuildArtifacts@1
     displayName: 'Publish Artifact: ONNXRuntime python wheel'
diff --git a/tools/ci_build/github/linux/build_linux_arm64_python_package.sh b/tools/ci_build/github/linux/build_linux_python_package.sh
similarity index 86%
rename from tools/ci_build/github/linux/build_linux_arm64_python_package.sh
rename to tools/ci_build/github/linux/build_linux_python_package.sh
index 516f320cd64c4..e3d5acb30a8f3 100755
--- a/tools/ci_build/github/linux/build_linux_arm64_python_package.sh
+++ b/tools/ci_build/github/linux/build_linux_python_package.sh
@@ -10,13 +10,14 @@ EXTRA_ARG=""
 # Put 3.8 at the last because Ubuntu 20.04 use python 3.8 and we will upload the intermediate build files of this
 # config to Azure DevOps Artifacts and download them to a Ubuntu 20.04 machine to run the tests.
 PYTHON_EXES=("/opt/python/cp39-cp39/bin/python3.9" "/opt/python/cp310-cp310/bin/python3.10" "/opt/python/cp311-cp311/bin/python3.11" "/opt/python/cp38-cp38/bin/python3.8")
-while getopts "d:p:x:c:" parameter_Option
+while getopts "d:p:x:c:u:" parameter_Option
 do case "${parameter_Option}"
 in
 #GPU or CPU.
 d) BUILD_DEVICE=${OPTARG};;
 p) PYTHON_EXES=(${OPTARG});;
 x) EXTRA_ARG=(${OPTARG});;
 c) BUILD_CONFIG=${OPTARG};;
+u) CUDA_VERSION=${OPTARG};;
 esac
 done
@@ -59,10 +60,9 @@ if [ "$ARCH" == "x86_64" ]; then
   BUILD_ARGS+=("--enable_onnx_tests")
 fi
 
-if [ "$BUILD_DEVICE" == "GPU" ]; then
+if [ "$BUILD_DEVICE" == "GPU" ] && [ "$CUDA_VERSION" != "" ]; then
   #Enable CUDA and TRT EPs.
- ONNXRUNTIME_CUDA_VERSION="11.8" - BUILD_ARGS+=("--nvcc_threads=1" "--use_cuda" "--use_tensorrt" "--cuda_version=$ONNXRUNTIME_CUDA_VERSION" "--tensorrt_home=/usr" "--cuda_home=/usr/local/cuda-$ONNXRUNTIME_CUDA_VERSION" "--cudnn_home=/usr/local/cuda-$ONNXRUNTIME_CUDA_VERSION" "--cmake_extra_defines" "CMAKE_CUDA_ARCHITECTURES=52;60;61;70;75;80") + BUILD_ARGS+=("--nvcc_threads=1" "--use_cuda" "--use_tensorrt" "--cuda_version=$CUDA_VERSION" "--tensorrt_home=/usr" "--cuda_home=/usr/local/cuda-$CUDA_VERSION" "--cudnn_home=/usr/local/cuda-$CUDA_VERSION" "--cmake_extra_defines" "CMAKE_CUDA_ARCHITECTURES=52;60;61;70;75;80") fi export CFLAGS diff --git a/tools/ci_build/github/linux/build_linux_python_package_cuda12.sh b/tools/ci_build/github/linux/build_linux_python_package_cuda12.sh deleted file mode 100755 index a04e72ed1a348..0000000000000 --- a/tools/ci_build/github/linux/build_linux_python_package_cuda12.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -set -e -x - -# This script invokes build.py - -mkdir -p /build/dist - -EXTRA_ARG="" - -# Put 3.8 at the last because Ubuntu 20.04 use python 3.8 and we will upload the intermediate build files of this -# config to Azure DevOps Artifacts and download them to a Ubuntu 20.04 machine to run the tests. -PYTHON_EXES=("/opt/python/cp39-cp39/bin/python3.9" "/opt/python/cp310-cp310/bin/python3.10" "/opt/python/cp311-cp311/bin/python3.11" "/opt/python/cp38-cp38/bin/python3.8") -while getopts "d:p:x:c:" parameter_Option -do case "${parameter_Option}" -in -#GPU or CPU. -d) BUILD_DEVICE=${OPTARG};; -p) PYTHON_EXES=(${OPTARG});; -x) EXTRA_ARG=(${OPTARG});; -c) BUILD_CONFIG=${OPTARG};; -esac -done - -BUILD_ARGS=("--build_dir" "/build" "--config" "$BUILD_CONFIG" "--update" "--build" "--skip_submodule_sync" "--parallel" "--build_wheel") - -if [ "$BUILD_CONFIG" == "Debug" ]; then - CFLAGS="-ggdb3" - CXXFLAGS="-ggdb3" -else - CFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -O3 -pipe -Wl,--strip-all" - CXXFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -O3 -pipe -Wl,--strip-all" - BUILD_ARGS+=("--enable_lto") -fi - -# Depending on how the compiler has been configured when it was built, sometimes "gcc -dumpversion" shows the full version. -GCC_VERSION=$(gcc -dumpversion | cut -d . -f 1) -#-fstack-clash-protection prevents attacks based on an overlapping heap and stack. -if [ "$GCC_VERSION" -ge 8 ]; then - CFLAGS="$CFLAGS -fstack-clash-protection" - CXXFLAGS="$CXXFLAGS -fstack-clash-protection" -fi - -ARCH=$(uname -m) - -if [ "$ARCH" == "x86_64" ] && [ "$GCC_VERSION" -ge 9 ]; then - CFLAGS="$CFLAGS -fcf-protection" - CXXFLAGS="$CXXFLAGS -fcf-protection" -fi - -echo "EXTRA_ARG:" -echo $EXTRA_ARG - -if [ "$EXTRA_ARG" != "" ]; then - BUILD_ARGS+=("$EXTRA_ARG") -fi - -if [ "$ARCH" == "x86_64" ]; then - #ARM build machines do not have the test data yet. - BUILD_ARGS+=("--enable_onnx_tests") -fi - -if [ "$BUILD_DEVICE" == "GPU" ]; then - #Enable CUDA and TRT EPs. 
- ONNXRUNTIME_CUDA_VERSION="12.2" - BUILD_ARGS+=("--nvcc_threads=1" "--use_cuda" "--use_tensorrt" "--cuda_version=$ONNXRUNTIME_CUDA_VERSION" "--tensorrt_home=/usr" "--cuda_home=/usr/local/cuda-$ONNXRUNTIME_CUDA_VERSION" "--cudnn_home=/usr/local/cuda-$ONNXRUNTIME_CUDA_VERSION" "--cmake_extra_defines" "CMAKE_CUDA_ARCHITECTURES=52;60;61;70;75;80") -fi - -export CFLAGS -export CXXFLAGS -for PYTHON_EXE in "${PYTHON_EXES[@]}" -do - rm -rf /build/$BUILD_CONFIG - ${PYTHON_EXE} /onnxruntime_src/tools/ci_build/build.py "${BUILD_ARGS[@]}" - - cp /build/$BUILD_CONFIG/dist/*.whl /build/dist -done - -which ccache && ccache -sv && ccache -z diff --git a/tools/ci_build/github/linux/run_python_dockerbuild.sh b/tools/ci_build/github/linux/run_python_dockerbuild.sh index 0f36550819c4e..c24c390c24afc 100755 --- a/tools/ci_build/github/linux/run_python_dockerbuild.sh +++ b/tools/ci_build/github/linux/run_python_dockerbuild.sh @@ -2,13 +2,14 @@ set -e -x BUILD_CONFIG="Release" -while getopts "i:d:x:c:" parameter_Option +while getopts "i:d:x:c:u:" parameter_Option do case "${parameter_Option}" in i) DOCKER_IMAGE=${OPTARG};; d) DEVICE=${OPTARG};; x) BUILD_EXTR_PAR=${OPTARG};; c) BUILD_CONFIG=${OPTARG};; +u) CUDA_VERSION=${OPTARG};; esac done @@ -23,7 +24,7 @@ docker run --rm \ -e NIGHTLY_BUILD \ -e BUILD_BUILDNUMBER \ $ADDITIONAL_DOCKER_PARAMETER \ - $DOCKER_IMAGE tools/ci_build/github/linux/build_linux_python_package_cuda12.sh -d $DEVICE -c $BUILD_CONFIG -x $BUILD_EXTR_PAR + $DOCKER_IMAGE tools/ci_build/github/linux/build_linux_arm64_python_package.sh -d $DEVICE -c $BUILD_CONFIG -x $BUILD_EXTR_PAR -u $CUDA_VERSION sudo rm -rf $BUILD_BINARIESDIRECTORY/$BUILD_CONFIG/onnxruntime $BUILD_BINARIESDIRECTORY/$BUILD_CONFIG/pybind11 \ $BUILD_BINARIESDIRECTORY/$BUILD_CONFIG/models $BUILD_BINARIESDIRECTORY/$BUILD_CONFIG/_deps \