diff --git a/tools/ci_build/build.py b/tools/ci_build/build.py
index aa1198102f978..dbd9b40d30425 100644
--- a/tools/ci_build/build.py
+++ b/tools/ci_build/build.py
@@ -260,6 +260,7 @@ def convert_arg_line_to_args(self, arg_line):
     )
 
     parser.add_argument("--disable_cuda_nhwc_ops", action="store_true", help="Disable CUDA NHWC ops in build.")
+    parser.add_argument("--enable_cuda_minimal_build", action="store_true", help="Enable CUDA minimal build.")
 
     # Python bindings
     parser.add_argument("--enable_pybind", action="store_true", help="Enable Python Bindings.")
@@ -1093,6 +1094,7 @@ def generate_build_tree(
         "-Donnxruntime_DISABLE_FLOAT8_TYPES=" + ("ON" if disable_float8_types else "OFF"),
         "-Donnxruntime_DISABLE_SPARSE_TENSORS=" + ("ON" if disable_sparse_tensors else "OFF"),
         "-Donnxruntime_DISABLE_OPTIONAL_TYPE=" + ("ON" if disable_optional_type else "OFF"),
+        "-Donnxruntime_CUDA_MINIMAL=" + ("ON" if args.enable_cuda_minimal_build else "OFF"),
     ]
 
     if args.rv64:
diff --git a/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-cuda-minimal-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-cuda-minimal-ci-pipeline.yml
new file mode 100644
index 0000000000000..2a32dd1a62408
--- /dev/null
+++ b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-cuda-minimal-ci-pipeline.yml
@@ -0,0 +1,108 @@
+##### start trigger Don't edit it manually, Please do edit set-trigger-rules.py ####
+### please do rerun set-trigger-rules.py ###
+trigger:
+  branches:
+    include:
+    - main
+    - rel-*
+  paths:
+    exclude:
+    - docs/**
+    - README.md
+    - CONTRIBUTING.md
+    - BUILD.md
+    - 'js/web'
+    - 'onnxruntime/core/providers/js'
+pr:
+  branches:
+    include:
+    - main
+    - rel-*
+  paths:
+    exclude:
+    - docs/**
+    - README.md
+    - CONTRIBUTING.md
+    - BUILD.md
+    - 'js/web'
+    - 'onnxruntime/core/providers/js'
+#### end trigger ####
+parameters:
+  - name: CudaVersion
+    displayName: CUDA version
+    type: string
+    default: '12.2'
+    values:
+      - 11.8
+      - 12.2
+
+variables:
+  - template: templates/common-variables.yml
+  - name: docker_base_image
+    ${{ if eq(parameters.CudaVersion, '11.8') }}:
+      value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20241120.3
+    ${{ if eq(parameters.CudaVersion, '12.2') }}:
+      value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20241120.3
+  - name: linux_trt_version
+    ${{ if eq(parameters.CudaVersion, '11.8') }}:
+      value: ${{ variables.linux_trt_version_cuda11 }}
+    ${{ if eq(parameters.CudaVersion, '12.2') }}:
+      value: ${{ variables.linux_trt_version_cuda12 }}
+
+jobs:
+- job: Linux_Build
+  timeoutInMinutes: 180
+  variables:
+    skipComponentGovernanceDetection: true
+    ALLOW_RELEASED_ONNX_OPSET_ONLY: '1'
+    ORT_CACHE_DIR: '$(Agent.TempDirectory)/ort/ccache'
+    TODAY: $[format('{0:dd}{0:MM}{0:yyyy}', pipeline.startTime)]
+  workspace:
+    clean: all
+  pool: onnxruntime-tensorrt-linuxbuild-T4
+  steps:
+  - task: mspremier.PostBuildCleanup.PostBuildCleanup-task.PostBuildCleanup@3
+    displayName: 'Clean Agent Directories'
+    condition: always()
+
+  - checkout: self
+    clean: true
+    submodules: none
+
+  - template: templates/get-docker-image-steps.yml
+    parameters:
+      Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda
+      Context: tools/ci_build/github/linux/docker
+      DockerBuildArgs: "
+        --network=host
+        --build-arg BASEIMAGE=${{ variables.docker_base_image }}
+        --build-arg TRT_VERSION=${{ variables.linux_trt_version }}
+        --build-arg BUILD_UID=$( id -u )
+        "
+      Repository: onnxruntimetensorrtcudaminimalbuild
+
+  - template: templates/linux-build-step-with-cache.yml
+    parameters:
+      WithCache: true
+      Today: $(TODAY)
+      AdditionalKey: gpu_tensorrt_cuda_minimal
+      CacheDir: '$(ORT_CACHE_DIR)'
+      BuildStep:
+        - task: CmdLine@2
+          inputs:
+            script: |
+              docker run --gpus all --rm \
+                --volume /data/onnx:/data/onnx:ro \
+                --volume $(Build.SourcesDirectory):/onnxruntime_src \
+                --volume $(Build.BinariesDirectory):/build \
+                --volume /data/models:/build/models:ro \
+                --volume $HOME/.onnx:/home/onnxruntimedev/.onnx \
+                --volume $(ORT_CACHE_DIR):/cache \
+                -e ALLOW_RELEASED_ONNX_OPSET_ONLY=0 \
+                -e NIGHTLY_BUILD \
+                -e BUILD_BUILDNUMBER \
+                -e CCACHE_DIR=/cache -w /onnxruntime_src \
+                onnxruntimetensorrtcudaminimalbuild tools/ci_build/github/linux/build_tensorrt_ci.sh --cuda_minimal=ON
+            workingDirectory: $(Build.SourcesDirectory)
+
+  - template: templates/explicitly-defined-final-tasks.yml
diff --git a/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-cuda-minimal-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-cuda-minimal-ci-pipeline.yml
new file mode 100644
index 0000000000000..c68ba01485db2
--- /dev/null
+++ b/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-cuda-minimal-ci-pipeline.yml
@@ -0,0 +1,86 @@
+##### start trigger Don't edit it manually, Please do edit set-trigger-rules.py ####
+### please do rerun set-trigger-rules.py ###
+trigger:
+  branches:
+    include:
+    - main
+    - rel-*
+  paths:
+    exclude:
+    - docs/**
+    - README.md
+    - CONTRIBUTING.md
+    - BUILD.md
+    - 'js/web'
+    - 'onnxruntime/core/providers/js'
+pr:
+  branches:
+    include:
+    - main
+    - rel-*
+  paths:
+    exclude:
+    - docs/**
+    - README.md
+    - CONTRIBUTING.md
+    - BUILD.md
+    - 'js/web'
+    - 'onnxruntime/core/providers/js'
+#### end trigger ####
+parameters:
+- name: CudaVersion
+  displayName: CUDA version
+  type: string
+  default: '12.2'
+  values:
+    - 11.8
+    - 12.2
+
+variables:
+  - template: templates/common-variables.yml
+  - name: win_trt_folder
+    ${{ if eq(parameters.CudaVersion, '11.8') }}:
+      value: ${{ variables.win_trt_folder_cuda11 }}
+    ${{ if eq(parameters.CudaVersion, '12.2') }}:
+      value: ${{ variables.win_trt_folder_cuda12 }}
+
+jobs:
+- job: 'build'
+  pool: 'onnxruntime-Win2022-GPU-A10'
+  variables:
+    MsbuildArguments: '-detailedsummary -maxcpucount -consoleloggerparameters:PerformanceSummary'
+    EnvSetupScript: setup_env_trt.bat
+    skipComponentGovernanceDetection: true
+    TODAY: $[format('{0:dd}{0:MM}{0:yyyy}', pipeline.startTime)]
+  timeoutInMinutes: 150
+  workspace:
+    clean: all
+  steps:
+  - template: templates/jobs/win-ci-prebuild-steps.yml
+    parameters:
+      EnvSetupScript: $(EnvSetupScript)
+      DownloadCUDA: true
+      DownloadTRT: true
+      BuildArch: 'x64'
+      BuildConfig: RelWithDebInfo
+      MachinePool: 'onnxruntime-Win2022-GPU-A10'
+      WithCache: true
+      Today: $(Today)
+
+  - template: templates/jobs/win-ci-build-steps.yml
+    parameters:
+      WithCache: True
+      Today: $(TODAY)
+      AdditionalKey: "gpu_tensorrt_cuda_minimal | RelWithDebInfo"
+      BuildPyArguments: '--config RelWithDebInfo --parallel --use_binskim_compliant_compile_flags --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --update --cmake_generator "Visual Studio 17 2022" --build_wheel --enable_onnx_tests --use_tensorrt --tensorrt_home="$(Agent.TempDirectory)\${{ variables.win_trt_folder }}" --cuda_home="$(Agent.TempDirectory)\v${{ parameters.CudaVersion }}" --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=86 --enable_cuda_minimal_build'
+      MsbuildArguments: $(MsbuildArguments)
+      BuildArch: 'x64'
+      Platform: 'x64'
+      BuildConfig: RelWithDebInfo
+
+  - task: PythonScript@0
+    displayName: 'Build wheel'
+    inputs:
+      scriptPath: '$(Build.SourcesDirectory)\setup.py'
+      arguments: 'bdist_wheel'
+      workingDirectory: '$(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo'
diff --git a/tools/ci_build/github/linux/build_tensorrt_ci.sh b/tools/ci_build/github/linux/build_tensorrt_ci.sh
index 5b206bc0a92d9..ccf7a6f4ea630 100755
--- a/tools/ci_build/github/linux/build_tensorrt_ci.sh
+++ b/tools/ci_build/github/linux/build_tensorrt_ci.sh
@@ -21,6 +21,19 @@ BUILD_ARGS=('--config' 'Release'
             "CMAKE_CUDA_ARCHITECTURES=75"
             "onnxruntime_BUILD_UNIT_TESTS=ON"
             "onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON")
+
+# Parse external args
+for arg in "$@"; do
+  case $arg in
+    --cuda_minimal=ON)
+      # Replace onnxruntime_BUILD_UNIT_TESTS=ON with OFF
+      BUILD_ARGS=("${BUILD_ARGS[@]/onnxruntime_BUILD_UNIT_TESTS=ON/onnxruntime_BUILD_UNIT_TESTS=OFF}")
+      BUILD_ARGS+=("--enable_cuda_minimal_build")
+      BUILD_ARGS+=("--skip_tests")
+      ;;
+  esac
+done
+
 if [ -x "$(command -v ninja)" ]; then
     BUILD_ARGS+=('--cmake_generator' 'Ninja')
 fi