diff --git a/.config/1espt/PipelineAutobaseliningConfig.yml b/.config/1espt/PipelineAutobaseliningConfig.yml
index daa9b73d5971a..183d52d5c1d44 100644
--- a/.config/1espt/PipelineAutobaseliningConfig.yml
+++ b/.config/1espt/PipelineAutobaseliningConfig.yml
@@ -5,15 +5,16 @@ pipelines:
retail:
source:
credscan:
- lastModifiedDate: 2024-10-24
+ lastModifiedDate: 2024-10-25
policheck:
- lastModifiedDate: 2024-10-24
+ lastModifiedDate: 2024-10-25
eslint:
- lastModifiedDate: 2024-10-24
+ lastModifiedDate: 2024-10-25
psscriptanalyzer:
- lastModifiedDate: 2024-10-24
+ lastModifiedDate: 2024-10-25
armory:
- lastModifiedDate: 2024-10-24
+ lastModifiedDate: 2024-10-25
+ usedNonDefaultBranch: true
1299:
retail:
source:
@@ -25,6 +26,8 @@ pipelines:
lastModifiedDate: 2024-10-25
armory:
lastModifiedDate: 2024-10-25
+ policheck:
+ lastModifiedDate: 2024-10-29
binary:
credscan:
lastModifiedDate: 2024-10-25
@@ -32,3 +35,43 @@ pipelines:
lastModifiedDate: 2024-10-25
spotbugs:
lastModifiedDate: 2024-10-25
+ 1625:
+ retail:
+ source:
+ credscan:
+ lastModifiedDate: 2024-11-05
+ policheck:
+ lastModifiedDate: 2024-11-05
+ eslint:
+ lastModifiedDate: 2024-11-05
+ psscriptanalyzer:
+ lastModifiedDate: 2024-11-05
+ armory:
+ lastModifiedDate: 2024-11-05
+ binary:
+ credscan:
+ lastModifiedDate: 2024-11-13
+ binskim:
+ lastModifiedDate: 2024-11-13
+ spotbugs:
+ lastModifiedDate: 2024-11-13
+ 1626:
+ retail:
+ source:
+ credscan:
+ lastModifiedDate: 2024-11-13
+ policheck:
+ lastModifiedDate: 2024-11-13
+ eslint:
+ lastModifiedDate: 2024-11-13
+ psscriptanalyzer:
+ lastModifiedDate: 2024-11-13
+ armory:
+ lastModifiedDate: 2024-11-13
+ binary:
+ credscan:
+ lastModifiedDate: 2024-11-13
+ binskim:
+ lastModifiedDate: 2024-11-13
+ spotbugs:
+ lastModifiedDate: 2024-11-13
diff --git a/.config/guardian/.gdnbaselines b/.config/guardian/.gdnbaselines
new file mode 100644
index 0000000000000..a7ee2a4b69dda
--- /dev/null
+++ b/.config/guardian/.gdnbaselines
@@ -0,0 +1,43 @@
+{
+ "properties": {
+ "helpUri": "https://eng.ms/docs/microsoft-security/security/azure-security/cloudai-security-fundamentals-engineering/security-integration/guardian-wiki/microsoft-guardian/general/baselines"
+ },
+ "version": "1.0.0",
+ "baselines": {
+ "default": {
+ "name": "default",
+ "createdDate": "2024-11-13 00:40:35Z",
+ "lastUpdatedDate": "2024-11-13 00:40:35Z"
+ }
+ },
+ "results": {
+ "48f03e2797fc40ecea50f878a0268947c7e13db1b2fa51aa3981246844fc4c68": {
+ "signature": "48f03e2797fc40ecea50f878a0268947c7e13db1b2fa51aa3981246844fc4c68",
+ "alternativeSignatures": [],
+ "target": "ScanTelemetry_20241113003616898.json",
+ "line": 1,
+ "memberOf": [
+ "default"
+ ],
+ "tool": "credscan",
+ "ruleId": "CSCAN-AZURE0130",
+ "createdDate": "2024-11-13 00:40:35Z",
+ "expirationDate": "2025-05-02 01:29:47Z",
+ "justification": "This error is baselined with an expiration date of 180 days from 2024-11-13 01:29:47Z"
+ },
+ "9cb6eddb3f3e886ad06cae65f5886412ff0c5fb0b96d4e943e4efa237be617b1": {
+ "signature": "9cb6eddb3f3e886ad06cae65f5886412ff0c5fb0b96d4e943e4efa237be617b1",
+ "alternativeSignatures": [],
+ "target": "ScanTelemetry_20241113111547065.json",
+ "line": 1,
+ "memberOf": [
+ "default"
+ ],
+ "tool": "credscan",
+ "ruleId": "CSCAN-AZURE0130",
+ "createdDate": "2024-11-13 11:20:17Z",
+ "expirationDate": "2025-05-02 11:55:15Z",
+ "justification": "This error is baselined with an expiration date of 180 days from 2024-11-13 11:55:15Z"
+ }
+ }
+}
\ No newline at end of file
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index d3b51c0681a20..d1dc717c2a9c9 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -15,6 +15,10 @@ on:
schedule:
- cron: '41 13 * * 0'
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+
jobs:
analyze:
name: Analyze
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index ec834b07b2c78..64785574c7728 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -37,6 +37,9 @@ jobs:
# Required workflow
name: Python format
runs-on: ["self-hosted", "1ES.Pool=onnxruntime-github-Ubuntu2204-AMD-CPU"]
+ permissions:
+ contents: read
+ security-events: write
steps:
- uses: actions/checkout@v4
- name: Setup Python
@@ -49,10 +52,15 @@ jobs:
with:
toolchain: stable
components: rustfmt
+ - name: Update PATH
+ run: |
+ echo "$HOME/.local/bin" >> "$GITHUB_PATH"
+
- name: Install dependencies
run: |
- python -m pip install -r requirements-dev.txt
- python -m pip install lintrunner lintrunner-adapters
+ set -e -x
+ python -m pip install --user -r requirements-dev.txt
+ python -m pip install --user lintrunner lintrunner-adapters
lintrunner init
- name: Run lintrunner on all files
run: |
@@ -81,8 +89,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
+ - name: Update PATH
+ run: |
+ echo "$HOME/.local/bin" >> "$GITHUB_PATH"
+
- name: Install ninja
- run: python -m pip install --upgrade ninja
+ run: python -m pip install --user --upgrade ninja
- name: Generate compile_commands.json
run: |
python tools/ci_build/build.py \
diff --git a/.github/workflows/pr_checks.yml b/.github/workflows/pr_checks.yml
index af3f00c4e35ab..af890d88995be 100644
--- a/.github/workflows/pr_checks.yml
+++ b/.github/workflows/pr_checks.yml
@@ -41,12 +41,12 @@ jobs:
- name: Install dependencies and run lintrunner on all files
run: |
- set -e
python -m pip install --user -r requirements-dev.txt
- python -m pip install --user lintrunner lintrunner-adapters
+ python -m pip install --user lintrunner lintrunner-adapters
lintrunner init
+ set +e
lintrunner f --all-files -v
exit 0
- - uses: parkerbxyz/suggest-changes@v1
+ - uses: parkerbxyz/suggest-changes@v2
with:
comment: 'You can commit the suggested changes from lintrunner.'
diff --git a/.github/workflows/publish-csharp-apidocs.yml b/.github/workflows/publish-csharp-apidocs.yml
index c704adb263db4..7cca0969a168b 100644
--- a/.github/workflows/publish-csharp-apidocs.yml
+++ b/.github/workflows/publish-csharp-apidocs.yml
@@ -20,18 +20,17 @@ permissions:
jobs:
build:
- runs-on: ["self-hosted", "1ES.Pool=onnxruntime-github-Ubuntu2204-AMD-CPU"]
+ runs-on: ["self-hosted", "1ES.Pool=onnxruntime-github-vs2022-mms"]
env:
DOCFXVERSION: 2.62.2
steps:
- uses: actions/checkout@v4
- - name: Setup .NET
- uses: actions/setup-dotnet@v4
- with:
- dotnet-version: 8.0.x
- name: Install DocFX
run: |
dotnet tool update -g docfx
+ - name: Update PATH
+ run: |
+ Add-Content -Value "$env:USERPROFILE\.dotnet\tools" -Encoding utf8 -Path $env:GITHUB_PATH
# NOTE: We need to restore Microsoft.ML.OnnxRuntime.csproj manually to set IncludeMobileTargets=false
# docfx doesn't seem to be able to do that properly resulting in build errors
- name: Restore dependencies
@@ -50,10 +49,12 @@ jobs:
- name: Log source commit
run: git rev-parse --short HEAD > csharp/ApiDocs/csharp/source-version.txt
- name: Move C# docs into site
+ shell: pwsh
run: |
- mkdir -p _site/docs/api
- rm -rf _site/docs/api/csharp
- mv csharp/ApiDocs/csharp _site/docs/api/csharp
+ New-Item -Path _site/docs/api -Force -ItemType "Directory" | Out-Null
+ $OutputDirectory="_site/docs/api/csharp"
+ if (Test-Path $OutputDirectory) { Remove-Item -Recurse -Force $OutputDirectory }
+ Move-Item -Path csharp\ApiDocs\csharp -Destination $OutputDirectory
- name: Upload docs artifact
uses: actions/upload-artifact@v4
with:
diff --git a/.github/workflows/publish-python-apidocs.yml b/.github/workflows/publish-python-apidocs.yml
index 2be9ad957c5cb..adc2346d1bf1b 100644
--- a/.github/workflows/publish-python-apidocs.yml
+++ b/.github/workflows/publish-python-apidocs.yml
@@ -32,10 +32,10 @@ jobs:
sudo apt-get install graphviz
- name: Install dependencies
run: |
- python3 -m pip install --upgrade pip
+ python3 -m pip install --user --upgrade pip
cd docs/python
- python3 -m pip install -r requirements.txt
- python3 -m pip install --pre onnxruntime-training -f https://download.onnxruntime.ai/onnxruntime_nightly_cpu.html
+ python3 -m pip install --user -r requirements.txt
+ python3 -m pip install --user --pre onnxruntime-training -f https://download.onnxruntime.ai/onnxruntime_nightly_cpu.html
python3 -m pip list
- name: Generate Python docs with Sphinx
run: |
diff --git a/.pipelines/nuget_config/x64/packages.config b/.pipelines/nuget_config/x64/packages.config
index 294bd926a34cb..b9932eb563b83 100644
--- a/.pipelines/nuget_config/x64/packages.config
+++ b/.pipelines/nuget_config/x64/packages.config
@@ -1,6 +1,6 @@
-
+
diff --git a/.pipelines/nuget_config/x86/packages.config b/.pipelines/nuget_config/x86/packages.config
index 3528545dfb06e..37fe2d378b7fd 100644
--- a/.pipelines/nuget_config/x86/packages.config
+++ b/.pipelines/nuget_config/x86/packages.config
@@ -1,6 +1,6 @@
-
+
diff --git a/CPPLINT.cfg b/CPPLINT.cfg
new file mode 100644
index 0000000000000..12c1c7be0d773
--- /dev/null
+++ b/CPPLINT.cfg
@@ -0,0 +1 @@
+filter=-whitespace
diff --git a/README.md b/README.md
index 8452e26a58d4d..f1817282b61a0 100644
--- a/README.md
+++ b/README.md
@@ -24,8 +24,8 @@
|System|Inference|Training|
|---|---|---|
-|Windows|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20CPU%20CI%20Pipeline?label=Windows+CPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=9) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20GPU%20CI%20Pipeline?label=Windows+GPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=10) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20GPU%20TensorRT%20CI%20Pipeline?label=Windows+GPU+TensorRT)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=47)||
-|Linux|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20CI%20Pipeline?label=Linux+CPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=11) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20Minimal%20Build%20E2E%20CI%20Pipeline?label=Linux+CPU+Minimal+Build)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=64) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20GPU%20CI%20Pipeline?label=Linux+GPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=12) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20GPU%20TensorRT%20CI%20Pipeline?label=Linux+GPU+TensorRT)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=45) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20OpenVINO%20CI%20Pipeline?label=Linux+OpenVINO)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=55)|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-linux-ci-pipeline?label=Linux+CPU+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=86) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-linux-gpu-ci-pipeline?label=Linux+GPU+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=84) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining/orttraining-ortmodule-distributed?label=Training+Distributed)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=148)|
+|Windows|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20CPU%20CI%20Pipeline?label=Windows+CPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=9) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20GPU%20CUDA%20CI%20Pipeline?label=Windows+GPU+CUDA)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=218) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20GPU%20TensorRT%20CI%20Pipeline?label=Windows+GPU+TensorRT)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=47) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20GPU%20WebGPU%20CI%20Pipeline?label=Windows+GPU+WebGPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=228)||
+|Linux|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20CI%20Pipeline?label=Linux+CPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=11) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20Minimal%20Build%20E2E%20CI%20Pipeline?label=Linux+CPU+Minimal+Build)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=64) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20GPU%20CI%20Pipeline?label=Linux+GPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=12) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20GPU%20TensorRT%20CI%20Pipeline?label=Linux+GPU+TensorRT)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=45) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20OpenVINO%20CI%20Pipeline?label=Linux+OpenVINO)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=55)|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-linux-ci-pipeline?label=Linux+CPU+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=86) [![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-linux-gpu-ci-pipeline?label=Linux+GPU+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=84)|
|Mac|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/MacOS%20CI%20Pipeline?label=MacOS+CPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=13)||
|Android|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Android%20CI%20Pipeline?label=Android)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=53)||
|iOS|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/iOS%20CI%20Pipeline?label=iOS)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=134)||
diff --git a/ThirdPartyNotices.txt b/ThirdPartyNotices.txt
index 20142e734dfac..26084ab42ec1c 100644
--- a/ThirdPartyNotices.txt
+++ b/ThirdPartyNotices.txt
@@ -2108,261 +2108,6 @@ SOFTWARE.
_____
-TVM Open Deep Learning Compiler Stack
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright {yyyy} {name of copyright owner}
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-CONTRIBUTORS
-
-TVM Contributors
-================
-TVM adopts the Apache style model and governs by merit. We believe that it is important to create an inclusive community where everyone can use,
-contribute to, and influence the direction of the project. We actively invite contributors who have earned the merit to be part of the development community.
-
-See the [community structure document](http://docs.tvm.ai/contribute/community.html) for the explanation of community structure and contribution guidelines.
-
-## Committers
-- [Tianqi Chen](https://github.com/tqchen) (PMC)
-- [Thierry Moreau](http://homes.cs.washington.edu/~moreau/)
-- [Ziheng Jiang](https://github.com/ZihengJiang)
-- [Haichen Shen](http://homes.cs.washington.edu/~haichen/)
-- [Yizhi Liu](https://github.com/yzhliu)
-
-## Code Owners
-- [Aditya Atluri](https://github.com/adityaatluri) ROCM
-- [Leyuan Wang](https://github.com/Laurawly) TOPI
-- [Yuwei Hu](https://github.com/Huyuwei) TOPI
-- [Zhixun Tan](https://github.com/phisiart) OpenGL/WebGL backend
-- [Nick Hynes](https://github.com/nhynes) SGX and secured computing
-- [Lianmin Zheng](https://github.com/merrymercy) AutoTVM
-
-## Reviewers
-- [Zhi Chen](https://github.com/zhiics)
-- [Xiaoqiang Dan](https://github.com/xqdan)
-- [Liangfu Chen](https://github.com/liangfu)
-- [Masahiro Masuda](https://github.com/masahi)
-- [Kazutaka Morita](https://github.com/kazum)
-- [Tatsuya Nishiyama](https://github.com/nishi-t)
-- [Pariksheet Pinjari](https://github.com/PariksheetPinjari909)
-- [Jared Roesch](https://github.com/jroesch)
-- [Siva](https://github.com/srkreddy1238)
-- [Siju Samuel](https://github.com/siju-samuel)
-- [Alex Weaver](https://github.com/alex-weaver)
-- [Yao Wang](https://github.com/kevinthesun)
-- [Jian Weng](https://github.com/were)
-- [Eddie Yan](https://github.com/eqy)
-- [Joshua Z. Zhang](https://github.com/zhreshold)
-
-## List of Contributors
-- [Full List of Contributors](https://github.com/dmlc/tvm/graphs/contributors)
- - To contributors: please add your name to the list.
-- [Qiao Zhang](https://github.com/zhangqiaorjc)
-- [Haolong Zhang](https://github.com/haolongzhangm)
-- [Cody Hao Yu](https://github.com/comaniac)
-- [Chris Nuernberger](https://github.com/cnuernber)
-
-_____
-
FreeBSD: getopt.c file
Copyright (c) 1987, 1993, 1994
diff --git a/cgmanifests/cgmanifest.json b/cgmanifests/cgmanifest.json
index 1432193ac9080..46349f43923e2 100644
--- a/cgmanifests/cgmanifest.json
+++ b/cgmanifests/cgmanifest.json
@@ -1,578 +1,508 @@
{
- "$schema": "https://json.schemastore.org/component-detection-manifest.json",
- "Registrations": [
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "215105818dfde3174fe799600bb0f3cae233d0bf",
- "repositoryUrl": "https://github.com/abseil/abseil-cpp.git"
- }
- }
- },
- {
- "component": {
- "Type": "maven",
- "maven": {
- "GroupId": "org.junit.platform",
- "ArtifactId": "junit-platform-console-standalone",
- "Version": "1.6.2"
- },
- "DevelopmentDependency": true
- }
- },
- {
- "component": {
- "Type": "maven",
- "maven": {
- "GroupId": "com.google.protobuf",
- "ArtifactId": "protobuf-java",
- "Version": "3.21.7"
- },
- "DevelopmentDependency": true
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "2379917985919ed3918dc12cad47f469f245be7a",
- "repositoryUrl": "https://github.com/apache/tvm.git"
- },
- "comments": "needed for TVM EP"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "cabe04d6d6b05356fa8f9741704924788f0dd762",
- "repositoryUrl": "https://github.com/agauniyal/rang.git"
- },
- "comments": "dependency from tvm"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "a3bcc6981d5dad3afb212689e2c7853d1b1ee45d",
- "repositoryUrl": "https://github.com/NVIDIA/cutlass.git"
- },
- "comments": "dependency from tvm"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "08f7c7e69f8ea61a0c4151359bc8023be8e9217b",
- "repositoryUrl": "https://github.com/tlc-pack/libbacktrace.git"
- },
- "comments": "dependency from tvm"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "36a91576edf633479c78649e050f18dd2ddc8103",
- "repositoryUrl": "https://github.com/apache/incubator-tvm-vta.git"
- },
- "comments": "dependency from tvm"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "111c9be5188f7350c2eac9ddaedd8cca3d7bf394",
- "repositoryUrl": "https://github.com/kazuho/picojson.git"
- },
- "comments": "dependency from tvm"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "b5e4186d7ab63458e79084842dced166be2ca5b5",
- "repositoryUrl": "https://github.com/lammertb/libcrc.git"
- },
- "comments": "dependency from tvm"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "e4a4c02764d37c9c3db0d64c4996651a3ef9513c",
- "repositoryUrl": "https://github.com/dmlc/HalideIR.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "bee4d1dd8dc1ee4a1fd8fa6a96476c2f8b7492a3",
- "repositoryUrl": "https://github.com/dmlc/dlpack.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "4d49691f1a9d944c3b0aa5e63f1db3cad1f941f8",
- "repositoryUrl": "https://github.com/dmlc/dmlc-core.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "7de7e5d02bf687f971e7668963649728356e0c20",
- "repositoryUrl": "https://github.com/intel/mkl-dnn.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "d860915b0198ddb96f93e9e97a789af156544dc6",
- "repositoryUrl": "https://github.com/tensorflow/tensorflow.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "eddf9023206dc40974c26f589ee2ad63a4227a1e",
- "repositoryUrl": "https://github.com/glennrp/libpng.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "217f52fb121ef92491e5d5f71394b07ce4ead1d0",
- "repositoryUrl": "https://github.com/KjellKod/g3log.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "50893291621658f355bc5b4d450a8d06a563053d",
- "repositoryUrl": "https://github.com/madler/zlib.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "d264a2603493fecda607c1d1cda87fedba77d36b",
- "repositoryUrl": "https://github.com/Microsoft/CNTK.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "971e2e89d08deeae0139d3011d15646fdac13c92",
- "repositoryUrl": "https://github.com/numpy/numpy.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "90537289a04ef5d572496240e2ac3a881be518d2",
- "repositoryUrl": "https://github.com/pytorch/pytorch.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "b31f58de6fa8bbda5353b3c77d9be4914399724d",
- "repositoryUrl": "https://github.com/pytorch/pytorch.git"
- },
- "comments": "pytorch 1.6 used by onnxruntime training image"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "7389dbac82d362f296dc2746f10e43ffa1615660",
- "repositoryUrl": "https://github.com/scikit-learn/scikit-learn.git"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "eeebdab16155d34ff8f5f42137da7df4d1c7eab0",
- "repositoryUrl": "https://github.com/BVLC/caffe.git"
- }
- }
- },
- {
- "component": {
- "Type": "other",
- "Other": {
- "Name": "LLVM",
- "Version": "9.0.0",
- "DownloadUrl": "https://releases.llvm.org/9.0.0/llvm-9.0.0.src.tar.xz"
- }
- }
- },
- {
- "component": {
- "Type": "other",
- "Other": {
- "Name": "FreeBSD GetOpt",
- "Version": "12.0.0",
- "DownloadUrl": "https://svnweb.freebsd.org/base/release/12.0.0/lib/libc/stdlib/getopt.c?revision=341707&view=co"
- }
- }
- },
- {
- "component": {
- "Type": "other",
- "Other": {
- "Name": "Boost",
- "Version": "1.69.0",
- "DownloadUrl": "https://boostorg.jfrog.io/artifactory/main/release/1.69.0/source/boost_1_69_0.tar.bz2"
- }
- }
- },
- {
- "component": {
- "git": {
- "commitHash": "02a2a458ac15912d7d87cc1171e811b0c5219ece",
- "repositoryUrl": "https://github.com/grpc/grpc"
- },
- "type": "git"
- }
- },
- {
- "component": {
- "git": {
- "commitHash": "b29b21a81b32ec273f118f589f46d56ad3332420",
- "repositoryUrl": "https://github.com/google/boringssl.git"
- },
- "type": "git"
- }
- },
- {
- "component": {
- "git": {
- "commitHash": "3be1924221e1326df520f8498d704a5c4c8d0cce",
- "repositoryUrl": "https://github.com/c-ares/c-ares.git"
- },
- "type": "git"
- }
- },
- {
- "component": {
- "git": {
- "commitHash": "6599cac0965be8e5a835ab7a5684bbef033d5ad0",
- "repositoryUrl": "https://github.com/llvm-mirror/libcxx.git"
- },
- "type": "git"
- }
- },
- {
- "component": {
- "git": {
- "commitHash": "9245d481eb3e890f708ff2d7dadf2a10c04748ba",
- "repositoryUrl": "https://github.com/llvm-mirror/libcxxabi.git"
- },
- "type": "git"
- }
- },
- {
- "component": {
- "git": {
- "commitHash": "9ce4a77f61c134bbed28bfd5be5cd7dc0e80f5e3",
- "repositoryUrl": "https://github.com/google/upb.git"
- },
- "type": "git"
- }
- },
- {
- "component": {
- "type": "other",
- "Other": {
- "Name": "Go",
- "Version": "1.12.6",
- "DownloadUrl": "https://dl.google.com/go/go1.12.6.linux-amd64.tar.gz"
- }
- }
- },
- {
- "component": {
- "Type": "other",
- "Other": {
- "Name": "OpenMPI",
- "Version": "4.0.0",
- "DownloadUrl": "https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-4.0.0.tar.gz"
- }
- }
- },
- {
- "component": {
- "Type": "other",
- "Other": {
- "Name": "OpenMPI",
- "Version": "4.0.4",
- "DownloadUrl": "https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-4.0.4.tar.gz"
- },
- "comments": "openmpi 4.0.4 used by onnxruntime training image"
- }
- },
- {
- "component": {
- "Type": "git",
- "git": {
- "commitHash": "7db3f9c741d3dfd8dda14ffb537ed251280d2025",
- "repositoryUrl": "https://github.com/mpi4py/mpi4py"
- },
- "comments": "mpi4py 3.0.3 used by onnxruntime training image"
- }
- },
- {
- "component": {
- "Type": "other",
- "Other": {
- "Name": "NCCL",
- "Version": "2.4.8",
- "DownloadUrl": "https://docs.nvidia.com/deeplearning/sdk/nccl-install-guide/index.html"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "67afac65ce64fd4dce1494f43e565e8fe34bdffb",
- "repositoryUrl": "https://android.googlesource.com/platform/frameworks/ml"
- },
- "comments": "used by onnxruntime"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "c30b7da2301202da5f9f0529966944f110e5d6e7",
- "repositoryUrl": "https://github.com/openucx/ucx"
- },
- "comments": "middleware between IB verbs and OpenMPI used by onnxruntime training image"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "63d1e08e64e7e09408eb63cd8dd7c65ad766f277",
- "repositoryUrl": "https://github.com/nodejs/node"
- },
- "comments": "For Nodejs binding"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "aead4d751c2101e23336aa73f2380df83e7a13f3",
- "repositoryUrl": "https://github.com/pypa/manylinux"
- },
- "comments": "For building our CI build docker image"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "c974557598645360fbabac71352b083117e3cc17",
- "repositoryUrl": "https://gitlab.kitware.com/cmake/cmake"
- },
- "comments": "CMake 3.24.3. For building our CI build docker image"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "1e5d33e9b9b8631b36f061103a30208b206fd03a",
- "repositoryUrl": "https://github.com/python/cpython"
- },
- "comments": "Python 3.9.1"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "6503f05dd59e26a9986bdea097b3da9b3546f45b",
- "repositoryUrl": "https://github.com/python/cpython"
- },
- "comments": "Python 3.8.7"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "13c94747c74437e594b7fc242ff7da668e81887c",
- "repositoryUrl": "https://github.com/python/cpython"
- },
- "comments": "Python 3.7.9"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "c0a9afe2ac1820409e6173bd1893ebee2cf50270",
- "repositoryUrl": "https://github.com/python/cpython"
- },
- "comments": "Python 3.6.12"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "426b022776672fdf3d71ddd98d89af341c88080f",
- "repositoryUrl": "https://github.com/python/cpython"
- },
- "comments": "Python 3.5.10"
- }
- },
- {
- "component": {
- "type": "pip",
- "pip": {
- "Name": "transformers",
- "Version": "4.38.0"
- },
- "comments": "Installed in the training docker image"
- }
- },
- {
- "component": {
- "type": "pip",
- "pip": {
- "Name": "msgpack",
- "Version": "1.0.0"
- },
- "comments": "Installed in the training docker image"
- }
- },
- {
- "component": {
- "type": "pip",
- "pip": {
- "Name": "tensorboardX",
- "Version": "1.8"
- },
- "comments": "Installed in the training docker image"
- }
- },
- {
- "component": {
- "type": "pip",
- "pip": {
- "Name": "tensorboard",
- "Version": "2.3.0"
- },
- "comments": "Installed in the training docker image"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "92cf3702fcfaadc84eb7bef59825a23e0cd84f56",
- "repositoryUrl": "https://github.com/aappleby/smhasher"
- },
- "comments": "MurmurHash3"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "b89da3c5a0aa18fb2c6163ad9984f81ab65b22e3",
- "repositoryUrl": "https://github.com/mestevens/gtest-ios-framework"
- },
- "comments": "gtest-ios-framework"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "277508879878e0a5b5b43599b1bea11f66eb3c6c",
- "repositoryUrl": "https://github.com/dmlc/dlpack.git"
- },
- "comments": "dlpack"
- }
- },
- {
- "component": {
- "Type": "other",
- "Other": {
- "Name": "SQLite3",
- "Version": "3.22.0",
- "DownloadUrl": "http://security.ubuntu.com/ubuntu/pool/main/s/sqlite3/libsqlite3-dev_3.22.0-1ubuntu0.4_amd64.deb"
- }
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "9d0ef119d9fcb9139f831adc224857b791c81140",
- "repositoryUrl": "https://github.com/dlfcn-win32/dlfcn-win32.git"
- },
- "comments": "dlfcn-win32"
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "6812205f18ca4ef54372e87e1a13ce4a859434df",
- "repositoryUrl": "https://github.com/python-pillow/Pillow.git"
- },
- "comments": "python-pillow. Implementation logic for anti-aliasing copied by Resize CPU kernel."
- }
- },
- {
- "component": {
- "type": "git",
- "git": {
- "commitHash": "e7248b26a1ed53fa030c5c459f7ea095dfd276ac",
- "repositoryUrl": "https://gitlab.com/libeigen/eigen.git"
- }
- }
- }
- ],
- "Version": 1
+ "$schema": "https://json.schemastore.org/component-detection-manifest.json",
+ "Registrations": [
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "215105818dfde3174fe799600bb0f3cae233d0bf",
+ "repositoryUrl": "https://github.com/abseil/abseil-cpp.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "Type": "maven",
+ "maven": {
+ "GroupId": "org.junit.platform",
+ "ArtifactId": "junit-platform-console-standalone",
+ "Version": "1.6.2"
+ },
+ "DevelopmentDependency": true
+ }
+ },
+ {
+ "component": {
+ "Type": "maven",
+ "maven": {
+ "GroupId": "com.google.protobuf",
+ "ArtifactId": "protobuf-java",
+ "Version": "3.21.7"
+ },
+ "DevelopmentDependency": true
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "e4a4c02764d37c9c3db0d64c4996651a3ef9513c",
+ "repositoryUrl": "https://github.com/dmlc/HalideIR.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "bee4d1dd8dc1ee4a1fd8fa6a96476c2f8b7492a3",
+ "repositoryUrl": "https://github.com/dmlc/dlpack.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "4d49691f1a9d944c3b0aa5e63f1db3cad1f941f8",
+ "repositoryUrl": "https://github.com/dmlc/dmlc-core.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "7de7e5d02bf687f971e7668963649728356e0c20",
+ "repositoryUrl": "https://github.com/intel/mkl-dnn.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "d860915b0198ddb96f93e9e97a789af156544dc6",
+ "repositoryUrl": "https://github.com/tensorflow/tensorflow.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "eddf9023206dc40974c26f589ee2ad63a4227a1e",
+ "repositoryUrl": "https://github.com/glennrp/libpng.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "217f52fb121ef92491e5d5f71394b07ce4ead1d0",
+ "repositoryUrl": "https://github.com/KjellKod/g3log.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "50893291621658f355bc5b4d450a8d06a563053d",
+ "repositoryUrl": "https://github.com/madler/zlib.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "d264a2603493fecda607c1d1cda87fedba77d36b",
+ "repositoryUrl": "https://github.com/Microsoft/CNTK.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "971e2e89d08deeae0139d3011d15646fdac13c92",
+ "repositoryUrl": "https://github.com/numpy/numpy.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "90537289a04ef5d572496240e2ac3a881be518d2",
+ "repositoryUrl": "https://github.com/pytorch/pytorch.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "b31f58de6fa8bbda5353b3c77d9be4914399724d",
+ "repositoryUrl": "https://github.com/pytorch/pytorch.git"
+ },
+ "comments": "pytorch 1.6 used by onnxruntime training image"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "7389dbac82d362f296dc2746f10e43ffa1615660",
+ "repositoryUrl": "https://github.com/scikit-learn/scikit-learn.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "eeebdab16155d34ff8f5f42137da7df4d1c7eab0",
+ "repositoryUrl": "https://github.com/BVLC/caffe.git"
+ }
+ }
+ },
+ {
+ "component": {
+ "Type": "other",
+ "Other": {
+ "Name": "LLVM",
+ "Version": "9.0.0",
+ "DownloadUrl": "https://releases.llvm.org/9.0.0/llvm-9.0.0.src.tar.xz"
+ }
+ }
+ },
+ {
+ "component": {
+ "Type": "other",
+ "Other": {
+ "Name": "FreeBSD GetOpt",
+ "Version": "12.0.0",
+ "DownloadUrl": "https://svnweb.freebsd.org/base/release/12.0.0/lib/libc/stdlib/getopt.c?revision=341707&view=co"
+ }
+ }
+ },
+ {
+ "component": {
+ "Type": "other",
+ "Other": {
+ "Name": "Boost",
+ "Version": "1.69.0",
+ "DownloadUrl": "https://boostorg.jfrog.io/artifactory/main/release/1.69.0/source/boost_1_69_0.tar.bz2"
+ }
+ }
+ },
+ {
+ "component": {
+ "git": {
+ "commitHash": "02a2a458ac15912d7d87cc1171e811b0c5219ece",
+ "repositoryUrl": "https://github.com/grpc/grpc"
+ },
+ "type": "git"
+ }
+ },
+ {
+ "component": {
+ "git": {
+ "commitHash": "b29b21a81b32ec273f118f589f46d56ad3332420",
+ "repositoryUrl": "https://github.com/google/boringssl.git"
+ },
+ "type": "git"
+ }
+ },
+ {
+ "component": {
+ "git": {
+ "commitHash": "3be1924221e1326df520f8498d704a5c4c8d0cce",
+ "repositoryUrl": "https://github.com/c-ares/c-ares.git"
+ },
+ "type": "git"
+ }
+ },
+ {
+ "component": {
+ "git": {
+ "commitHash": "6599cac0965be8e5a835ab7a5684bbef033d5ad0",
+ "repositoryUrl": "https://github.com/llvm-mirror/libcxx.git"
+ },
+ "type": "git"
+ }
+ },
+ {
+ "component": {
+ "git": {
+ "commitHash": "9245d481eb3e890f708ff2d7dadf2a10c04748ba",
+ "repositoryUrl": "https://github.com/llvm-mirror/libcxxabi.git"
+ },
+ "type": "git"
+ }
+ },
+ {
+ "component": {
+ "git": {
+ "commitHash": "9ce4a77f61c134bbed28bfd5be5cd7dc0e80f5e3",
+ "repositoryUrl": "https://github.com/google/upb.git"
+ },
+ "type": "git"
+ }
+ },
+ {
+ "component": {
+ "type": "other",
+ "Other": {
+ "Name": "Go",
+ "Version": "1.12.6",
+ "DownloadUrl": "https://dl.google.com/go/go1.12.6.linux-amd64.tar.gz"
+ }
+ }
+ },
+ {
+ "component": {
+ "Type": "other",
+ "Other": {
+ "Name": "OpenMPI",
+ "Version": "4.0.0",
+ "DownloadUrl": "https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-4.0.0.tar.gz"
+ }
+ }
+ },
+ {
+ "component": {
+ "Type": "other",
+ "Other": {
+ "Name": "OpenMPI",
+ "Version": "4.0.4",
+ "DownloadUrl": "https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-4.0.4.tar.gz"
+ },
+ "comments": "openmpi 4.0.4 used by onnxruntime training image"
+ }
+ },
+ {
+ "component": {
+ "Type": "git",
+ "git": {
+ "commitHash": "7db3f9c741d3dfd8dda14ffb537ed251280d2025",
+ "repositoryUrl": "https://github.com/mpi4py/mpi4py"
+ },
+ "comments": "mpi4py 3.0.3 used by onnxruntime training image"
+ }
+ },
+ {
+ "component": {
+ "Type": "other",
+ "Other": {
+ "Name": "NCCL",
+ "Version": "2.4.8",
+ "DownloadUrl": "https://docs.nvidia.com/deeplearning/sdk/nccl-install-guide/index.html"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "67afac65ce64fd4dce1494f43e565e8fe34bdffb",
+ "repositoryUrl": "https://android.googlesource.com/platform/frameworks/ml"
+ },
+ "comments": "used by onnxruntime"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "c30b7da2301202da5f9f0529966944f110e5d6e7",
+ "repositoryUrl": "https://github.com/openucx/ucx"
+ },
+ "comments": "middleware between IB verbs and OpenMPI used by onnxruntime training image"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "63d1e08e64e7e09408eb63cd8dd7c65ad766f277",
+ "repositoryUrl": "https://github.com/nodejs/node"
+ },
+ "comments": "For Nodejs binding"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "aead4d751c2101e23336aa73f2380df83e7a13f3",
+ "repositoryUrl": "https://github.com/pypa/manylinux"
+ },
+ "comments": "For building our CI build docker image"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "c974557598645360fbabac71352b083117e3cc17",
+ "repositoryUrl": "https://gitlab.kitware.com/cmake/cmake"
+ },
+ "comments": "CMake 3.24.3. For building our CI build docker image"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "1e5d33e9b9b8631b36f061103a30208b206fd03a",
+ "repositoryUrl": "https://github.com/python/cpython"
+ },
+ "comments": "Python 3.9.1"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "6503f05dd59e26a9986bdea097b3da9b3546f45b",
+ "repositoryUrl": "https://github.com/python/cpython"
+ },
+ "comments": "Python 3.8.7"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "13c94747c74437e594b7fc242ff7da668e81887c",
+ "repositoryUrl": "https://github.com/python/cpython"
+ },
+ "comments": "Python 3.7.9"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "c0a9afe2ac1820409e6173bd1893ebee2cf50270",
+ "repositoryUrl": "https://github.com/python/cpython"
+ },
+ "comments": "Python 3.6.12"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "426b022776672fdf3d71ddd98d89af341c88080f",
+ "repositoryUrl": "https://github.com/python/cpython"
+ },
+ "comments": "Python 3.5.10"
+ }
+ },
+ {
+ "component": {
+ "type": "pip",
+ "pip": {
+ "Name": "transformers",
+ "Version": "4.38.0"
+ },
+ "comments": "Installed in the training docker image"
+ }
+ },
+ {
+ "component": {
+ "type": "pip",
+ "pip": {
+ "Name": "msgpack",
+ "Version": "1.0.0"
+ },
+ "comments": "Installed in the training docker image"
+ }
+ },
+ {
+ "component": {
+ "type": "pip",
+ "pip": {
+ "Name": "tensorboardX",
+ "Version": "1.8"
+ },
+ "comments": "Installed in the training docker image"
+ }
+ },
+ {
+ "component": {
+ "type": "pip",
+ "pip": {
+ "Name": "tensorboard",
+ "Version": "2.3.0"
+ },
+ "comments": "Installed in the training docker image"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "92cf3702fcfaadc84eb7bef59825a23e0cd84f56",
+ "repositoryUrl": "https://github.com/aappleby/smhasher"
+ },
+ "comments": "MurmurHash3"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "b89da3c5a0aa18fb2c6163ad9984f81ab65b22e3",
+ "repositoryUrl": "https://github.com/mestevens/gtest-ios-framework"
+ },
+ "comments": "gtest-ios-framework"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "277508879878e0a5b5b43599b1bea11f66eb3c6c",
+ "repositoryUrl": "https://github.com/dmlc/dlpack.git"
+ },
+ "comments": "dlpack"
+ }
+ },
+ {
+ "component": {
+ "Type": "other",
+ "Other": {
+ "Name": "SQLite3",
+ "Version": "3.22.0",
+ "DownloadUrl": "http://security.ubuntu.com/ubuntu/pool/main/s/sqlite3/libsqlite3-dev_3.22.0-1ubuntu0.4_amd64.deb"
+ }
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "9d0ef119d9fcb9139f831adc224857b791c81140",
+ "repositoryUrl": "https://github.com/dlfcn-win32/dlfcn-win32.git"
+ },
+ "comments": "dlfcn-win32"
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "6812205f18ca4ef54372e87e1a13ce4a859434df",
+ "repositoryUrl": "https://github.com/python-pillow/Pillow.git"
+ },
+ "comments": "python-pillow. Implementation logic for anti-aliasing copied by Resize CPU kernel."
+ }
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "commitHash": "e7248b26a1ed53fa030c5c459f7ea095dfd276ac",
+ "repositoryUrl": "https://gitlab.com/libeigen/eigen.git"
+ }
+ }
+ }
+ ],
+ "Version": 1
}
diff --git a/cgmanifests/generated/cgmanifest.json b/cgmanifests/generated/cgmanifest.json
index c8236c7c529a6..07dff50f9a3bd 100644
--- a/cgmanifests/generated/cgmanifest.json
+++ b/cgmanifests/generated/cgmanifest.json
@@ -196,7 +196,7 @@
"component": {
"type": "git",
"git": {
- "commitHash": "9f98e2ebe7507fe0774d06a44bbf4b0e82cc9ce7",
+ "commitHash": "bc0d2e35909b8456abe32f3b30a49bb0c125e8b7",
"repositoryUrl": "https://github.com/onnx/onnx-tensorrt.git"
},
"comments": "onnx_tensorrt"
@@ -346,7 +346,7 @@
"component": {
"type": "git",
"git": {
- "commitHash": "511eb80847afe6bded34ec491a38d5d78ba2d604",
+ "commitHash": "12a3b24c456cebd9fd11f23ac0164f78129b00c6",
"repositoryUrl": "https://github.com/google/dawn.git"
},
"comments": "dawn"
diff --git a/cmake/CMakeLists.txt b/cmake/CMakeLists.txt
index 1070627d5e7da..d2fe7e7457983 100644
--- a/cmake/CMakeLists.txt
+++ b/cmake/CMakeLists.txt
@@ -86,7 +86,7 @@ option(onnxruntime_USE_CUDA "Build with CUDA support" OFF)
# use. If you hit any problem with that, please do not report it to GTest. Turn OFF the following build option instead.
cmake_dependent_option(onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS "Build with CUDA unit tests" OFF "onnxruntime_USE_CUDA;onnxruntime_BUILD_UNIT_TESTS" OFF)
-option(onnxruntime_USE_CUDA_NHWC_OPS "Build CUDA with NHWC op support" OFF)
+cmake_dependent_option(onnxruntime_USE_CUDA_NHWC_OPS "Build CUDA with NHWC op support" ON "onnxruntime_USE_CUDA" OFF)
option(onnxruntime_CUDA_MINIMAL "Build CUDA without any operations apart from memcpy ops. Usefuel for a very minial TRT build" OFF)
option(onnxruntime_ENABLE_CUDA_LINE_NUMBER_INFO "When building with CUDA support, generate device code line number information." OFF)
option(onnxruntime_USE_OPENVINO "Build with OpenVINO support" OFF)
@@ -102,7 +102,6 @@ option(onnxruntime_BUILD_CSHARP "Build C# library" OFF)
option(onnxruntime_BUILD_OBJC "Build Objective-C library" OFF)
option(onnxruntime_USE_PREINSTALLED_EIGEN "Use pre-installed EIGEN. Need to provide eigen_SOURCE_PATH if turn this on." OFF)
option(onnxruntime_BUILD_BENCHMARKS "Build ONNXRuntime micro-benchmarks" OFF)
-option(onnxruntime_USE_LLVM "Build TVM with LLVM" OFF)
option(onnxruntime_USE_VSINPU "Build with VSINPU support" OFF)
cmake_dependent_option(onnxruntime_USE_FLASH_ATTENTION "Build flash attention kernel for scaled dot product attention" ON "onnxruntime_USE_CUDA" OFF)
@@ -129,6 +128,10 @@ option(onnxruntime_DONT_VECTORIZE "Do not vectorize operations in Eigen" OFF)
option(onnxruntime_USE_FULL_PROTOBUF "Link to libprotobuf instead of libprotobuf-lite when this option is ON" OFF)
option(onnxruntime_DEBUG_NODE_INPUTS_OUTPUTS "Dump debug information about node inputs and outputs when executing the model." OFF)
cmake_dependent_option(onnxruntime_DEBUG_NODE_INPUTS_OUTPUTS_ENABLE_DUMP_TO_SQLDB "Build dump debug information about node inputs and outputs with support for sql database." OFF "onnxruntime_DEBUG_NODE_INPUTS_OUTPUTS" OFF)
+
+# When loading a delay loaded DLL, Windows searches the main EXE's folder first.
+# In a Python process, it searches where python.exe lives, but it doesn't search the python package's installation folder. Therefore we cannot enable this flag when Python is enabled.
+cmake_dependent_option(onnxruntime_ENABLE_DELAY_LOADING_WIN_DLLS "Delay load some of the dependent DLls that are part of the OS" ON "WIN32;NOT GDK_PLATFORM;NOT onnxruntime_ENABLE_PYTHON" OFF)
option(onnxruntime_USE_DML "Build with DirectML support" OFF)
option(onnxruntime_USE_MIGRAPHX "Build with AMDMIGraphX support" OFF)
option(onnxruntime_USE_WINML "Build with WinML support" OFF)
@@ -141,13 +144,15 @@ option(onnxruntime_USE_TELEMETRY "Build with Telemetry" OFF)
cmake_dependent_option(onnxruntime_USE_MIMALLOC "Override new/delete and arena allocator with mimalloc" OFF "WIN32;NOT onnxruntime_USE_CUDA;NOT onnxruntime_USE_OPENVINO" OFF)
option(onnxruntime_USE_CANN "Build with CANN support" OFF)
option(onnxruntime_USE_ROCM "Build with AMD GPU support" OFF)
-option(onnxruntime_USE_TVM "Build with TVM support" OFF)
-option(onnxruntime_TVM_CUDA_RUNTIME "Build TVM with CUDA support" OFF)
-option(onnxruntime_TVM_USE_LLVM "Build TVM with LLVM. Set customized path to llvm-config.exe here if need" OFF)
-option(onnxruntime_TVM_USE_HASH "Build ipp-crypto library for support hash algorithm. It is defined for TVM only")
option(onnxruntime_USE_XNNPACK "Build with XNNPACK support. Provides an alternative math library on ARM, WebAssembly and x86." OFF)
option(onnxruntime_USE_WEBNN "Build with WebNN support. Enable hardware acceleration in web browsers." OFF)
option(onnxruntime_USE_WEBGPU "Build with WebGPU support. Enable WebGPU via C/C++ interface." OFF)
+option(onnxruntime_USE_EXTERNAL_DAWN "Build with treating Dawn as external dependency. Will not link Dawn at build time." OFF)
+option(onnxruntime_CUSTOM_DAWN_SRC_PATH "Path to custom Dawn src dir.")
+option(onnxruntime_BUILD_DAWN_MONOLITHIC_LIBRARY "Build Dawn as a monolithic library" OFF)
+# The following 2 options are only for Windows
+option(onnxruntime_ENABLE_DAWN_BACKEND_VULKAN "Enable Vulkan backend for Dawn (on Windows)" OFF)
+option(onnxruntime_ENABLE_DAWN_BACKEND_D3D12 "Enable D3D12 backend for Dawn (on Windows)" ON)
# Options related to reducing the binary size produced by the build
# XNNPACK EP requires the internal NHWC contrib ops to be available, so this option must be OFF when onnxruntime_USE_XNNPACK is ON
@@ -252,6 +257,7 @@ cmake_dependent_option(MSVC_Z7_OVERRIDE "replacing /Zi and /ZI with /Z7 when usi
option(onnxruntime_USE_AZURE "Build with azure inferencing support" OFF)
option(onnxruntime_USE_LOCK_FREE_QUEUE "Build with lock-free task queue for threadpool." OFF)
+option(onnxruntime_FORCE_GENERIC_ALGORITHMS "Disable optimized arch-specific algorithms. Use only for testing and debugging generic algorithms." OFF)
# ENABLE_TRAINING includes all training functionality
# The following 2 entry points
@@ -901,11 +907,6 @@ if (onnxruntime_USE_SNPE)
list(APPEND ONNXRUNTIME_PROVIDER_NAMES snpe)
list(APPEND ORT_PROVIDER_CMAKE_FLAGS -Donnxruntime_USE_SNPE=1)
endif()
-if (onnxruntime_USE_TVM)
- list(APPEND ORT_PROVIDER_FLAGS -DUSE_TVM=1)
- list(APPEND ORT_PROVIDER_CMAKE_FLAGS -Donnxruntime_USE_TVM=1)
- list(APPEND ONNXRUNTIME_PROVIDER_NAMES tvm)
-endif()
if (onnxruntime_USE_WINML)
list(APPEND ORT_PROVIDER_FLAGS -DUSE_WINML=1)
list(APPEND ORT_PROVIDER_CMAKE_FLAGS -Donnxruntime_USE_WINML=1)
@@ -958,6 +959,18 @@ if (onnxruntime_USE_WEBGPU)
list(APPEND ORT_PROVIDER_FLAGS -DUSE_WEBGPU=1)
list(APPEND ORT_PROVIDER_CMAKE_FLAGS -Donnxruntime_USE_WEBGPU=1)
list(APPEND ONNXRUNTIME_PROVIDER_NAMES webgpu)
+ if (onnxruntime_BUILD_DAWN_MONOLITHIC_LIBRARY)
+ list(APPEND ORT_PROVIDER_FLAGS -DBUILD_DAWN_MONOLITHIC_LIBRARY=1)
+ endif()
+ if (onnxruntime_USE_EXTERNAL_DAWN)
+ list(APPEND ORT_PROVIDER_FLAGS -DUSE_EXTERNAL_DAWN=1)
+ endif()
+ if (onnxruntime_ENABLE_DAWN_BACKEND_VULKAN)
+ list(APPEND ORT_PROVIDER_FLAGS -DDAWN_ENABLE_VULKAN=1)
+ endif()
+ if (onnxruntime_ENABLE_DAWN_BACKEND_D3D12)
+ list(APPEND ORT_PROVIDER_FLAGS -DDAWN_ENABLE_D3D12=1)
+ endif()
endif()
if (onnxruntime_USE_CANN)
list(APPEND ORT_PROVIDER_FLAGS -DUSE_CANN=1)
@@ -973,6 +986,10 @@ if (onnxruntime_USE_LOCK_FREE_QUEUE)
add_compile_definitions(USE_LOCK_FREE_QUEUE)
endif()
+if (onnxruntime_FORCE_GENERIC_ALGORITHMS)
+ add_compile_definitions(FORCE_GENERIC_ALGORITHMS)
+endif()
+
if (onnxruntime_ENABLE_LAZY_TENSOR)
# To support LazyTensor, ORT needs to call Python function from C/C++.
# so onnxruntime_ENABLE_PYTHON is required.
@@ -1305,50 +1322,6 @@ if (onnxruntime_USE_DNNL)
add_compile_definitions(DNNL_OPENMP)
endif()
-# TVM EP
-if (onnxruntime_USE_TVM)
- if (NOT TARGET tvm)
- message(STATUS "Include TVM(*).")
- include(tvm)
- endif()
-
- # ipp-crypto
- if (onnxruntime_TVM_USE_HASH)
- message(STATUS "Include ipp-crypto(*).")
- include(ipp-crypto)
- endif()
-
- # TVM
- if (onnxruntime_TVM_USE_LLVM)
- set(USE_LLVM "${onnxruntime_TVM_USE_LLVM}" CACHE STRING "Path to LLVM for correct TVM build")
- elseif(onnxruntime_USE_LLVM)
- set(USE_LLVM ON CACHE BOOL "Only defined for TVM")
- endif()
-
- if (onnxruntime_TVM_CUDA_RUNTIME)
- set(USE_CUDA ON CACHE BOOL "Only defined for TVM" FORCE)
- endif()
-
- # TODO(vvchernov): customized tvm logger is hidden due to the issue on TVM side (https://github.com/apache/tvm/issues/10139)
- # add_compile_definitions(TVM_LOG_CUSTOMIZE=1)
- # add_library(tvm_custom_logger STATIC ${ONNXRUNTIME_ROOT}/core/providers/tvm/custom_logging.cc)
-
- set(USE_OPENMP gnu CACHE STRING "Only defined for TVM")
- add_subdirectory(${tvm_SOURCE_DIR} ${tvm_BINARY_DIR} EXCLUDE_FROM_ALL)
-
- set_target_properties(tvm PROPERTIES FOLDER ${tvm_SOURCE_DIR})
- # target_link_libraries(tvm PUBLIC tvm_custom_logger)
-
- set(TVM_INCLUDES ${tvm_SOURCE_DIR}/include
- ${tvm_SOURCE_DIR}/3rdparty/dmlc-core/include
- ${tvm_SOURCE_DIR}/3rdparty/dlpack/include
- $)
-
- set(onnxruntime_tvm_libs onnxruntime_providers_tvm)
- list(APPEND onnxruntime_EXTERNAL_LIBRARIES tvm)
- list(APPEND onnxruntime_EXTERNAL_DEPENDENCIES tvm)
-endif()
-
# onnxruntime-extensions
if (onnxruntime_USE_EXTENSIONS)
include(extensions)
@@ -1359,7 +1332,7 @@ endif()
#Adjust warning flags
set_msvc_c_cpp_compiler_warning_level(4)
-set(onnxruntime_DELAYLOAD_FLAGS "")
+set(onnxruntime_DELAYLOAD_FLAGS )
include_directories(
${ONNXRUNTIME_INCLUDE_DIR}
diff --git a/cmake/deps.txt b/cmake/deps.txt
index 2aec0e35e1d7f..21f9ee1701c46 100644
--- a/cmake/deps.txt
+++ b/cmake/deps.txt
@@ -36,8 +36,8 @@ microsoft_wil;https://github.com/microsoft/wil/archive/refs/tags/v1.0.230629.1.z
mimalloc;https://github.com/microsoft/mimalloc/archive/refs/tags/v2.1.1.zip;d5ee7d34223d0567892db5179849939c8769dc41
mp11;https://github.com/boostorg/mp11/archive/refs/tags/boost-1.82.0.zip;9bc9e01dffb64d9e0773b2e44d2f22c51aace063
onnx;https://github.com/onnx/onnx/archive/refs/tags/v1.16.1.zip;2eb9198bb352757d5ff13977cbe0634898e0837c
-# Use the latest commit of 10.4-GA-ORT-DDS
-onnx_tensorrt;https://github.com/onnx/onnx-tensorrt/archive/9f98e2ebe7507fe0774d06a44bbf4b0e82cc9ce7.zip;1d92137f424513bce20033ab4fb31cc0be8d1185
+# Use the latest commit of 10.6-GA-ORT-DDS
+onnx_tensorrt;https://github.com/onnx/onnx-tensorrt/archive/bc0d2e35909b8456abe32f3b30a49bb0c125e8b7.zip;f233ae871ad82c023da62e5dd620639f00bc2d15
protobuf;https://github.com/protocolbuffers/protobuf/archive/refs/tags/v21.12.zip;7cf2733949036c7d52fda017badcab093fe73bfa
protoc_win64;https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protoc-21.12-win64.zip;b4521f7ada5b260380f94c4bd7f1b7684c76969a
protoc_win32;https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protoc-21.12-win32.zip;3688010318192c46ce73213cdfb6b3e5656da874
@@ -58,5 +58,5 @@ extensions;https://github.com/microsoft/onnxruntime-extensions/archive/94142d839
composable_kernel;https://github.com/ROCmSoftwarePlatform/composable_kernel/archive/204da9c522cebec5220bba52cd3542ebcaf99e7a.zip;1827348efd47831c13074245274d41b7cae8a557
directx_headers;https://github.com/microsoft/DirectX-Headers/archive/refs/tags/v1.613.1.zip;47653509a3371eabb156360f42faf582f314bf2e
cudnn_frontend;https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v1.7.0.zip;d0753d8d5b39947ca0729d7773cb84653a129eb1
-dawn;https://github.com/google/dawn/archive/511eb80847afe6bded34ec491a38d5d78ba2d604.zip;c493f5aca5586f6634e25d0121c85df71189fb99
+dawn;https://github.com/google/dawn/archive/12a3b24c456cebd9fd11f23ac0164f78129b00c6.zip;ad428f6dc16f1336d584f7bad5714e1097dafc43
kleidiai;https://gitlab.arm.com/kleidi/kleidiai/-/archive/v0.2.0/kleidiai-v0.2.0.zip;B1E3173992FD91F20DB904AB77D6E901778C2681
diff --git a/cmake/external/dml.cmake b/cmake/external/dml.cmake
index e03506de12728..3cfcdd4b04c62 100644
--- a/cmake/external/dml.cmake
+++ b/cmake/external/dml.cmake
@@ -41,7 +41,7 @@ if (NOT onnxruntime_USE_CUSTOM_DIRECTML)
set(NUGET_CONFIG ${PROJECT_SOURCE_DIR}/../NuGet.config)
set(PACKAGES_CONFIG ${PROJECT_SOURCE_DIR}/../packages.config)
get_filename_component(PACKAGES_DIR ${CMAKE_CURRENT_BINARY_DIR}/../packages ABSOLUTE)
- set(DML_PACKAGE_DIR ${PACKAGES_DIR}/Microsoft.AI.DirectML.1.15.2)
+ set(DML_PACKAGE_DIR ${PACKAGES_DIR}/Microsoft.AI.DirectML.1.15.4)
# Restore nuget packages, which will pull down the DirectML redist package.
add_custom_command(
diff --git a/cmake/external/eigen.cmake b/cmake/external/eigen.cmake
index 339cded091b29..95dd438702a18 100644
--- a/cmake/external/eigen.cmake
+++ b/cmake/external/eigen.cmake
@@ -15,6 +15,7 @@ else ()
eigen
URL ${DEP_URL_eigen}
URL_HASH SHA1=${DEP_SHA1_eigen}
+ PATCH_COMMAND ${Patch_EXECUTABLE} --binary --ignore-whitespace -p1 < ${PROJECT_SOURCE_DIR}/patches/eigen/eigen-edge.patch
)
endif()
diff --git a/cmake/external/onnxruntime_external_deps.cmake b/cmake/external/onnxruntime_external_deps.cmake
index a69d2649ad832..aeaaa7b51d595 100644
--- a/cmake/external/onnxruntime_external_deps.cmake
+++ b/cmake/external/onnxruntime_external_deps.cmake
@@ -615,17 +615,39 @@ if (onnxruntime_USE_COREML)
endif()
if (onnxruntime_USE_WEBGPU)
- FetchContent_Declare(
- dawn
- URL ${DEP_URL_dawn}
- URL_HASH SHA1=${DEP_SHA1_dawn}
- PATCH_COMMAND ${Patch_EXECUTABLE} --binary --ignore-whitespace -p1 < ${PROJECT_SOURCE_DIR}/patches/dawn/dawn.patch
- )
+ if (onnxruntime_CUSTOM_DAWN_SRC_PATH)
+ # use the custom dawn source path if provided
+ #
+ # specified as:
+ # build.py --use_webgpu --cmake_extra_defines "onnxruntime_CUSTOM_DAWN_SRC_PATH=<dawn_source_dir>"
+ FetchContent_Declare(
+ dawn
+ SOURCE_DIR ${onnxruntime_CUSTOM_DAWN_SRC_PATH}
+ )
+ else()
+ FetchContent_Declare(
+ dawn
+ URL ${DEP_URL_dawn}
+ URL_HASH SHA1=${DEP_SHA1_dawn}
+ # All previous patches are merged into the upstream dawn project. We don't need to apply any patches right now.
+ # if we need to apply patches in the future, we can uncomment the following line.
+ # PATCH_COMMAND ${Patch_EXECUTABLE} --binary --ignore-whitespace -p1 < ${PROJECT_SOURCE_DIR}/patches/dawn/dawn.patch
+ )
+ endif()
- # use dawn::dawn_native and dawn::dawn_proc instead of the monolithic dawn::webgpu_dawn to minimize binary size
- set(DAWN_BUILD_MONOLITHIC_LIBRARY OFF CACHE BOOL "" FORCE)
+ if (onnxruntime_BUILD_DAWN_MONOLITHIC_LIBRARY)
+ set(DAWN_BUILD_MONOLITHIC_LIBRARY ON CACHE BOOL "" FORCE)
+ set(DAWN_ENABLE_INSTALL ON CACHE BOOL "" FORCE)
+
+ if (onnxruntime_USE_EXTERNAL_DAWN)
+ message(FATAL_ERROR "onnxruntime_USE_EXTERNAL_DAWN and onnxruntime_BUILD_DAWN_MONOLITHIC_LIBRARY cannot be enabled at the same time.")
+ endif()
+ else()
+ # use dawn::dawn_native and dawn::dawn_proc instead of the monolithic dawn::webgpu_dawn to minimize binary size
+ set(DAWN_BUILD_MONOLITHIC_LIBRARY OFF CACHE BOOL "" FORCE)
+ set(DAWN_ENABLE_INSTALL OFF CACHE BOOL "" FORCE)
+ endif()
set(DAWN_BUILD_SAMPLES OFF CACHE BOOL "" FORCE)
- set(DAWN_ENABLE_INSTALL OFF CACHE BOOL "" FORCE)
set(DAWN_ENABLE_NULL OFF CACHE BOOL "" FORCE)
set(DAWN_FETCH_DEPENDENCIES ON CACHE BOOL "" FORCE)
@@ -654,13 +676,34 @@ if (onnxruntime_USE_WEBGPU)
set(DAWN_USE_BUILT_DXC ON CACHE BOOL "" FORCE)
set(TINT_BUILD_HLSL_WRITER ON CACHE BOOL "" FORCE)
- # Vulkan may optionally be included in a Windows build. Exclude until we have an explicit use case that requires it.
- set(DAWN_ENABLE_VULKAN OFF CACHE BOOL "" FORCE)
+ if ((NOT onnxruntime_ENABLE_DAWN_BACKEND_VULKAN) AND (NOT onnxruntime_ENABLE_DAWN_BACKEND_D3D12))
+ message(FATAL_ERROR "At least one of onnxruntime_ENABLE_DAWN_BACKEND_VULKAN or onnxruntime_ENABLE_DAWN_BACKEND_D3D12 must be enabled when using Dawn on Windows.")
+ endif()
+ if (onnxruntime_ENABLE_DAWN_BACKEND_VULKAN)
+ set(DAWN_ENABLE_VULKAN ON CACHE BOOL "" FORCE)
+ set(TINT_BUILD_SPV_WRITER ON CACHE BOOL "" FORCE)
+ else()
+ set(DAWN_ENABLE_VULKAN OFF CACHE BOOL "" FORCE)
+ endif()
+ if (onnxruntime_ENABLE_DAWN_BACKEND_D3D12)
+ set(DAWN_ENABLE_D3D12 ON CACHE BOOL "" FORCE)
+ else()
+ set(DAWN_ENABLE_D3D12 OFF CACHE BOOL "" FORCE)
+ endif()
+ # We are currently always using the D3D12 backend.
+ set(DAWN_ENABLE_D3D11 OFF CACHE BOOL "" FORCE)
endif()
onnxruntime_fetchcontent_makeavailable(dawn)
- list(APPEND onnxruntime_EXTERNAL_LIBRARIES dawn::dawn_native dawn::dawn_proc)
+ if (onnxruntime_BUILD_DAWN_MONOLITHIC_LIBRARY)
+ list(APPEND onnxruntime_EXTERNAL_LIBRARIES dawn::webgpu_dawn)
+ else()
+ if (NOT onnxruntime_USE_EXTERNAL_DAWN)
+ list(APPEND onnxruntime_EXTERNAL_LIBRARIES dawn::dawn_native)
+ endif()
+ list(APPEND onnxruntime_EXTERNAL_LIBRARIES dawn::dawn_proc)
+ endif()
endif()
set(onnxruntime_LINK_DIRS)
diff --git a/cmake/external/tvm.cmake b/cmake/external/tvm.cmake
deleted file mode 100644
index 93049c8b85853..0000000000000
--- a/cmake/external/tvm.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-if (onnxruntime_USE_TVM)
- message(STATUS "onnxruntime_USE_TVM: Fetch tvm for TVM EP")
-
- FetchContent_Declare(
- tvm
- GIT_REPOSITORY https://github.com/apache/tvm.git
- GIT_TAG 2379917985919ed3918dc12cad47f469f245be7a
- )
-
- FetchContent_GetProperties(tvm)
- if(NOT tvm_POPULATED)
- FetchContent_Populate(tvm)
- if (WIN32)
- execute_process(
- COMMAND ${CMAKE_COMMAND} -E create_symlink ${tvm_BINARY_DIR}/${CMAKE_BUILD_TYPE} ${tvm_SOURCE_DIR}/build
- )
- else()
- file(CREATE_LINK ${tvm_BINARY_DIR} ${tvm_SOURCE_DIR}/build SYMBOLIC)
- endif()
- endif()
-
- set(tvm_INCLUDE_DIRS ${tvm_SOURCE_DIR}/include)
-
-endif()
diff --git a/cmake/onnxruntime.cmake b/cmake/onnxruntime.cmake
index 9602e54f3bc2d..732c0511d400f 100644
--- a/cmake/onnxruntime.cmake
+++ b/cmake/onnxruntime.cmake
@@ -122,8 +122,12 @@ else()
else()
onnxruntime_add_shared_library(onnxruntime ${CMAKE_CURRENT_BINARY_DIR}/generated_source.c )
endif()
- if (onnxruntime_USE_CUDA)
- set_property(TARGET onnxruntime APPEND_STRING PROPERTY LINK_FLAGS " -Xlinker -rpath=\\$ORIGIN")
+ if(NOT APPLE)
+ include(CheckLinkerFlag)
+ check_linker_flag(CXX "LINKER:-rpath=\$ORIGIN" LINKER_SUPPORT_RPATH)
+ if(LINKER_SUPPORT_RPATH)
+ target_link_options(onnxruntime PRIVATE "LINKER:-rpath=\$ORIGIN")
+ endif()
endif()
endif()
@@ -139,17 +143,17 @@ target_compile_definitions(onnxruntime PRIVATE FILE_NAME=\"onnxruntime.dll\")
if(UNIX)
if (APPLE)
- set(ONNXRUNTIME_SO_LINK_FLAG " -Xlinker -dead_strip")
+ target_link_options(onnxruntime PRIVATE "LINKER:-dead_strip")
elseif(NOT ${CMAKE_SYSTEM_NAME} MATCHES "AIX")
- set(ONNXRUNTIME_SO_LINK_FLAG " -Xlinker --version-script=${SYMBOL_FILE} -Xlinker --no-undefined -Xlinker --gc-sections -z noexecstack")
+ target_link_options(onnxruntime PRIVATE "LINKER:--version-script=${SYMBOL_FILE}" "LINKER:--no-undefined" "LINKER:--gc-sections")
endif()
else()
- set(ONNXRUNTIME_SO_LINK_FLAG " -DEF:${SYMBOL_FILE}")
+ target_link_options(onnxruntime PRIVATE "-DEF:${SYMBOL_FILE}")
endif()
-if (NOT WIN32)
- if (APPLE OR ${CMAKE_SYSTEM_NAME} MATCHES "^iOS")
- set(ONNXRUNTIME_SO_LINK_FLAG " -Wl,-exported_symbols_list,${SYMBOL_FILE}")
+
+if (APPLE OR ${CMAKE_SYSTEM_NAME} MATCHES "^iOS")
+ target_link_options(onnxruntime PRIVATE "LINKER:-exported_symbols_list,${SYMBOL_FILE}")
if (${CMAKE_SYSTEM_NAME} STREQUAL "iOS")
set_target_properties(onnxruntime PROPERTIES
MACOSX_RPATH TRUE
@@ -159,12 +163,10 @@ if (NOT WIN32)
else()
set_target_properties(onnxruntime PROPERTIES INSTALL_RPATH "@loader_path")
endif()
- elseif (NOT CMAKE_SYSTEM_NAME STREQUAL "Emscripten" AND NOT ${CMAKE_SYSTEM_NAME} MATCHES "AIX")
- set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-rpath='$ORIGIN'")
- endif()
endif()
+
if(CMAKE_SYSTEM_NAME STREQUAL "Android" AND onnxruntime_MINIMAL_BUILD)
# target onnxruntime is a shared library, the dummy __cxa_demangle is only attach to it to avoid
# affecting downstream ort library users with the behavior of dummy __cxa_demangle. So the dummy
@@ -208,7 +210,6 @@ set(onnxruntime_INTERNAL_LIBRARIES
${PROVIDERS_NNAPI}
${PROVIDERS_QNN}
${PROVIDERS_SNPE}
- ${PROVIDERS_TVM}
${PROVIDERS_RKNPU}
${PROVIDERS_VSINPU}
${PROVIDERS_XNNPACK}
@@ -219,7 +220,6 @@ set(onnxruntime_INTERNAL_LIBRARIES
${onnxruntime_winml}
onnxruntime_optimizer
onnxruntime_providers
- ${onnxruntime_tvm_libs}
onnxruntime_lora
onnxruntime_framework
onnxruntime_graph
@@ -248,7 +248,9 @@ target_link_libraries(onnxruntime PRIVATE
${onnxruntime_EXTERNAL_LIBRARIES}
)
-set_property(TARGET onnxruntime APPEND_STRING PROPERTY LINK_FLAGS ${ONNXRUNTIME_SO_LINK_FLAG} ${onnxruntime_DELAYLOAD_FLAGS})
+if(WIN32)
+ target_link_options(onnxruntime PRIVATE ${onnxruntime_DELAYLOAD_FLAGS})
+endif()
#See: https://cmake.org/cmake/help/latest/prop_tgt/SOVERSION.html
if(NOT APPLE AND NOT WIN32)
if(${CMAKE_SYSTEM_NAME} MATCHES "AIX")
diff --git a/cmake/onnxruntime_codegen_tvm.cmake b/cmake/onnxruntime_codegen_tvm.cmake
deleted file mode 100644
index 7b50d8f8603ae..0000000000000
--- a/cmake/onnxruntime_codegen_tvm.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-file(GLOB_RECURSE onnxruntime_codegen_common_srcs
- "${ONNXRUNTIME_ROOT}/core/codegen/common/*.h"
- "${ONNXRUNTIME_ROOT}/core/codegen/common/*.cc"
-)
-
-file(GLOB_RECURSE onnxruntime_codegen_tvm_srcs CONFIGURE_DEPENDS
- "${ONNXRUNTIME_ROOT}/core/codegen/mti/*.h"
- "${ONNXRUNTIME_ROOT}/core/codegen/mti/*.cc"
- "${ONNXRUNTIME_ROOT}/core/codegen/passes/*.h"
- "${ONNXRUNTIME_ROOT}/core/codegen/passes/*.cc"
-)
-
-source_group(TREE ${ONNXRUNTIME_ROOT}/core FILES ${onnxruntime_codegen_common_srcs} ${onnxruntime_codegen_tvm_srcs})
-
-#onnxruntime_codegen_tvm depends on onnxruntime framework
-onnxruntime_add_static_library(onnxruntime_codegen_tvm ${onnxruntime_codegen_common_srcs} ${onnxruntime_codegen_tvm_srcs})
-set_target_properties(onnxruntime_codegen_tvm PROPERTIES FOLDER "ONNXRuntime")
-target_include_directories(onnxruntime_codegen_tvm PRIVATE ${ONNXRUNTIME_ROOT} ${TVM_INCLUDES} ${MKLML_INCLUDE_DIR} ${eigen_INCLUDE_DIRS})
-onnxruntime_add_include_to_target(onnxruntime_codegen_tvm onnxruntime_common onnxruntime_framework onnx onnx_proto ${PROTOBUF_LIB} flatbuffers::flatbuffers safeint_interface Boost::mp11)
-target_compile_options(onnxruntime_codegen_tvm PRIVATE ${DISABLED_WARNINGS_FOR_TVM})
-# need onnx to build to create headers that this project includes
-add_dependencies(onnxruntime_codegen_tvm ${onnxruntime_EXTERNAL_DEPENDENCIES})
diff --git a/cmake/onnxruntime_csharp.cmake b/cmake/onnxruntime_csharp.cmake
index 22c993d07f7f9..39533429e181c 100644
--- a/cmake/onnxruntime_csharp.cmake
+++ b/cmake/onnxruntime_csharp.cmake
@@ -30,10 +30,6 @@ if (onnxruntime_USE_NNAPI_BUILTIN)
STRING(APPEND CSHARP_PREPROCESSOR_DEFINES "USE_NNAPI;")
endif()
-if (onnxruntime_USE_TVM)
- STRING(APPEND CSHARP_PREPROCESSOR_DEFINES "USE_TVM,")
-endif()
-
if (onnxruntime_USE_OPENVINO)
STRING(APPEND CSHARP_PREPROCESSOR_DEFINES "USE_OPENVINO;")
endif()
diff --git a/cmake/onnxruntime_java.cmake b/cmake/onnxruntime_java.cmake
index 765ebab111ac7..b15b9632e9e24 100644
--- a/cmake/onnxruntime_java.cmake
+++ b/cmake/onnxruntime_java.cmake
@@ -7,7 +7,7 @@
include(FindJava)
find_package(Java REQUIRED)
include(UseJava)
-if (NOT CMAKE_SYSTEM_NAME STREQUAL "Android")
+if (NOT ANDROID)
find_package(JNI REQUIRED)
endif()
@@ -21,23 +21,28 @@ endif()
set(GRADLE_EXECUTABLE "${JAVA_ROOT}/gradlew")
+set(COMMON_GRADLE_ARGS --console=plain)
+if(WIN32)
+ list(APPEND COMMON_GRADLE_ARGS -Dorg.gradle.daemon=false)
+elseif (ANDROID)
+ # For Android build, we may run gradle multiple times in same build,
+ # sometimes gradle JVM will run out of memory if we keep the daemon running
+ # it is better to not keep a daemon running
+ list(APPEND COMMON_GRADLE_ARGS --no-daemon)
+endif()
+
# Specify the Java source files
file(GLOB_RECURSE onnxruntime4j_gradle_files "${JAVA_ROOT}/*.gradle")
file(GLOB_RECURSE onnxruntime4j_src "${JAVA_ROOT}/src/main/java/ai/onnxruntime/*.java")
set(JAVA_OUTPUT_JAR ${JAVA_ROOT}/build/libs/onnxruntime.jar)
# this jar is solely used to signaling mechanism for dependency management in CMake
# if any of the Java sources change, the jar (and generated headers) will be regenerated and the onnxruntime4j_jni target will be rebuilt
-set(GRADLE_ARGS --console=plain clean jar -x test)
-if(WIN32)
- set(GRADLE_ARGS ${GRADLE_ARGS} -Dorg.gradle.daemon=false)
-elseif (CMAKE_SYSTEM_NAME STREQUAL "Android")
- # For Android build, we may run gradle multiple times in same build,
- # sometimes gradle JVM will run out of memory if we keep the daemon running
- # it is better to not keep a daemon running
- set(GRADLE_ARGS ${GRADLE_ARGS} --no-daemon)
-endif()
+set(GRADLE_ARGS clean jar -x test)
-add_custom_command(OUTPUT ${JAVA_OUTPUT_JAR} COMMAND ${GRADLE_EXECUTABLE} ${GRADLE_ARGS} WORKING_DIRECTORY ${JAVA_ROOT} DEPENDS ${onnxruntime4j_gradle_files} ${onnxruntime4j_src})
+add_custom_command(OUTPUT ${JAVA_OUTPUT_JAR}
+ COMMAND ${GRADLE_EXECUTABLE} ${COMMON_GRADLE_ARGS} ${GRADLE_ARGS}
+ WORKING_DIRECTORY ${JAVA_ROOT}
+ DEPENDS ${onnxruntime4j_gradle_files} ${onnxruntime4j_src})
add_custom_target(onnxruntime4j DEPENDS ${JAVA_OUTPUT_JAR})
set_source_files_properties(${JAVA_OUTPUT_JAR} PROPERTIES GENERATED TRUE)
set_property(TARGET onnxruntime4j APPEND PROPERTY ADDITIONAL_CLEAN_FILES "${JAVA_OUTPUT_DIR}")
@@ -62,7 +67,7 @@ target_link_libraries(onnxruntime4j_jni PUBLIC onnxruntime)
set(JAVA_PACKAGE_OUTPUT_DIR ${JAVA_OUTPUT_DIR}/build)
file(MAKE_DIRECTORY ${JAVA_PACKAGE_OUTPUT_DIR})
-if (CMAKE_SYSTEM_NAME STREQUAL "Android")
+if (ANDROID)
set(ANDROID_PACKAGE_OUTPUT_DIR ${JAVA_PACKAGE_OUTPUT_DIR}/android)
file(MAKE_DIRECTORY ${ANDROID_PACKAGE_OUTPUT_DIR})
endif()
@@ -88,7 +93,7 @@ if(APPLE)
elseif(JNI_ARCH STREQUAL "arm64")
set(JNI_ARCH aarch64)
endif()
-elseif (CMAKE_SYSTEM_NAME STREQUAL "Android")
+elseif (ANDROID)
set(JNI_ARCH ${ANDROID_ABI})
elseif (ARM64)
set(JNI_ARCH aarch64)
@@ -180,15 +185,7 @@ else()
endif()
# run the build process (this copies the results back into CMAKE_CURRENT_BINARY_DIR)
-set(GRADLE_ARGS --console=plain cmakeBuild -DcmakeBuildDir=${CMAKE_CURRENT_BINARY_DIR})
-if(WIN32)
- set(GRADLE_ARGS ${GRADLE_ARGS} -Dorg.gradle.daemon=false)
-elseif (CMAKE_SYSTEM_NAME STREQUAL "Android")
- # For Android build, we may run gradle multiple times in same build,
- # sometimes gradle JVM will run out of memory if we keep the daemon running
- # it is better to not keep a daemon running
- set(GRADLE_ARGS ${GRADLE_ARGS} --no-daemon)
-endif()
+set(GRADLE_ARGS cmakeBuild -DcmakeBuildDir=${CMAKE_CURRENT_BINARY_DIR})
# Append relevant native build flags to gradle command
set(GRADLE_ARGS ${GRADLE_ARGS} ${ORT_PROVIDER_FLAGS})
@@ -197,9 +194,11 @@ if (onnxruntime_ENABLE_TRAINING_APIS)
endif()
message(STATUS "GRADLE_ARGS: ${GRADLE_ARGS}")
-add_custom_command(TARGET onnxruntime4j_jni POST_BUILD COMMAND ${GRADLE_EXECUTABLE} ${GRADLE_ARGS} WORKING_DIRECTORY ${JAVA_ROOT})
+add_custom_command(TARGET onnxruntime4j_jni POST_BUILD
+ COMMAND ${GRADLE_EXECUTABLE} ${COMMON_GRADLE_ARGS} ${GRADLE_ARGS}
+ WORKING_DIRECTORY ${JAVA_ROOT})
-if (CMAKE_SYSTEM_NAME STREQUAL "Android")
+if (ANDROID)
set(ANDROID_PACKAGE_JNILIBS_DIR ${JAVA_OUTPUT_DIR}/android)
set(ANDROID_PACKAGE_ABI_DIR ${ANDROID_PACKAGE_JNILIBS_DIR}/${ANDROID_ABI})
file(MAKE_DIRECTORY ${ANDROID_PACKAGE_JNILIBS_DIR})
@@ -214,6 +213,7 @@ if (CMAKE_SYSTEM_NAME STREQUAL "Android")
POST_BUILD
COMMAND ${CMAKE_COMMAND} -E echo "Generating Android AAR package..."
COMMAND ${GRADLE_EXECUTABLE}
+ ${COMMON_GRADLE_ARGS}
build
-b build-android.gradle -c settings-android.gradle
-DjniLibsDir=${ANDROID_PACKAGE_JNILIBS_DIR} -DbuildDir=${ANDROID_PACKAGE_OUTPUT_DIR}
@@ -237,6 +237,7 @@ if (CMAKE_SYSTEM_NAME STREQUAL "Android")
POST_BUILD
COMMAND ${CMAKE_COMMAND} -E echo "Building and running Android test for Android AAR package..."
COMMAND ${GRADLE_EXECUTABLE}
+ ${COMMON_GRADLE_ARGS}
clean assembleDebug assembleDebugAndroidTest
-DminSdkVer=${ANDROID_MIN_SDK}
--stacktrace
diff --git a/cmake/onnxruntime_mlas.cmake b/cmake/onnxruntime_mlas.cmake
index 9e5a51ca3bee8..f5d8bde0b0427 100644
--- a/cmake/onnxruntime_mlas.cmake
+++ b/cmake/onnxruntime_mlas.cmake
@@ -36,11 +36,13 @@ onnxruntime_add_static_library(onnxruntime_mlas
${MLAS_SRC_DIR}/qpostprocessor.cpp
${MLAS_SRC_DIR}/qlgavgpool.cpp
${MLAS_SRC_DIR}/qdwconv_kernelsize.cpp
- ${MLAS_SRC_DIR}/sqnbitgemm.h
- ${MLAS_SRC_DIR}/sqnbitgemm.cpp
+ ${MLAS_SRC_DIR}/qnbitgemm.h
+ ${MLAS_SRC_DIR}/qnbitgemm.cpp
${MLAS_SRC_DIR}/sqnbitgemm_q8_block.h
${MLAS_SRC_DIR}/flashattn.cpp
${MLAS_SRC_DIR}/cast.cpp
+ ${MLAS_SRC_DIR}/rotary_embedding.h
+ ${MLAS_SRC_DIR}/rotary_embedding.cpp
${MLAS_SRC_DIR}/qsoftmax.cpp
${MLAS_SRC_DIR}/qsoftmax_kernel_naive.cpp
)
@@ -86,11 +88,15 @@ function(setup_mlas_source_for_windows)
${MLAS_SRC_DIR}/qgemm_kernel_neon.cpp
${MLAS_SRC_DIR}/qgemm_kernel_udot.cpp
${MLAS_SRC_DIR}/qgemm_kernel_sdot.cpp
- ${MLAS_SRC_DIR}/sqnbitgemm_kernel_neon.h
- ${MLAS_SRC_DIR}/sqnbitgemm_kernel_neon.cpp
+ ${MLAS_SRC_DIR}/qnbitgemm_kernel_neon.h
+ ${MLAS_SRC_DIR}/qnbitgemm_kernel_neon.cpp
${MLAS_SRC_DIR}/sqnbitgemm_kernel_neon_fp32.cpp
${MLAS_SRC_DIR}/sqnbitgemm_kernel_neon_int8.cpp
- ${MLAS_SRC_DIR}/fp16_neon_common.cpp
+ ${MLAS_SRC_DIR}/cast_kernel_neon.cpp
+ ${MLAS_SRC_DIR}/hqnbitgemm_kernel_neon_fp16.cpp
+ ${MLAS_SRC_DIR}/rotary_embedding_kernel_neon.h
+ ${MLAS_SRC_DIR}/rotary_embedding_kernel_neon.cpp
+ ${MLAS_SRC_DIR}/rotary_embedding_kernel_neon_fp16.cpp
)
set(mlas_platform_preprocess_srcs
@@ -369,10 +375,12 @@ else()
${MLAS_SRC_DIR}/qgemm_kernel_neon.cpp
${MLAS_SRC_DIR}/qgemm_kernel_udot.cpp
${MLAS_SRC_DIR}/qgemm_kernel_sdot.cpp
- ${MLAS_SRC_DIR}/sqnbitgemm_kernel_neon.h
- ${MLAS_SRC_DIR}/sqnbitgemm_kernel_neon.cpp
+ ${MLAS_SRC_DIR}/qnbitgemm_kernel_neon.h
+ ${MLAS_SRC_DIR}/qnbitgemm_kernel_neon.cpp
${MLAS_SRC_DIR}/sqnbitgemm_kernel_neon_fp32.cpp
${MLAS_SRC_DIR}/sqnbitgemm_kernel_neon_int8.cpp
+ ${MLAS_SRC_DIR}/rotary_embedding_kernel_neon.h
+ ${MLAS_SRC_DIR}/rotary_embedding_kernel_neon.cpp
)
set_source_files_properties(${MLAS_SRC_DIR}/sqnbitgemm_kernel_neon_int8.cpp
PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+dotprod")
@@ -390,7 +398,9 @@ else()
${MLAS_SRC_DIR}/qgemm_kernel_smmla.cpp
${MLAS_SRC_DIR}/qgemm_kernel_ummla.cpp
${MLAS_SRC_DIR}/sbgemm_kernel_neon.cpp
- ${MLAS_SRC_DIR}/fp16_neon_common.cpp
+ ${MLAS_SRC_DIR}/cast_kernel_neon.cpp
+ ${MLAS_SRC_DIR}/hqnbitgemm_kernel_neon_fp16.cpp
+ ${MLAS_SRC_DIR}/rotary_embedding_kernel_neon_fp16.cpp
)
set_source_files_properties(${MLAS_SRC_DIR}/aarch64/HalfGemmKernelNeon.S PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+fp16 ")
set_source_files_properties(${MLAS_SRC_DIR}/aarch64/QgemmS8S8KernelSmmla.S PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+i8mm ")
@@ -400,7 +410,9 @@ else()
set_source_files_properties(${MLAS_SRC_DIR}/dwconv.cpp PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+fp16 ")
set_source_files_properties(${MLAS_SRC_DIR}/pooling_fp16.cpp PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+fp16 ")
set_source_files_properties(${MLAS_SRC_DIR}/sbgemm_kernel_neon.cpp PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+bf16 ")
- set_source_files_properties(${MLAS_SRC_DIR}/fp16_neon_common.cpp PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+fp16 ")
+ set_source_files_properties(${MLAS_SRC_DIR}/cast_kernel_neon.cpp PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+fp16 ")
+ set_source_files_properties(${MLAS_SRC_DIR}/hqnbitgemm_kernel_neon_fp16.cpp PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+fp16 ")
+ set_source_files_properties(${MLAS_SRC_DIR}/rotary_embedding_kernel_neon_fp16.cpp PROPERTIES COMPILE_FLAGS " -march=armv8.2-a+fp16 ")
endif()
if(ONNXRUNTIME_MLAS_MULTI_ARCH)
@@ -460,7 +472,6 @@ else()
bool HasP10 = ((hwcap2 & PPC_FEATURE2_MMA) && (hwcap2 & PPC_FEATURE2_ARCH_3_1));
return 0;
}
- }
#endif"
HAS_P10_RUNTIME
)
@@ -686,6 +697,13 @@ endif()
if(NOT ONNXRUNTIME_MLAS_MULTI_ARCH AND MLAS_SOURCE_IS_NOT_SET)
file(GLOB_RECURSE mlas_platform_srcs
"${MLAS_SRC_DIR}/scalar/*.cpp")
+ elseif (onnxruntime_FORCE_GENERIC_ALGORITHMS)
+ file(GLOB_RECURSE mlas_platform_srcs_generic
+ "${MLAS_SRC_DIR}/scalar/*.cpp")
+ set(mlas_platform_srcs
+ ${mlas_platform_srcs}
+ ${mlas_platform_srcs_generic}
+ )
endif()
target_sources(onnxruntime_mlas PRIVATE ${mlas_platform_srcs})
endif()
diff --git a/cmake/onnxruntime_providers.cmake b/cmake/onnxruntime_providers.cmake
index 9666877cdc206..582491de9503d 100644
--- a/cmake/onnxruntime_providers.cmake
+++ b/cmake/onnxruntime_providers.cmake
@@ -101,9 +101,6 @@ endif()
if(onnxruntime_USE_ROCM)
set(PROVIDERS_ROCM onnxruntime_providers_rocm)
endif()
-if (onnxruntime_USE_TVM)
- set(PROVIDERS_TVM onnxruntime_providers_tvm)
-endif()
if (onnxruntime_USE_XNNPACK)
set(PROVIDERS_XNNPACK onnxruntime_providers_xnnpack)
endif()
@@ -194,10 +191,6 @@ if (onnxruntime_USE_ROCM)
include(onnxruntime_providers_rocm.cmake)
endif()
-if (onnxruntime_USE_TVM)
- include(onnxruntime_providers_tvm.cmake)
-endif()
-
if (onnxruntime_USE_VSINPU)
include(onnxruntime_providers_vsinpu.cmake)
endif()
diff --git a/cmake/onnxruntime_providers_cuda.cmake b/cmake/onnxruntime_providers_cuda.cmake
index 39ad530146b33..4f86717026118 100644
--- a/cmake/onnxruntime_providers_cuda.cmake
+++ b/cmake/onnxruntime_providers_cuda.cmake
@@ -224,8 +224,7 @@
include(cutlass)
target_include_directories(${target} PRIVATE ${cutlass_SOURCE_DIR}/include ${cutlass_SOURCE_DIR}/examples ${cutlass_SOURCE_DIR}/tools/util/include)
- target_include_directories(${target} PRIVATE ${ONNXRUNTIME_ROOT} ${CMAKE_CURRENT_BINARY_DIR} ${eigen_INCLUDE_DIRS} ${TVM_INCLUDES}
- PUBLIC ${CUDAToolkit_INCLUDE_DIRS})
+ target_include_directories(${target} PRIVATE ${ONNXRUNTIME_ROOT} ${CMAKE_CURRENT_BINARY_DIR} ${eigen_INCLUDE_DIRS} PUBLIC ${CUDAToolkit_INCLUDE_DIRS})
# ${CMAKE_CURRENT_BINARY_DIR} is so that #include "onnxruntime_config.h" inside tensor_shape.h is found
set_target_properties(${target} PROPERTIES LINKER_LANGUAGE CUDA)
set_target_properties(${target} PROPERTIES FOLDER "ONNXRuntime")
diff --git a/cmake/onnxruntime_providers_dml.cmake b/cmake/onnxruntime_providers_dml.cmake
index 439be882dcc5e..3141aa85a1163 100644
--- a/cmake/onnxruntime_providers_dml.cmake
+++ b/cmake/onnxruntime_providers_dml.cmake
@@ -61,8 +61,9 @@
target_link_libraries(onnxruntime_providers_dml PRIVATE delayimp.lib)
- if (NOT GDK_PLATFORM)
- set(onnxruntime_DELAYLOAD_FLAGS "${onnxruntime_DELAYLOAD_FLAGS} /DELAYLOAD:DirectML.dll /DELAYLOAD:d3d12.dll /DELAYLOAD:dxgi.dll /DELAYLOAD:dxcore.dll /DELAYLOAD:api-ms-win-core-com-l1-1-0.dll /DELAYLOAD:shlwapi.dll /DELAYLOAD:oleaut32.dll /DELAYLOAD:ext-ms-win-dxcore-l1-*.dll /ignore:4199")
+ if (onnxruntime_ENABLE_DELAY_LOADING_WIN_DLLS AND NOT GDK_PLATFORM)
+ #NOTE: the flags are only applied to onnxruntime.dll and the PYD file in our python package. Our C/C++ unit tests do not use these flags.
+ list(APPEND onnxruntime_DELAYLOAD_FLAGS "/DELAYLOAD:DirectML.dll" "/DELAYLOAD:d3d12.dll" "/DELAYLOAD:dxgi.dll" "/DELAYLOAD:dxcore.dll" "/DELAYLOAD:api-ms-win-core-com-l1-1-0.dll" "/DELAYLOAD:shlwapi.dll" "/DELAYLOAD:oleaut32.dll" "/DELAYLOAD:ext-ms-win-dxcore-l1-*.dll" "/ignore:4199")
endif()
target_compile_definitions(onnxruntime_providers_dml
diff --git a/cmake/onnxruntime_providers_openvino.cmake b/cmake/onnxruntime_providers_openvino.cmake
index 5dcee285a5b13..f5fae8d169ccc 100644
--- a/cmake/onnxruntime_providers_openvino.cmake
+++ b/cmake/onnxruntime_providers_openvino.cmake
@@ -11,22 +11,22 @@
"${ONNXRUNTIME_ROOT}/core/providers/shared_library/*.cc"
)
- if (WIN32)
- set(CMAKE_MAP_IMPORTED_CONFIG_RELWITHDEBINFO Release)
- endif()
-
# Header paths
find_package(OpenVINO REQUIRED COMPONENTS Runtime ONNX)
- if(OpenVINO_VERSION VERSION_LESS 2024.0)
- message(FATAL_ERROR "OpenVINO 2024.0 and newer are supported. Please, use latest OpenVINO release")
+ if(OpenVINO_VERSION VERSION_LESS 2024.4)
+ message(FATAL_ERROR "OpenVINO 2024.4 and newer are supported. Please, use latest OpenVINO release")
endif()
if(OpenVINO_VERSION VERSION_GREATER_EQUAL 2024.4)
add_definitions(-DUSE_OVEP_NPU_MEMORY=1)
endif()
- if (WIN32)
- unset(CMAKE_MAP_IMPORTED_CONFIG_RELWITHDEBINFO)
+ # If building RelWithDebInfo and OV package does not have that configuration map to Release
+ get_target_property(ov_rt_implib_rwdi openvino::runtime IMPORTED_IMPLIB_RELWITHDEBINFO)
+ if ((CMAKE_BUILD_TYPE STREQUAL RelWithDebInfo) AND NOT ov_rt_implib_rwdi)
+ set_target_properties(openvino::runtime PROPERTIES
+ MAP_IMPORTED_CONFIG_RELWITHDEBINFO Release
+ )
endif()
list(APPEND OPENVINO_LIB_LIST openvino::frontend::onnx openvino::runtime ${PYTHON_LIBRARIES})
@@ -82,3 +82,8 @@
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
endif()
+
+set_target_properties(onnxruntime_providers_openvino PROPERTIES
+ MAP_IMPORTED_CONFIG_RELEASE RelWithDebInfo
+ MAP_IMPORTED_CONFIG_DEBUG RelWithDebInfo
+ )
\ No newline at end of file
diff --git a/cmake/onnxruntime_providers_tvm.cmake b/cmake/onnxruntime_providers_tvm.cmake
deleted file mode 100644
index 8fd50c70dd5d7..0000000000000
--- a/cmake/onnxruntime_providers_tvm.cmake
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
- add_definitions(-DUSE_TVM=1)
- if (onnxruntime_TVM_USE_HASH)
- add_definitions(-DUSE_TVM_HASH=1)
- endif()
-
- if (onnxruntime_TVM_USE_HASH)
- file (GLOB_RECURSE onnxruntime_providers_tvm_cc_srcs CONFIGURE_DEPENDS
- "${ONNXRUNTIME_ROOT}/core/providers/tvm/*.h"
- "${ONNXRUNTIME_ROOT}/core/providers/tvm/*.cc"
- )
- else()
- file (GLOB onnxruntime_providers_tvm_cc_srcs CONFIGURE_DEPENDS
- "${ONNXRUNTIME_ROOT}/core/providers/tvm/*.h"
- "${ONNXRUNTIME_ROOT}/core/providers/tvm/*.cc"
- )
- endif()
-
- source_group(TREE ${ONNXRUNTIME_ROOT}/core FILES ${onnxruntime_providers_tvm_cc_srcs})
- onnxruntime_add_static_library(onnxruntime_providers_tvm ${onnxruntime_providers_tvm_cc_srcs})
-
- if ( CMAKE_COMPILER_IS_GNUCC )
- target_compile_options(onnxruntime_providers_tvm PRIVATE -Wno-unused-parameter -Wno-missing-field-initializers)
- endif()
-
- target_include_directories(onnxruntime_providers_tvm PRIVATE
- ${TVM_INCLUDES}
- ${PYTHON_INCLUDE_DIRS})
- onnxruntime_add_include_to_target(onnxruntime_providers_tvm onnxruntime_common onnxruntime_framework onnx onnx_proto ${PROTOBUF_LIB} flatbuffers::flatbuffers Boost::mp11 safeint_interface)
-
- add_dependencies(onnxruntime_providers_tvm ${onnxruntime_EXTERNAL_DEPENDENCIES})
-
- if (onnxruntime_TVM_USE_HASH)
- add_dependencies(onnxruntime_providers_tvm ippcp_s)
- target_include_directories(onnxruntime_providers_tvm PRIVATE ${IPP_CRYPTO_INCLUDE_DIR})
- target_link_libraries(onnxruntime_providers_tvm PRIVATE ippcp_s)
- endif()
-
- set_target_properties(onnxruntime_providers_tvm PROPERTIES FOLDER "ONNXRuntime")
- set_target_properties(onnxruntime_providers_tvm PROPERTIES LINKER_LANGUAGE CXX)
-
- if (WIN32 AND MSVC)
- # wd4100: identifier' : unreferenced formal parameter
- # wd4127: conditional expression is constant
- # wd4244: conversion from 'int' to 'char', possible loss of data
- # TODO: 4244 should not be disabled
- target_compile_options(onnxruntime_providers_tvm PRIVATE "/wd4100" "/wd4127" "/wd4244")
- else()
- target_compile_options(onnxruntime_providers_tvm PRIVATE "-Wno-error=type-limits")
- endif()
- target_compile_definitions(onnxruntime_providers_tvm PUBLIC DMLC_USE_LOGGING_LIBRARY=)
-
- install(FILES ${PROJECT_SOURCE_DIR}/../include/onnxruntime/core/providers/tvm/tvm_provider_factory.h
- DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/onnxruntime/)
-
- if (NOT onnxruntime_BUILD_SHARED_LIB)
- install(TARGETS onnxruntime_providers_tvm
- ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
- LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
- RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
- FRAMEWORK DESTINATION ${CMAKE_INSTALL_BINDIR})
- endif()
\ No newline at end of file
diff --git a/cmake/onnxruntime_providers_vitisai.cmake b/cmake/onnxruntime_providers_vitisai.cmake
index 764cde9491da8..561a323533f48 100644
--- a/cmake/onnxruntime_providers_vitisai.cmake
+++ b/cmake/onnxruntime_providers_vitisai.cmake
@@ -12,6 +12,7 @@
file(GLOB onnxruntime_providers_vitisai_cc_srcs CONFIGURE_DEPENDS
"${ONNXRUNTIME_ROOT}/core/providers/vitisai/*.cc"
"${ONNXRUNTIME_ROOT}/core/providers/vitisai/*.h"
+ "${ONNXRUNTIME_ROOT}/core/providers/vitisai/include/vaip/*.h"
"${ONNXRUNTIME_ROOT}/core/providers/vitisai/imp/*.cc"
"${ONNXRUNTIME_ROOT}/core/providers/vitisai/imp/*.h"
"${ONNXRUNTIME_ROOT}/core/providers/shared_library/*.h"
diff --git a/cmake/onnxruntime_providers_webgpu.cmake b/cmake/onnxruntime_providers_webgpu.cmake
index eb25c55ab23e0..fea5964f0dda9 100644
--- a/cmake/onnxruntime_providers_webgpu.cmake
+++ b/cmake/onnxruntime_providers_webgpu.cmake
@@ -22,6 +22,25 @@
onnxruntime_add_static_library(onnxruntime_providers_webgpu ${onnxruntime_providers_webgpu_cc_srcs})
onnxruntime_add_include_to_target(onnxruntime_providers_webgpu
onnxruntime_common dawn::dawncpp_headers dawn::dawn_headers onnx onnx_proto flatbuffers::flatbuffers Boost::mp11 safeint_interface)
- target_link_libraries(onnxruntime_providers_webgpu dawn::dawn_native dawn::dawn_proc)
+
+ if (onnxruntime_BUILD_DAWN_MONOLITHIC_LIBRARY)
+ target_link_libraries(onnxruntime_providers_webgpu dawn::webgpu_dawn)
+
+ if (onnxruntime_ENABLE_DELAY_LOADING_WIN_DLLS)
+ list(APPEND onnxruntime_DELAYLOAD_FLAGS "/DELAYLOAD:webgpu_dawn.dll")
+ endif()
+
+ # Copy webgpu_dawn.dll to the output directory
+ add_custom_command(
+ TARGET onnxruntime_providers_webgpu
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND} -E copy_if_different "$<TARGET_FILE:dawn::webgpu_dawn>" "$<TARGET_FILE_DIR:onnxruntime_providers_webgpu>"
+ VERBATIM )
+ else()
+ if (NOT onnxruntime_USE_EXTERNAL_DAWN)
+ target_link_libraries(onnxruntime_providers_webgpu dawn::dawn_native)
+ endif()
+ target_link_libraries(onnxruntime_providers_webgpu dawn::dawn_proc)
+ endif()
set_target_properties(onnxruntime_providers_webgpu PROPERTIES FOLDER "ONNXRuntime")
diff --git a/cmake/onnxruntime_python.cmake b/cmake/onnxruntime_python.cmake
index 0d038d210ea2b..5a87252b08573 100644
--- a/cmake/onnxruntime_python.cmake
+++ b/cmake/onnxruntime_python.cmake
@@ -110,17 +110,17 @@ if (onnxruntime_USE_NCCL)
endif()
if(APPLE)
- set(ONNXRUNTIME_SO_LINK_FLAG "-Xlinker -exported_symbols_list -Xlinker ${ONNXRUNTIME_ROOT}/python/exported_symbols.lst")
+ target_link_options(onnxruntime_pybind11_state PRIVATE "LINKER:-exported_symbols_list,${ONNXRUNTIME_ROOT}/python/exported_symbols.lst")
elseif(UNIX)
if (onnxruntime_ENABLE_EXTERNAL_CUSTOM_OP_SCHEMAS)
- set(ONNXRUNTIME_SO_LINK_FLAG "-Xlinker --version-script=${ONNXRUNTIME_ROOT}/python/version_script_expose_onnx_protobuf.lds -Xlinker --gc-sections")
+ target_link_options(onnxruntime_pybind11_state PRIVATE "LINKER:--version-script=${ONNXRUNTIME_ROOT}/python/version_script_expose_onnx_protobuf.lds" "LINKER:--gc-sections")
else()
if (NOT CMAKE_SYSTEM_NAME MATCHES "AIX")
- set(ONNXRUNTIME_SO_LINK_FLAG "-Xlinker --version-script=${ONNXRUNTIME_ROOT}/python/version_script.lds -Xlinker --gc-sections")
+ target_link_options(onnxruntime_pybind11_state PRIVATE "LINKER:--version-script=${ONNXRUNTIME_ROOT}/python/version_script.lds" "LINKER:--gc-sections")
endif()
endif()
else()
- set(ONNXRUNTIME_SO_LINK_FLAG "-DEF:${ONNXRUNTIME_ROOT}/python/pybind.def")
+ target_link_options(onnxruntime_pybind11_state PRIVATE "-DEF:${ONNXRUNTIME_ROOT}/python/pybind.def")
endif()
if (onnxruntime_ENABLE_ATEN)
@@ -169,8 +169,8 @@ endif()
target_link_libraries(onnxruntime_pybind11_state PRIVATE
onnxruntime_session
${onnxruntime_libs}
- ${PROVIDERS_TVM}
${PROVIDERS_NNAPI}
+ ${PROVIDERS_VSINPU}
${PROVIDERS_XNNPACK}
${PROVIDERS_COREML}
${PROVIDERS_RKNPU}
@@ -184,7 +184,6 @@ target_link_libraries(onnxruntime_pybind11_state PRIVATE
onnxruntime_optimizer
onnxruntime_providers
onnxruntime_util
- ${onnxruntime_tvm_libs}
onnxruntime_lora
onnxruntime_framework
onnxruntime_util
@@ -199,11 +198,11 @@ set(onnxruntime_pybind11_state_dependencies
${onnxruntime_EXTERNAL_DEPENDENCIES}
${pybind11_dep}
)
-set_property(TARGET onnxruntime_pybind11_state APPEND_STRING PROPERTY LINK_FLAGS ${ONNXRUNTIME_SO_LINK_FLAG} ${onnxruntime_DELAYLOAD_FLAGS})
+
add_dependencies(onnxruntime_pybind11_state ${onnxruntime_pybind11_state_dependencies})
if (MSVC)
- set_target_properties(onnxruntime_pybind11_state PROPERTIES LINK_FLAGS "${ONNXRUNTIME_SO_LINK_FLAG}")
+ target_link_options(onnxruntime_pybind11_state PRIVATE ${onnxruntime_DELAYLOAD_FLAGS})
# if MSVC, pybind11 undefines _DEBUG in pybind11/detail/common.h, which causes the pragma in pyconfig.h
# from the python installation to require the release version of the lib
# e.g. from a python 3.10 install:
@@ -220,14 +219,15 @@ if (MSVC)
# Explicitly use the release version of the python library to make the project file consistent with this.
target_link_libraries(onnxruntime_pybind11_state PRIVATE ${Python_LIBRARY_RELEASE})
elseif (APPLE)
- set_target_properties(onnxruntime_pybind11_state PROPERTIES LINK_FLAGS "${ONNXRUNTIME_SO_LINK_FLAG} -Xlinker -undefined -Xlinker dynamic_lookup")
+ # The following flag no longer works
+ #target_link_options(onnxruntime_pybind11_state PRIVATE "LINKER:-undefined,dynamic_lookup")
set_target_properties(onnxruntime_pybind11_state PROPERTIES
INSTALL_RPATH "@loader_path"
BUILD_WITH_INSTALL_RPATH TRUE
INSTALL_RPATH_USE_LINK_PATH FALSE)
else()
if (NOT CMAKE_SYSTEM_NAME MATCHES "AIX")
- set_property(TARGET onnxruntime_pybind11_state APPEND_STRING PROPERTY LINK_FLAGS " -Xlinker -rpath=\\$ORIGIN")
+ target_link_options(onnxruntime_pybind11_state PRIVATE "LINKER:-rpath=\$ORIGIN")
endif()
endif()
@@ -238,8 +238,8 @@ if (onnxruntime_ENABLE_EXTERNAL_CUSTOM_OP_SCHEMAS)
MATH(EXPR PROTOBUF_INDEX_NEXT "${PROTOBUF_INDEX} + 1")
if (ONNX_INDEX GREATER_EQUAL 0 AND PROTOBUF_INDEX GREATER_EQUAL 0)
# Expect protobuf to follow onnx due to dependence
- list(INSERT onnxruntime_CUSTOM_EXTERNAL_LIBRARIES ${ONNX_INDEX} "-Wl,--no-as-needed")
- list(INSERT onnxruntime_CUSTOM_EXTERNAL_LIBRARIES ${PROTOBUF_INDEX_NEXT} "-Wl,--as-needed")
+ list(INSERT onnxruntime_CUSTOM_EXTERNAL_LIBRARIES ${ONNX_INDEX} "LINKER:--no-as-needed")
+ list(INSERT onnxruntime_CUSTOM_EXTERNAL_LIBRARIES ${PROTOBUF_INDEX_NEXT} "LINKER:--as-needed")
else()
message(FATAL_ERROR "Required external libraries onnx and protobuf are not found in onnxruntime_EXTERNAL_LIBRARIES")
endif()
@@ -964,37 +964,6 @@ if (onnxruntime_USE_ROCM)
)
endif()
-if (onnxruntime_USE_TVM)
- file(GLOB onnxruntime_python_providers_tvm_srcs CONFIGURE_DEPENDS
- "${ONNXRUNTIME_ROOT}/python/providers/tvm/*.py"
- )
- add_custom_command(
- TARGET onnxruntime_pybind11_state POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E make_directory $/onnxruntime/providers
- COMMAND ${CMAKE_COMMAND} -E make_directory $/onnxruntime/providers/tvm
- COMMAND ${CMAKE_COMMAND} -E copy
- ${onnxruntime_python_providers_tvm_srcs}
- $/onnxruntime/providers/tvm
- COMMAND ${CMAKE_COMMAND} -E copy
- $
- $/onnxruntime/capi/
- )
-
- add_custom_command(
- TARGET onnxruntime_pybind11_state POST_BUILD
- WORKING_DIRECTORY ${tvm_SOURCE_DIR}/python
- COMMAND ${Python_EXECUTABLE} setup.py bdist_wheel
- )
-
- add_custom_command(
- TARGET onnxruntime_pybind11_state POST_BUILD
- COMMAND ${Python_EXECUTABLE}
- $/onnxruntime/providers/tvm/extend_python_file.py
- --target_file $/onnxruntime/capi/_ld_preload.py
- )
-
-endif()
-
if (onnxruntime_USE_DML)
if (NOT onnxruntime_USE_CUSTOM_DIRECTML)
set(dml_shared_lib_path ${DML_PACKAGE_DIR}/bin/${onnxruntime_target_platform}-win/${DML_SHARED_LIB})
@@ -1050,4 +1019,13 @@ if (onnxruntime_USE_QNN)
endif()
endif()
+if (onnxruntime_USE_VSINPU)
+ add_custom_command(
+ TARGET onnxruntime_pybind11_state POST_BUILD
+ COMMAND ${CMAKE_COMMAND} -E copy
+ $
+ $/onnxruntime/capi/
+ )
+endif()
+
endif()
diff --git a/cmake/onnxruntime_unittests.cmake b/cmake/onnxruntime_unittests.cmake
index 67e5a9c0aa08b..e822f0a3655fc 100644
--- a/cmake/onnxruntime_unittests.cmake
+++ b/cmake/onnxruntime_unittests.cmake
@@ -9,9 +9,6 @@ set(TEST_INC_DIR ${ONNXRUNTIME_ROOT})
if (onnxruntime_ENABLE_TRAINING)
list(APPEND TEST_INC_DIR ${ORTTRAINING_ROOT})
endif()
-if (onnxruntime_USE_TVM)
- list(APPEND TEST_INC_DIR ${TVM_INCLUDES})
-endif()
set(disabled_warnings)
function(AddTest)
@@ -67,7 +64,10 @@ function(AddTest)
if(onnxruntime_USE_CUDA)
#XXX: we should not need to do this. onnxruntime_test_all.exe should not have direct dependency on CUDA DLLs,
# otherwise it will impact when CUDA DLLs can be unloaded.
- target_link_libraries(${_UT_TARGET} PRIVATE CUDA::cudart cudnn_frontend)
+ target_link_libraries(${_UT_TARGET} PRIVATE CUDA::cudart)
+ if(NOT onnxruntime_CUDA_MINIMAL)
+ target_link_libraries(${_UT_TARGET} PRIVATE cudnn_frontend)
+ endif()
endif()
target_link_libraries(${_UT_TARGET} PRIVATE ${_UT_LIBS} GTest::gtest GTest::gmock ${onnxruntime_EXTERNAL_LIBRARIES})
endif()
@@ -111,7 +111,6 @@ function(AddTest)
endif()
target_compile_options(${_UT_TARGET} PRIVATE ${disabled_warnings})
else()
- target_compile_options(${_UT_TARGET} PRIVATE ${DISABLED_WARNINGS_FOR_TVM})
target_compile_options(${_UT_TARGET} PRIVATE "$<$:SHELL:--compiler-options -Wno-error=sign-compare>"
"$<$>:-Wno-error=sign-compare>")
if (${HAS_NOERROR})
@@ -523,6 +522,9 @@ set (onnxruntime_global_thread_pools_test_SRC
${ONNXRUNTIME_GLOBAL_THREAD_POOLS_TEST_SRC_DIR}/test_main.cc
${ONNXRUNTIME_GLOBAL_THREAD_POOLS_TEST_SRC_DIR}/test_inference.cc)
+set (onnxruntime_webgpu_external_dawn_test_SRC
+ ${TEST_SRC_DIR}/webgpu/external_dawn/main.cc)
+
# tests from lowest level library up.
# the order of libraries should be maintained, with higher libraries being added first in the list
@@ -638,13 +640,11 @@ set(ONNXRUNTIME_TEST_LIBS
${PROVIDERS_ACL}
${PROVIDERS_ARMNN}
${PROVIDERS_COREML}
- # ${PROVIDERS_TVM}
${PROVIDERS_XNNPACK}
${PROVIDERS_AZURE}
onnxruntime_optimizer
onnxruntime_providers
onnxruntime_util
- ${onnxruntime_tvm_libs}
onnxruntime_lora
onnxruntime_framework
onnxruntime_util
@@ -746,12 +746,6 @@ if(onnxruntime_USE_AZURE)
list(APPEND onnxruntime_test_providers_libs onnxruntime_providers_azure)
endif()
-if(WIN32)
- if (onnxruntime_USE_TVM)
- list(APPEND disabled_warnings ${DISABLED_WARNINGS_FOR_TVM})
- endif()
-endif()
-
file(GLOB onnxruntime_test_framework_src CONFIGURE_DEPENDS
${onnxruntime_test_framework_src_patterns}
)
@@ -852,9 +846,6 @@ if (onnxruntime_ENABLE_TRAINING_APIS)
list(APPEND all_tests ${onnxruntime_test_training_api_src})
endif()
-if (onnxruntime_USE_TVM)
- list(APPEND all_tests ${onnxruntime_test_tvm_src})
-endif()
if (onnxruntime_USE_OPENVINO)
list(APPEND all_tests ${onnxruntime_test_openvino_src})
@@ -1086,15 +1077,6 @@ if (NOT onnxruntime_ENABLE_TRAINING_TORCH_INTEROP)
COMMAND ${CMAKE_COMMAND} -E copy ${DNNL_DLL_PATH} $
)
endif()
- if(WIN32)
- if (onnxruntime_USE_TVM)
- add_custom_command(
- TARGET ${test_data_target} POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy $ $
- )
- endif()
- endif()
-
if(WIN32)
set(wide_get_opt_src_dir ${TEST_SRC_DIR}/win_getopt/wide)
onnxruntime_add_static_library(win_getopt_wide ${wide_get_opt_src_dir}/getopt.cc ${wide_get_opt_src_dir}/include/getopt.h)
@@ -1136,12 +1118,6 @@ if (NOT IOS)
endif()
set_target_properties(onnx_test_runner PROPERTIES FOLDER "ONNXRuntimeTest")
- if (onnxruntime_USE_TVM)
- if (WIN32)
- target_link_options(onnx_test_runner PRIVATE "/STACK:4000000")
- endif()
- endif()
-
install(TARGETS onnx_test_runner
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
@@ -1295,11 +1271,6 @@ if (NOT onnxruntime_ENABLE_TRAINING_TORCH_INTEROP)
endif()
set_target_properties(onnxruntime_perf_test PROPERTIES FOLDER "ONNXRuntimeTest")
- if (onnxruntime_USE_TVM)
- if (WIN32)
- target_link_options(onnxruntime_perf_test PRIVATE "/STACK:4000000")
- endif()
- endif()
endif()
@@ -1884,4 +1855,13 @@ if (NOT onnxruntime_MINIMAL_BUILD AND NOT onnxruntime_EXTENDED_MINIMAL_BUILD
endif()
endif()
+if (onnxruntime_USE_WEBGPU AND onnxruntime_USE_EXTERNAL_DAWN)
+ AddTest(TARGET onnxruntime_webgpu_external_dawn_test
+ SOURCES ${onnxruntime_webgpu_external_dawn_test_SRC}
+ LIBS dawn::dawn_native ${onnxruntime_test_providers_libs}
+ DEPENDS ${all_dependencies}
+ )
+ onnxruntime_add_include_to_target(onnxruntime_webgpu_external_dawn_test dawn::dawncpp_headers dawn::dawn_headers)
+endif()
+
include(onnxruntime_fuzz_test.cmake)
diff --git a/cmake/patches/dawn/dawn.patch b/cmake/patches/dawn/dawn.patch
deleted file mode 100644
index d696d386452e8..0000000000000
--- a/cmake/patches/dawn/dawn.patch
+++ /dev/null
@@ -1,66 +0,0 @@
-diff --git a/src/dawn/native/CMakeLists.txt b/src/dawn/native/CMakeLists.txt
-index 9c0bd6fa4e..bf8a57aeac 100644
---- a/src/dawn/native/CMakeLists.txt
-+++ b/src/dawn/native/CMakeLists.txt
-@@ -857,6 +857,11 @@ if (DAWN_ENABLE_SWIFTSHADER)
- target_compile_definitions(dawn_native PRIVATE "DAWN_ENABLE_SWIFTSHADER")
- endif()
-
-+if (IOS)
-+ target_compile_options(dawn_native_objects PRIVATE -fno-objc-arc)
-+ target_compile_options(dawn_native PRIVATE -fno-objc-arc)
-+endif()
-+
- if (DAWN_BUILD_MONOLITHIC_LIBRARY)
- ###############################################################################
- # Do the 'complete_lib' build.
-diff --git a/src/dawn/native/Surface_metal.mm b/src/dawn/native/Surface_metal.mm
-index ce55acbd43..baa4835362 100644
---- a/src/dawn/native/Surface_metal.mm
-+++ b/src/dawn/native/Surface_metal.mm
-@@ -36,7 +36,13 @@
- namespace dawn::native {
-
- bool InheritsFromCAMetalLayer(void* obj) {
-- id object = static_cast(obj);
-+ id object =
-+#if TARGET_OS_IOS
-+ (__bridge id)obj;
-+#else
-+ static_cast(obj);
-+#endif
-+
- return [object isKindOfClass:[CAMetalLayer class]];
- }
-
-diff --git a/src/dawn/native/metal/SharedFenceMTL.mm b/src/dawn/native/metal/SharedFenceMTL.mm
-index bde8bfea07..f2f6459e91 100644
---- a/src/dawn/native/metal/SharedFenceMTL.mm
-+++ b/src/dawn/native/metal/SharedFenceMTL.mm
-@@ -40,7 +40,13 @@ ResultOrError> SharedFence::Create(
- DAWN_INVALID_IF(descriptor->sharedEvent == nullptr, "MTLSharedEvent is missing.");
- if (@available(macOS 10.14, iOS 12.0, *)) {
- return AcquireRef(new SharedFence(
-- device, label, static_cast>(descriptor->sharedEvent)));
-+ device, label,
-+#if TARGET_OS_IOS
-+ (__bridge id)(descriptor->sharedEvent)
-+#else
-+ static_cast>(descriptor->sharedEvent)
-+#endif
-+ ));
- } else {
- return DAWN_INTERNAL_ERROR("MTLSharedEvent not supported.");
- }
-diff --git a/src/tint/api/BUILD.cmake b/src/tint/api/BUILD.cmake
-index 0037d83276..6372c4ee77 100644
---- a/src/tint/api/BUILD.cmake
-+++ b/src/tint/api/BUILD.cmake
-@@ -57,6 +57,7 @@ tint_target_add_dependencies(tint_api lib
- tint_lang_wgsl_ast_transform
- tint_lang_wgsl_common
- tint_lang_wgsl_features
-+ tint_lang_wgsl_inspector
- tint_lang_wgsl_program
- tint_lang_wgsl_sem
- tint_lang_wgsl_writer_ir_to_program
diff --git a/cmake/patches/eigen/eigen-edge.patch b/cmake/patches/eigen/eigen-edge.patch
new file mode 100644
index 0000000000000..d8dc850b4bd55
--- /dev/null
+++ b/cmake/patches/eigen/eigen-edge.patch
@@ -0,0 +1,13 @@
+diff --git a/Eigen/src/Core/util/IndexedViewHelper.h b/Eigen/src/Core/util/IndexedViewHelper.h
+index f85de305f..3dc2bb5e7 100644
+--- a/Eigen/src/Core/util/IndexedViewHelper.h
++++ b/Eigen/src/Core/util/IndexedViewHelper.h
+@@ -178,7 +178,7 @@ namespace placeholders {
+
+ EIGEN_DEPRECATED static const all_t all = Eigen::all; // PLEASE use Eigen::all instead of Eigen::placeholders::all
+ EIGEN_DEPRECATED static const last_t last = Eigen::last; // PLEASE use Eigen::last instead of Eigen::placeholders::last
+- EIGEN_DEPRECATED static const end_t end = Eigen::lastp1; // PLEASE use Eigen::lastp1 instead of Eigen::placeholders::end
++ // EIGEN_DEPRECATED static const end_t end = Eigen::lastp1; // PLEASE use Eigen::lastp1 instead of Eigen::placeholders::end
+ }
+
+ } // end namespace Eigen
diff --git a/cmake/target_delayload.cmake b/cmake/target_delayload.cmake
index 53f252a3e71ac..92273f5424233 100644
--- a/cmake/target_delayload.cmake
+++ b/cmake/target_delayload.cmake
@@ -6,9 +6,12 @@ function(target_delayload target_name)
if(NOT MSVC)
message(SEND_ERROR "Delayloading is only supported in MSVC")
endif()
- foreach(lib ${ARGN})
- target_link_options(${target_name} PRIVATE /DELAYLOAD:"${lib}")
- endforeach()
+ if(onnxruntime_ENABLE_DELAY_LOADING_WIN_DLLS)
+ foreach(lib ${ARGN})
+ target_link_options(${target_name} PRIVATE /DELAYLOAD:"${lib}")
+ endforeach()
- target_link_libraries(${target_name} PRIVATE delayimp.lib)
+ target_link_libraries(${target_name} PRIVATE delayimp.lib)
+ endif()
endfunction()
+
diff --git a/cmake/vcpkg.json b/cmake/vcpkg.json
index 159b8654c1cb1..fcb2c7d5de89b 100644
--- a/cmake/vcpkg.json
+++ b/cmake/vcpkg.json
@@ -66,6 +66,12 @@
"platform": "windows"
}
],
+ "overrides": [
+ {
+ "name": "flatbuffers",
+ "version": "23.5.26"
+ }
+ ],
"features": {
"tests": {
"description": "Build ONNXRuntime unit tests",
diff --git a/csharp/ApiDocs/docfx.json b/csharp/ApiDocs/docfx.json
index 0671d4aeb7d95..88a3283ad76e8 100644
--- a/csharp/ApiDocs/docfx.json
+++ b/csharp/ApiDocs/docfx.json
@@ -14,7 +14,7 @@
"disableDefaultFilter": false,
"noRestore": true,
"properties": {
- "AllowUnsafeBlocks": true,
+ "AllowUnsafeBlocks": "true",
"TargetFramework": "net8.0",
"Nullable": "enable",
"LangVersion": "8.0",
diff --git a/csharp/OnnxRuntime.CSharp.proj b/csharp/OnnxRuntime.CSharp.proj
index 95207d158affe..6779fd60bcd0a 100644
--- a/csharp/OnnxRuntime.CSharp.proj
+++ b/csharp/OnnxRuntime.CSharp.proj
@@ -64,13 +64,6 @@ CMake creates a target to this project
-
-
-
-
-
-
-
@@ -153,7 +146,7 @@ CMake creates a target to this project
$(BaseTargets);$(MobileTargets)
+
+
+ true
+ true
+ true
+
+
+ true
+ true
+ true
+ true
+
+ $(ProjectDir)..\..\..
+
+
+ true
+
+
+
Microsoft.ML.OnnxRuntimeMicrosoft.ML.OnnxRuntime
@@ -66,54 +93,31 @@
Commit: $(BUILD_SOURCEVERSION)
Build: https://aiinfra.visualstudio.com/Lotus/_build/results?buildId=$(BUILD_BUILDID)
+ README.md
+ LICENSE.txt
+
+
+ true
+
+ true
+ ..\..\OnnxRuntime.snk
+
+ $(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb
+ AnyCPU;x86defaulttrue
- true
- ..\..\OnnxRuntime.snk
-
- $(ProjectDir)..\..\..
- $(OnnxRuntimeRoot)\csharpx64falsefalseportable
-
- true
-
-
- true
-
-
-
-
- false
- $(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdbDebug;Release;RelWithDebInfo
-
- true
- true
- true
-
-
- true
- true
- true
-
-
- $(OnnxRuntimeCsharpRoot)\..\build\Linux
- $(OnnxRuntimeBuildDirectory)\$(Configuration)
-
-
-
- $(OnnxRuntimeCsharpRoot)\..\build\Windows$(OnnxRuntimeBuildDirectory)\$(Configuration)\$(Configuration)
-
-
-
- $(OnnxRuntimeCsharpRoot)\..\build\MacOS
+ $(OnnxRuntimeBuildDirectory)\$(Configuration)
-
+ $(OrtConstants);__MOBILE__
@@ -155,12 +148,12 @@
$(OrtConstants);__ANDROID__
-
+ $(OrtConstants);__IOS__
-
-
+
+ $(OrtConstants);__ENABLE_COREML__
@@ -178,128 +171,6 @@
$(DefineConstants);$(OrtConstants)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/NativeMethods.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/NativeMethods.shared.cs
index be157a0419fc0..d628b065ceaa7 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/NativeMethods.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/NativeMethods.shared.cs
@@ -1142,9 +1142,6 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
[DllImport(NativeLib.DllName, CharSet = CharSet.Ansi)]
public static extern IntPtr /*(OrtStatus*)*/ OrtSessionOptionsAppendExecutionProvider_MIGraphX(IntPtr /*(OrtSessionOptions*)*/ options, int device_id);
-
- [DllImport(NativeLib.DllName, CharSet = CharSet.Ansi)]
- public static extern IntPtr /*(OrtStatus*)*/ OrtSessionOptionsAppendExecutionProvider_Tvm(IntPtr /*(OrtSessionOptions*) */ options, byte[] /*(char char*)*/ settings);
#endif
///
/// Append a TensorRT EP instance (configured based on given provider options) to the native OrtSessionOptions instance
@@ -1272,7 +1269,7 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
///
/// Append an execution provider instance to the native OrtSessionOptions instance.
///
- /// 'SNPE' and 'XNNPACK' are currently supported as providerName values.
+ /// 'SNPE', 'XNNPACK' and 'CoreML' are currently supported as providerName values.
///
/// The number of providerOptionsKeys must match the number of providerOptionsValues and equal numKeys.
///
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs
index 3acd84b3016de..bd450451a1265 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs
@@ -146,27 +146,6 @@ public static SessionOptions MakeSessionOptionWithTensorrtProvider(OrtTensorRTPr
}
}
- ///
- /// A helper method to construct a SessionOptions object for TVM execution.
- /// Use only if you have the onnxruntime package specific to this Execution Provider.
- ///
- /// settings string, comprises of comma separated key:value pairs. default is empty
- /// A SessionsOptions() object configured for execution with TVM
- public static SessionOptions MakeSessionOptionWithTvmProvider(String settings = "")
- {
- SessionOptions options = new SessionOptions();
- try
- {
- options.AppendExecutionProvider_Tvm(settings);
- return options;
- }
- catch (Exception)
- {
- options.Dispose();
- throw;
- }
- }
-
///
/// A helper method to construct a SessionOptions object for ROCM execution.
/// Use only if ROCM is installed and you have the onnxruntime package specific to this Execution Provider.
@@ -397,20 +376,6 @@ public void AppendExecutionProvider_CoreML(CoreMLFlags coremlFlags = CoreMLFlags
#endif
}
- ///
- /// Use only if you have the onnxruntime package specific to this Execution Provider.
- ///
- /// string with TVM specific settings
- public void AppendExecutionProvider_Tvm(string settings = "")
- {
-#if __MOBILE__
- throw new NotSupportedException("The TVM Execution Provider is not supported in this build");
-#else
- var utf8 = NativeOnnxValueHelper.StringToZeroTerminatedUtf8(settings);
- NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Tvm(handle, utf8));
-#endif
- }
-
private class ExecutionProviderAppender
{
private byte[] _utf8ProviderName;
@@ -430,16 +395,10 @@ public IntPtr Appender(IntPtr handle, IntPtr[] optKeys, IntPtr[] optValues, UInt
///
/// Append QNN, SNPE or XNNPACK execution provider
///
- /// Execution provider to add. 'QNN', 'SNPE' or 'XNNPACK' are currently supported.
+ /// Execution provider to add. 'QNN', 'SNPE', 'XNNPACK', 'CoreML' and 'AZURE' are currently supported.
/// Optional key/value pairs to specify execution provider options.
public void AppendExecutionProvider(string providerName, Dictionary providerOptions = null)
{
- if (providerName != "SNPE" && providerName != "XNNPACK" && providerName != "QNN" && providerName != "AZURE")
- {
- throw new NotSupportedException(
- "Only QNN, SNPE, XNNPACK and AZURE execution providers can be enabled by this method.");
- }
-
if (providerOptions == null)
{
providerOptions = new Dictionary();
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
index aa0e6ee62248a..17738da515134 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
@@ -146,10 +146,6 @@ public void TestSessionOptions()
opt.AppendExecutionProvider_Nnapi(0);
#endif
-#if USE_TVM
- opt.AppendExecutionProvider_Tvm("Vulkan -device=amd_apu");
-#endif
-
#if USE_OPENVINO
opt.AppendExecutionProvider_OpenVINO();
#endif
@@ -179,6 +175,12 @@ public void TestSessionOptions()
ex = Assert.Throws(() => { opt.AppendExecutionProvider("QNN"); });
Assert.Contains("QNN execution provider is not supported in this build", ex.Message);
#endif
+#if USE_COREML
+ opt.AppendExecutionProvider("CoreML");
+#else
+ ex = Assert.Throws(() => { opt.AppendExecutionProvider("CoreML"); });
+ Assert.Contains("CoreML execution provider is not supported in this build", ex.Message);
+#endif
opt.AppendExecutionProvider_CPU(1);
}
@@ -2041,7 +2043,7 @@ public SkipNonPackageTests()
}
// Test hangs on mobile.
-#if !(ANDROID || IOS)
+#if !(ANDROID || IOS)
[Fact(DisplayName = "TestModelRunAsyncTask")]
private async Task TestModelRunAsyncTask()
{
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Microsoft.ML.OnnxRuntime.Tests.Common.csproj b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Microsoft.ML.OnnxRuntime.Tests.Common.csproj
index 60d18ad31e811..07ca7fe7c64bf 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Microsoft.ML.OnnxRuntime.Tests.Common.csproj
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Microsoft.ML.OnnxRuntime.Tests.Common.csproj
@@ -1,16 +1,19 @@
+
+ true
+ true
+ true
+
+ $(ProjectDir)..\..\..
+ netstandard2.0;net8.0false
- $(ProjectDir)..\..AnyCPUbin\$(Configuration)\
- true
- true
- true
- $(OnnxRuntimeCsharpRoot)\..\cmake\external\onnx
+ $(OnnxRuntimeRoot)\cmake\external\onnx8981
@@ -22,30 +25,22 @@
..\..\OnnxRuntime.snkDebug;Release;RelWithDebInfo
+
Microsoft.ML.OnnxRuntime.TestsMicrosoft.ML.OnnxRuntime.Tests.Common
-
-
- $(OnnxRuntimeCsharpRoot)\..\build\Linux
- $(OnnxRuntimeBuildDirectory)\$(Configuration)
- $(OnnxRuntimeBuildDirectory)\$(Configuration)\external\protobuf\cmake
- $(ProtocDirectory)\protoc
-
-
-
- $(OnnxRuntimeCsharpRoot)\..\build\Windows
- $(OnnxRuntimeBuildDirectory)\$(Configuration)\$(Configuration)$(OnnxRuntimeBuildDirectory)\$(Configuration)\external\protobuf\cmake\$(Configuration)$(ProtocDirectory)\protoc.exe
+
+ $(OnnxRuntimeBuildDirectory)\$(Configuration)\external\protobuf\cmake
+ $(ProtocDirectory)\protoc
+
+
-
- $(OnnxRuntimeCsharpRoot)\..\build\MacOS
- $(OnnxRuntimeBuildDirectory)\$(Configuration)$(OnnxRuntimeBuildDirectory)\$(Configuration)\external\protobuf\cmake$(ProtocDirectory)\protoc
@@ -102,28 +97,6 @@
-
-
-
- PreserveNewest
- false
-
-
-
- PreserveNewest
- false
-
-
-
- PreserveNewest
- false
-
-
-
@@ -132,16 +105,20 @@
-
+
-
+
+
-
+
+
@@ -152,20 +129,20 @@
+
- TestData\%(Filename)%(Extension)
+ TestData\%(Filename)%(Extension)
-
- TestData\overridable_initializer.onnx
+
+ TestData\overridable_initializer.onnx
-
- TestData\capi_symbolic_dims.onnx
+
+ TestData\capi_symbolic_dims.onnx
-
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/NativeLibraryInclude.props b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/NativeLibraryInclude.props
new file mode 100644
index 0000000000000..3daab21dbcbac
--- /dev/null
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/NativeLibraryInclude.props
@@ -0,0 +1,171 @@
+
+
+
+
+ true
+ true
+ true
+
+
+ true
+ true
+ true
+ true
+
+
+ false
+ 1.20.0-dev-20241007
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(OnnxRuntimeRoot)\build\Windows
+ $(OnnxRuntimeBuildDirectory)\$(Configuration)\$(Configuration)
+
+
+
+ $(OnnxRuntimeRoot)\build\Linux
+ $(OnnxRuntimeBuildDirectory)\$(Configuration)
+
+
+
+ $(OnnxRuntimeRoot)\build\MacOS
+ $(OnnxRuntimeBuildDirectory)\$(Configuration)
+
+
+
+ $(OnnxRuntimeRoot)\build\Android
+ $(OnnxRuntimeBuildDirectory)\$(Configuration)
+
+
+
+ $(OnnxRuntimeRoot)\build\iOS
+ iPhoneSimulator
+ $(Platform.ToLower())
+ $(OnnxRuntimeBuildDirectory)\$(Configuration)\$(Configuration)-$(PlatformLower)
+
+
+
+ $(OnnxRuntimeRoot)\build\macOS
+ $(OnnxRuntimeBuildDirectory)\$(Configuration)
+
+
+
+
+ PreserveNewest
+ true
+
+
+
+
+
+ PreserveNewest
+ false
+
+
+
+
+
+ PreserveNewest
+ false
+
+
+
+
+
+ libs\libonnxruntime.so
+
+
+
+
+
+ libs\libonnxruntime.dylib
+ Dynamic
+ True
+ True
+
+
+
+
+
+ libs\libonnxruntime.dylib
+ Dynamic
+ True
+ True
+
+
+
+
+
+
+
+
+ false
+ true
+ false
+ true
+ false
+ true
+
+
+
+
+
+
+
+
+
+
+
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Tensors/TensorTests.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Tensors/TensorTests.cs
index 27cde1dbe9ed8..46dd292e8514e 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Tensors/TensorTests.cs
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Tensors/TensorTests.cs
@@ -2180,10 +2180,13 @@ public void GetArrayString(TensorConstructor constructor)
{22,23}
}
}";
+ // remove \r so the newlines are just \n on all platforms
+ expected = expected.Replace("\r", "");
+ var actual= tensor.GetArrayString().Replace("\r", "");
- Assert.Equal(expected, tensor.GetArrayString());
+ Assert.Equal(expected, actual);
- var expectedNoSpace = expected.Replace(Environment.NewLine, "").Replace(" ", "");
+ var expectedNoSpace = expected.Replace("\n", "").Replace(" ", "");
Assert.Equal(expectedNoSpace, tensor.GetArrayString(false));
}
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.MAUI/Microsoft.ML.OnnxRuntime.Tests.MAUI.csproj b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.MAUI/Microsoft.ML.OnnxRuntime.Tests.MAUI.csproj
index 210a04d78f107..e07448daeea7f 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.MAUI/Microsoft.ML.OnnxRuntime.Tests.MAUI.csproj
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.MAUI/Microsoft.ML.OnnxRuntime.Tests.MAUI.csproj
@@ -1,306 +1,125 @@
-
-
- true
- true
- true
- true
- $(ProjectDir)..\..\..
-
-
-
-
- net8.0-android;net8.0-ios;net8.0-maccatalyst
- $(TargetFrameworks);net8.0-windows10.0.19041.0
-
-
-
-
- Exe
- Microsoft.ML.OnnxRuntime.Tests.MAUI
- true
- true
- enable
- enable
- true
-
- 8002
-
-
- $(DefineConstants);INCLUDE_FAILING_TESTS
- $(DefineConstants);MODE_NON_INTERACTIVE_VISUAL
- $(DefineConstants);MODE_XHARNESS
-
-
- Microsoft.ML.OnnxRuntime.Tests.MAUI
-
-
- ORT.CSharp.Tests.MAUI
-
-
- 1.0
- 1
-
- 15.0
- 13.1
- 30.0
- 10.0.17763.0
- 10.0.17763.0
-
- true
- ..\..\OnnxRuntime.snk
-
-
- false
-
-
-
-
- $(OnnxRuntimeRoot)\build\microsoft.ml.onnxruntime.1.18.1\runtimes
-
- true
-
-
-
- $(OnnxRuntimeRoot)\build\Windows
- $(OnnxRuntimeBuildDirectory)\$(Configuration)\$(Configuration)
-
- $(PrebuiltRuntimesDir)\win-x64\native
-
-
- $(OnnxRuntimeRoot)\build\Android
- $(OnnxRuntimeBuildDirectory)\$(Configuration)
- $(PrebuiltRuntimesDir)\android\native\onnxruntime.aar
-
-
- $(OnnxRuntimeRoot)\build\iOS
- iPhoneSimulator
- $(Platform.ToLower())
- $(OnnxRuntimeBuildDirectory)\$(Configuration)\$(Configuration)-$(PlatformLower)
- $(PrebuiltRuntimesDir)\ios\native\onnxruntime.xcframework
-
-
- $(OnnxRuntimeRoot)\build\macOS
- $(OnnxRuntimeBuildDirectory)\$(Configuration)
- $(PrebuiltRuntimesDir)\ios\native\onnxruntime.xcframework
-
-
-
-
-
- PreserveNewest
- true
-
-
-
-
- PreserveNewest
- true
-
-
-
-
- PreserveNewest
- false
-
-
- PreserveNewest
- false
-
-
- PreserveNewest
- false
-
-
- PreserveNewest
- false
-
-
- PreserveNewest
- false
-
-
- PreserveNewest
- false
-
-
-
-
-
-
- libs\libonnxruntime.so
-
-
-
-
-
-
-
-
-
- libs\libonnxruntime.dylib
- Dynamic
- True
- True
-
-
-
-
- Framework
- True
- True
-
-
-
-
-
-
- libs\libonnxruntime.dylib
- Dynamic
- True
- True
-
-
-
-
- Framework
- True
- True
-
-
-
-
-
-
- false
- true
- false
- true
- false
- true
-
- false
- true
- false
- true
- false
- true
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- InferenceTest.cs
-
-
- OrtIoBindingAllocationTest.cs
-
-
- TensorTests.cs
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- <_VisualStudioTestRunnerFiles Include="@(PackagingOutputs)" Condition="$([System.String]::Copy('%(PackagingOutputs.FullPath)').Contains('xunit.runner.visualstudio'))" />
-
-
-
+
+ $(ProjectDir)..\..\..
+
+
+
+
+
+
+ net8.0-android;net8.0-ios;net8.0-maccatalyst
+ $(TargetFrameworks);net8.0-windows10.0.19041.0
+
+
+
+
+ Exe
+ Microsoft.ML.OnnxRuntime.Tests.MAUI
+ true
+ true
+ enable
+ enable
+ true
+
+ 8002
+
+
+ $(DefineConstants);INCLUDE_FAILING_TESTS
+ $(DefineConstants);MODE_NON_INTERACTIVE_VISUAL
+ $(DefineConstants);MODE_XHARNESS
+
+
+ Microsoft.ML.OnnxRuntime.Tests.MAUI
+
+
+ ORT.CSharp.Tests.MAUI
+
+
+ 1.0
+ 1
+
+ 15.0
+ 13.1
+ 30.0
+ 10.0.17763.0
+ 10.0.17763.0
+
+ true
+ ..\..\OnnxRuntime.snk
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ InferenceTest.cs
+
+
+ OrtIoBindingAllocationTest.cs
+
+
+ TensorTests.cs
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <_VisualStudioTestRunnerFiles
+ Include="@(PackagingOutputs)"
+ Condition="$([System.String]::Copy('%(PackagingOutputs.FullPath)').Contains('xunit.runner.visualstudio'))" />
+
+
+
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.MAUI/ReadMe.md b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.MAUI/ReadMe.md
new file mode 100644
index 0000000000000..07cb5fe7c9b3d
--- /dev/null
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.MAUI/ReadMe.md
@@ -0,0 +1,9 @@
+The MAUI test project can be optionally used with a pre-built ONNX Runtime native nuget package (Microsoft.ML.OnnxRuntime).
+
+To do so, specify the `UsePrebuiltNativePackage` and `CurrentOnnxRuntimeVersion` properties when building the project. These can be set via the command-line or as environment variables.
+
+For example:
+
+```cmd
+dotnet build csharp\test\Microsoft.ML.OnnxRuntime.Tests.MAUI\Microsoft.ML.OnnxRuntime.Tests.MAUI.csproj --property:UsePrebuiltNativePackage=true --property:CurrentOnnxRuntimeVersion=1.19.2 --source directory_containing_native_nuget_package --source https://api.nuget.org/v3/index.json
+```
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp.csproj b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp.csproj
index b822c999e4d39..a8abcd2b4aa1c 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp.csproj
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp.csproj
@@ -1,4 +1,9 @@
+
+ $(ProjectDir)..\..\..
+
+
+ net8.0
@@ -6,9 +11,7 @@
$(ProjectDir)..\..AnyCPU;x86bin\$(Configuration)\
- true
- true
- true
+
$(OnnxSourceDirectory)\onnxdefault
@@ -35,19 +38,19 @@
- $(OnnxRuntimeCsharpRoot)\..\build\Linux
+ $(OnnxRuntimeRoot)\build\Linux$(OnnxRuntimeBuildDirectory)\$(Configuration)
- $(OnnxRuntimeCsharpRoot)\..\build\Windows
+ $(OnnxRuntimeRoot)\build\Windows$(OnnxRuntimeBuildDirectory)\$(Configuration)\$(Configuration)
- $(OnnxRuntimeCsharpRoot)\..\build\MacOS
+ $(OnnxRuntimeRoot)\build\MacOS$(OnnxRuntimeBuildDirectory)\$(Configuration)
@@ -58,15 +61,14 @@
PreserveNewest
@@ -74,45 +76,39 @@
PreserveNewestfalsePreserveNewestfalse
-
- PreserveNewest
- false
-
-
+ PreserveNewestfalse
-
- PreserveNewest
- false
-
-
+
+ PreserveNewestfalse
-
+
+ PreserveNewestfalse
-
+
+ PreserveNewestfalse
+
@@ -131,7 +127,7 @@
-
+ PreserveNewestfalse
diff --git a/csharp/tools/MauiModelTester/Platforms/iOS/Info.plist b/csharp/tools/MauiModelTester/Platforms/iOS/Info.plist
index 0004a4fdee5d5..fbb865624bbda 100644
--- a/csharp/tools/MauiModelTester/Platforms/iOS/Info.plist
+++ b/csharp/tools/MauiModelTester/Platforms/iOS/Info.plist
@@ -27,6 +27,6 @@
UIInterfaceOrientationLandscapeRightXSAppIconAssets
- Assets.xcassets/appicon.appiconset
+ Assets.xcassets/onnxruntime_icon.appiconset
diff --git a/dockerfiles/Dockerfile.cuda b/dockerfiles/Dockerfile.cuda
index d2d656648f2e7..40f11dca623a7 100644
--- a/dockerfiles/Dockerfile.cuda
+++ b/dockerfiles/Dockerfile.cuda
@@ -48,7 +48,7 @@ RUN cd /code \
&& python3 -m venv /code/env \
&& . /code/env/bin/activate \
&& pip install --upgrade psutil setuptools wheel packaging \
- && pip install -r tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/requirements.txt \
+ && pip install -r /code/tools/ci_build/github/linux/python/requirements.txt \
&& python /code/tools/ci_build/build.py --build_dir /code/build/Linux \
--allow_running_as_root --skip_submodule_sync \
--use_cuda --cuda_home /usr/local/cuda \
@@ -56,7 +56,6 @@ RUN cd /code \
--build_shared_lib --skip_tests \
--config Release --build_wheel --update --build --parallel \
--cmake_generator Ninja \
- --enable_cuda_nhwc_ops \
--cmake_extra_defines ONNXRUNTIME_VERSION=$(cat ./VERSION_NUMBER) "CMAKE_CUDA_ARCHITECTURES=${CMAKE_CUDA_ARCHITECTURES}" onnxruntime_BUILD_UNIT_TESTS=OFF
# Start second stage to copy the build artifacts
diff --git a/dockerfiles/Dockerfile.migraphx b/dockerfiles/Dockerfile.migraphx
index c5d998d503899..876a07e4ffaf6 100644
--- a/dockerfiles/Dockerfile.migraphx
+++ b/dockerfiles/Dockerfile.migraphx
@@ -10,7 +10,7 @@ FROM rocm/pytorch:rocm6.2.3_ubuntu22.04_py3.10_pytorch_release_2.3.0
ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime
ARG ONNXRUNTIME_BRANCH=main
-ENV PATH /code/cmake-3.27.3-linux-x86_64/bin:${PATH}
+ENV PATH=/code/cmake-3.27.3-linux-x86_64/bin:${PATH}
RUN apt-get update &&\
apt-get install -y migraphx
diff --git a/dockerfiles/Dockerfile.openvino b/dockerfiles/Dockerfile.openvino
index 39e75a68a369f..d1ebdae3cbdd6 100644
--- a/dockerfiles/Dockerfile.openvino
+++ b/dockerfiles/Dockerfile.openvino
@@ -11,7 +11,7 @@ FROM openvino/ubuntu22_runtime:${OPENVINO_VERSION} AS builder
ENV WORKDIR_PATH=/home/openvino
WORKDIR $WORKDIR_PATH
-ENV DEBIAN_FRONTEND noninteractive
+ENV DEBIAN_FRONTEND=noninteractive
ARG DEVICE=CPU
ARG ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime.git
@@ -41,7 +41,7 @@ RUN tar cvf GPL_sources.tar.gz /sources
# Deploy stage
FROM openvino/ubuntu22_runtime:${OPENVINO_VERSION}
-ENV DEBIAN_FRONTEND noninteractive
+ENV DEBIAN_FRONTEND=noninteractive
USER root
COPY --from=builder /home/openvino/onnxruntime/build/Linux/Release/dist/*.whl ./
COPY --from=builder /GPL_sources.tar.gz ./
@@ -50,7 +50,7 @@ ARG BUILD_UID=1001
ARG BUILD_USER=onnxruntimedev
RUN adduser --uid $BUILD_UID $BUILD_USER
RUN usermod -a -G video,users ${BUILD_USER}
-ENV WORKDIR_PATH /home/${BUILD_USER}
+ENV WORKDIR_PATH=/home/${BUILD_USER}
WORKDIR ${WORKDIR_PATH}
USER ${BUILD_USER}
diff --git a/dockerfiles/Dockerfile.rocm b/dockerfiles/Dockerfile.rocm
index bef8d7a5f47d2..aca8c3feaff71 100644
--- a/dockerfiles/Dockerfile.rocm
+++ b/dockerfiles/Dockerfile.rocm
@@ -12,7 +12,7 @@ ARG ONNXRUNTIME_BRANCH=main
WORKDIR /code
-ENV PATH /code/cmake-3.27.3-linux-x86_64/bin:${PATH}
+ENV PATH=/code/cmake-3.27.3-linux-x86_64/bin:${PATH}
# Prepare onnxruntime repository & build onnxruntime
RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXRUNTIME_REPO} onnxruntime &&\
diff --git a/dockerfiles/Dockerfile.tensorrt b/dockerfiles/Dockerfile.tensorrt
index ef51d41c5ff1b..24947df6308a6 100644
--- a/dockerfiles/Dockerfile.tensorrt
+++ b/dockerfiles/Dockerfile.tensorrt
@@ -17,7 +17,7 @@ RUN apt-get update &&\
RUN unattended-upgrade
WORKDIR /code
-ENV PATH /usr/local/nvidia/bin:/usr/local/cuda/bin:/code/cmake-3.27.3-linux-x86_64/bin:/opt/miniconda/bin:${PATH}
+ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:/code/cmake-3.27.3-linux-x86_64/bin:/opt/miniconda/bin:${PATH}
# Prepare onnxruntime repository & build onnxruntime with TensorRT
RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXRUNTIME_REPO} onnxruntime &&\
diff --git a/dockerfiles/Dockerfile.vitisai b/dockerfiles/Dockerfile.vitisai
index e11ab70a61332..c6226155e01e3 100644
--- a/dockerfiles/Dockerfile.vitisai
+++ b/dockerfiles/Dockerfile.vitisai
@@ -22,8 +22,8 @@ RUN apt-get update && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
-ENV PATH /code/cmake-3.27.3-linux-x86_64/bin:$PATH
-ENV LD_LIBRARY_PATH /opt/xilinx/xrt/lib:$LD_LIBRARY_PATH
+ENV PATH=/code/cmake-3.27.3-linux-x86_64/bin:$PATH
+ENV LD_LIBRARY_PATH=/opt/xilinx/xrt/lib:$LD_LIBRARY_PATH
WORKDIR /code
RUN . $VAI_ROOT/conda/etc/profile.d/conda.sh &&\
diff --git a/docs/ContribOperators.md b/docs/ContribOperators.md
index b87532debe4bc..6ea3f93cdea12 100644
--- a/docs/ContribOperators.md
+++ b/docs/ContribOperators.md
@@ -1596,6 +1596,8 @@ This version of the operator has been available since version 1 of the 'com.micr
(Optional) Hardware architecture.
main_context : int
Usually each single EPContext associate with a graph partition.But for some case like QNN, it has single EPContext contains all partitions.In that case, the node with ep_cache_context should set main_context=1. Other nodes set main_context=0 and skip ep_cache_context.The path is relative to this Onnx file. Default is 1.