ONNX Runtime GitHub → Microsoft.ML.OnnxRuntime.Gpu.Windows. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
+ "For CUDA 12.X (default): Install Nuget package Microsoft.ML.OnnxRuntime.Gpu.Windows. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
'windows,C++,X64,CUDA':
- "For CUDA 12.X (default): Install Nuget package Microsoft.ML.OnnxRuntime.Gpu.Windows. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
+ "For CUDA 12.X (default): Install Nuget package Microsoft.ML.OnnxRuntime.Gpu.Windows. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
'windows,C#,X64,CUDA':
- "For CUDA 12.X (default): Install Nuget package Microsoft.ML.OnnxRuntime.Gpu.Windows. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
+ "For CUDA 12.X (default): Install Nuget package Microsoft.ML.OnnxRuntime.Gpu.Windows. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
'windows,Python,X64,CUDA':
- "For CUDA 12.X (default): pip install onnxruntime-gpu
For CUDA 11.X: pip install onnxruntime-gpu --extra-index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-11/pypi/simple/
Refer to docs for requirements.",
+ "For CUDA 12.X (default): pip install onnxruntime-gpu
For CUDA 11.X: pip install onnxruntime-gpu --extra-index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-11/pypi/simple/
Refer to docs for requirements.",
'linux,Python,ARM64,CUDA':
"For Jetpack 4.4+, follow installation instructions from here.
Note: We do not have CUDA 12.X ARM64 support at this time.",
'linux,C-API,X64,CUDA':
- "Download .tgz file from Github. Refer to docs for requirements.",
+ "Download .tgz file from Github. Refer to docs for requirements.",
'linux,C++,X64,CUDA':
- "For CUDA 12.X (default): Install Nuget package Microsoft.ML.OnnxRuntime.Gpu.Linux. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
+ "For CUDA 12.X (default): Install Nuget package Microsoft.ML.OnnxRuntime.Gpu.Linux. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
'linux,C#,X64,CUDA':
- "For CUDA 12.X (default): Install Nuget package Microsoft.ML.OnnxRuntime.Gpu.Linux. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
+ "For CUDA 12.X (default): Install Nuget package Microsoft.ML.OnnxRuntime.Gpu.Linux. For CUDA 11.X: Follow instructions here. Refer to docs for requirements.",
'linux,Python,X64,CUDA':
- "For CUDA 12.X (default): pip install onnxruntime-gpu
For CUDA 11.X: pip install onnxruntime-gpu --extra-index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-11/pypi/simple/
Refer to docs for requirements.",
+ "For CUDA 12.X (default): pip install onnxruntime-gpu
For CUDA 11.X: pip install onnxruntime-gpu --extra-index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-11/pypi/simple/
Refer to docs for requirements.",
'linux,C-API,ARM32,DefaultCPU':
"Follow build instructions from here",
@@ -275,34 +275,34 @@
'windows,Python,X64,OpenVINO': 'pip install onnxruntime-openvino',
'windows,C-API,X64,TensorRT':
- "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Windows Refer to
docs for usage details.",
+ "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Windows Refer to
docs for usage details.",
'windows,C++,X64,TensorRT':
- "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Windows Refer to
docs for usage details.",
+ "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Windows Refer to
docs for usage details.",
'windows,C#,X64,TensorRT':
- "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Windows Refer to
docs for usage details.",
+ "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Windows Refer to
docs for usage details.",
'windows,Python,X64,TensorRT':
- "pip install onnxruntime-gpu
Refer to
docs for requirements.",
+ "pip install onnxruntime-gpu
Refer to
docs for requirements.",
'linux,C-API,X64,TensorRT':
- "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Linux Refer to
docs for usage details.",
+ "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Linux Refer to
docs for usage details.",
'linux,C++,X64,TensorRT':
- "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Linux Refer to
docs for usage details.",
+ "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Linux Refer to
docs for usage details.",
'linux,C#,X64,TensorRT':
- "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Linux Refer to
docs for usage details.",
+ "Install Nuget package
Microsoft.ML.OnnxRuntime.Gpu.Linux Refer to
docs for usage details.",
'linux,Python,X64,TensorRT':
- "pip install onnxruntime-gpu
Refer to
docs for requirements.",
+ "pip install onnxruntime-gpu
Refer to
docs for requirements.",
'linux,Python,ARM64,TensorRT':
- "pip install onnxruntime-gpu
Refer to
docs for requirements.",
+ "pip install onnxruntime-gpu
Refer to
docs for requirements.",
'windows,C-API,X86,DirectML':
- "Install Nuget package
Microsoft.ML.OnnxRuntime.DirectML Refer to
docs for requirements.",
+ "Install Nuget package
Microsoft.ML.OnnxRuntime.DirectML Refer to
docs for requirements.",
'windows,C++,X86,DirectML':
"Install Nuget package
Microsoft.ML.OnnxRuntime.DirectML",
@@ -340,7 +340,7 @@
"Add a dependency on
com.microsoft.onnxruntime:onnxruntime using Maven/Gradle",
'linux,Java,X64,CUDA':
- "Add a dependency on
com.microsoft.onnxruntime:onnxruntime_gpu using Maven/Gradle.
Refer to
docs for requirements.",
+ "Add a dependency on
com.microsoft.onnxruntime:onnxruntime_gpu using Maven/Gradle.
Refer to
docs for requirements.",
'mac,Java,X64,DefaultCPU':
"Add a dependency on
com.microsoft.onnxruntime:onnxruntime using Maven/Gradle",
@@ -387,25 +387,25 @@
"Add a dependency on
com.microsoft.onnxruntime:onnxruntime using Maven/Gradle",
'windows,Java,X64,CUDA':
- "Add a dependency on
com.microsoft.onnxruntime:onnxruntime_gpu using Maven/Gradle.
Refer to
docs for requirements.",
+ "Add a dependency on
com.microsoft.onnxruntime:onnxruntime_gpu using Maven/Gradle.
Refer to
docs for requirements.",
'windows,Java,X64,TensorRT':
- "Add a dependency on
com.microsoft.onnxruntime:onnxruntime_gpu using Maven/Gradle.
Refer to
docs for usage details.",
+ "Add a dependency on
com.microsoft.onnxruntime:onnxruntime_gpu using Maven/Gradle.
Refer to
docs for usage details.",
'windows,Java,X64,DNNL':
- "Follow
build and
API instructions",
+ "Follow
build and
API instructions",
'windows,Java,X64,OpenVINO':
- "Follow
build and
API instructions",
+ "Follow
build and
API instructions",
'linux,Java,X64,TensorRT':
- "Add a dependency on
com.microsoft.onnxruntime:onnxruntime_gpu using Maven/Gradle.
Refer to
docs for usage details.",
+ "Add a dependency on
com.microsoft.onnxruntime:onnxruntime_gpu using Maven/Gradle.
Refer to
docs for usage details.",
'linux,Java,X64,DNNL':
- "Follow
build and
API instructions",
+ "Follow
build and
API instructions",
'linux,Java,X64,OpenVINO':
- "Follow
build and
API instructions",
+ "Follow
build and
API instructions",
'android,C-API,ARM64,NNAPI':
"Follow build instructions from
here",
@@ -607,7 +607,7 @@
"Add 'onnxruntime-c' using CocoaPods or download the .tgz file from
Github.",
'mac,C#,ARM64,CoreML':
- "Install Nuget package
Microsoft.ML.OnnxRuntime Refer to
docs for requirements.",
+ "Install Nuget package
Microsoft.ML.OnnxRuntime Refer to
docs for requirements.",
'mac,C++,ARM64,CoreML':
"Add 'onnxruntime-c' using CocoaPods or download the .tgz file from
Github.",
@@ -669,11 +669,9 @@
'android,C++,ARM64,QNN':
"Follow build instructions from
here",
- 'windows,Python,ARM64,QNN':
- "pip install onnxruntime-qnn",
+ 'windows,Python,ARM64,QNN': 'pip install onnxruntime-qnn',
- 'windows,Python,X64,QNN':
- "pip install onnxruntime-qnn",
+ 'windows,Python,X64,QNN': 'pip install onnxruntime-qnn',
'android,Java,ARM64,QNN':
"Add a dependency on
com.microsoft.onnxruntime:onnxruntime-android-qnn using Maven/Gradle and refer to the
mobile deployment guide",
@@ -716,22 +714,22 @@
"Follow build instructions from
here",
'linux,Python,ARM64,CANN':
- "pip install onnxruntime-cann
Refer to
docs for requirements.",
+ "pip install onnxruntime-cann
Refer to
docs for requirements.",
'linux,C-API,ARM64,CANN':
- "Follow build instructions from
here.",
+ "Follow build instructions from
here.",
'linux,C++,ARM64,CANN':
- "Follow build instructions from
here.",
+ "Follow build instructions from
here.",
'linux,Python,X64,CANN':
- "pip install onnxruntime-cann
Refer to
docs for requirements.",
+ "pip install onnxruntime-cann
Refer to
docs for requirements.",
'linux,C-API,X64,CANN':
- "Follow build instructions from
here.",
+ "Follow build instructions from
here.",
'linux,C++,X64,CANN':
- "Follow build instructions from
here.",
+ "Follow build instructions from
here.",
'windows,Python,X64,Azure':
"Follow build instructions from
here",
@@ -777,7 +775,7 @@
'python -m pip install cerberus flatbuffers h5py numpy>=1.16.6 onnx packaging protobuf sympy setuptools>=41.4.0
pip install --pre -i https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ort-cuda-12-nightly/pypi/simple/ onnxruntime-training',
'ot_linux,ot_on_device,ot_cplusplus,ot_X64,ot_CPU,ot_stable':
- "Download .tgz file from
Github Refer to
docs for requirements.",
+ "Download .tgz file from
Github Refer to
docs for requirements.",
'ot_linux,ot_on_device,ot_csharp,ot_X64,ot_CPU,ot_stable':
"Install Nuget package
Microsoft.ML.OnnxRuntime.Training",
@@ -792,7 +790,7 @@
"Follow build instructions from
here",
'ot_linux,ot_on_device,ot_c,ot_X64,ot_CPU,ot_stable':
- "Download .tgz file from
Github Refer to
docs for requirements.",
+ "Download .tgz file from
Github Refer to
docs for requirements.",
'ot_windows,ot_on_device,ot_python,ot_X64,ot_CPU,ot_stable':
'python -m pip install cerberus flatbuffers h5py numpy>=1.16.6 onnx packaging protobuf sympy setuptools>=41.4.0
pip install -i https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT/pypi/simple/ onnxruntime-training-cpu',
diff --git a/src/routes/huggingface/+page.svelte b/src/routes/huggingface/+page.svelte
index cc7fd1108b905..a300307d8ebc7 100644
--- a/src/routes/huggingface/+page.svelte
+++ b/src/routes/huggingface/+page.svelte