diff --git a/tools/nuget/generate_nuspec_for_native_nuget.py b/tools/nuget/generate_nuspec_for_native_nuget.py
index be477bb293249..56e739f5ff3b5 100644
--- a/tools/nuget/generate_nuspec_for_native_nuget.py
+++ b/tools/nuget/generate_nuspec_for_native_nuget.py
@@ -213,6 +213,10 @@ def generate_repo_url(line_list, repo_url, commit_id):
line_list.append('')
+def generate_readme(line_list):
+    line_list.append("<readme>README.md</readme>")
+
+
def add_common_dependencies(xml_text, package_name, version):
xml_text.append('')
if package_name == "Microsoft.ML.OnnxRuntime.Gpu":
@@ -327,6 +331,7 @@ def generate_metadata(line_list, args):
generate_license(metadata_list)
generate_project_url(metadata_list, "https://github.com/Microsoft/onnxruntime")
generate_repo_url(metadata_list, "https://github.com/Microsoft/onnxruntime.git", args.commit_id)
+ generate_readme(metadata_list)
generate_dependencies(metadata_list, args.package_name, args.package_version)
generate_release_notes(metadata_list, args.sdk_info)
metadata_list.append("")
@@ -1045,7 +1050,9 @@ def generate_files(line_list, args):
)
# README
-    files_list.append("<file src=" + os.path.join(args.sources_path, "README.md") + ' target="README.md" />')
+    files_list.append(
+        "<file src=" + os.path.join(args.sources_path, "tools", "nuget", "nupkg.README.md") + ' target="README.md" />'
+    )
# Process License, ThirdPartyNotices, Privacy
files_list.append("')
diff --git a/tools/nuget/nupkg.README.md b/tools/nuget/nupkg.README.md
new file mode 100644
index 0000000000000..e48480787c7f9
--- /dev/null
+++ b/tools/nuget/nupkg.README.md
@@ -0,0 +1,52 @@
+## About
+
+![ONNX Runtime Logo](https://raw.githubusercontent.com/microsoft/onnxruntime/main/docs/images/ONNX_Runtime_logo_dark.png)
+
+**ONNX Runtime is a cross-platform machine-learning inferencing accelerator**.
+
+**ONNX Runtime** can enable faster customer experiences and lower costs, supporting models from deep learning frameworks such as PyTorch and TensorFlow/Keras as well as classical machine learning libraries such as scikit-learn, LightGBM, XGBoost, etc.
+ONNX Runtime is compatible with different hardware, drivers, and operating systems, and provides optimal performance by leveraging hardware accelerators where applicable alongside graph optimizations and transforms.
+
+Learn more in the [ONNX Runtime documentation](https://www.onnxruntime.ai/docs).
+
+## NuGet Packages
+
+### ONNX Runtime Native packages
+
+#### Microsoft.ML.OnnxRuntime
+ - Native libraries for all supported platforms
+ - CPU Execution Provider
+ - CoreML Execution Provider on macOS/iOS
+ - https://onnxruntime.ai/docs/execution-providers/CoreML-ExecutionProvider.html
+ - XNNPACK Execution Provider on Android/iOS
+ - https://onnxruntime.ai/docs/execution-providers/Xnnpack-ExecutionProvider.html
+
+#### Microsoft.ML.OnnxRuntime.Gpu
+ - Windows and Linux
+ - TensorRT Execution Provider
+ - https://onnxruntime.ai/docs/execution-providers/TensorRT-ExecutionProvider.html
+ - CUDA Execution Provider
+ - https://onnxruntime.ai/docs/execution-providers/CUDA-ExecutionProvider.html
+ - CPU Execution Provider
+
+#### Microsoft.ML.OnnxRuntime.DirectML
+ - Windows
+ - DirectML Execution Provider
+ - https://onnxruntime.ai/docs/execution-providers/DirectML-ExecutionProvider.html
+ - CPU Execution Provider
+
+#### Microsoft.ML.OnnxRuntime.QNN
+ - 64-bit Windows
+ - QNN Execution Provider
+ - https://onnxruntime.ai/docs/execution-providers/QNN-ExecutionProvider.html
+ - CPU Execution Provider
+
+
+### Other packages
+
+#### Microsoft.ML.OnnxRuntime.Managed
+ - C# language bindings
+
+#### Microsoft.ML.OnnxRuntime.Extensions
+ - Custom operators for pre/post processing on all supported platforms.
+ - https://github.com/microsoft/onnxruntime-extensions