From 0f213efb0f2b07431e01c57ff66a2318cc96303e Mon Sep 17 00:00:00 2001
From: Scott McKay
Date: Mon, 23 Dec 2024 11:01:16 +1000
Subject: [PATCH] cmake update

---
 cmake/onnxruntime_unittests.cmake |  2 +-
 .../InferenceTest.cs              | 35 +++
 2 files changed, 36 insertions(+), 1 deletion(-)

diff --git a/cmake/onnxruntime_unittests.cmake b/cmake/onnxruntime_unittests.cmake
index 7689880a132be..8b85715cb59eb 100644
--- a/cmake/onnxruntime_unittests.cmake
+++ b/cmake/onnxruntime_unittests.cmake
@@ -511,7 +511,7 @@ set (onnxruntime_shared_lib_test_SRC
 
 if (NOT onnxruntime_MINIMAL_BUILD)
   list(APPEND onnxruntime_shared_lib_test_SRC ${ONNXRUNTIME_SHARED_LIB_TEST_SRC_DIR}/test_inference.cc)
-  list(APPEND onnxruntime_shared_lib_test_SRC ${ONNXRUNTIME_SHARED_LIB_TEST_SRC_DIR}/test_graph_api.cc)
+  list(APPEND onnxruntime_shared_lib_test_SRC ${ONNXRUNTIME_SHARED_LIB_TEST_SRC_DIR}/test_model_builder_api.cc)
 endif()
 
 if(onnxruntime_RUN_ONNX_TESTS)
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
index 17738da515134..353db443bc23b 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
@@ -2078,6 +2078,41 @@ private async Task TestModelRunAsyncTask()
                 }
             }
         }
+
+        [Fact(DisplayName = "TestModelRunAsyncUnallocatedOutputTask")]
+        private async Task TestModelRunAsyncUnallocatedOutputTask()
+        {
+            Float16[] inputData = { new Float16(15360), new Float16(16384), new Float16(16896), new Float16(17408), new Float16(17664) };
+            long[] shape = { 1, 5 };
+
+            var inputNames = new List<string> { "input" };
+            var inputValues = new List<OrtValue> { OrtValue.CreateTensorValueFromMemory(inputData, shape) };
+
+            var outputNames = new List<string> { "output" };
+            var outputValues = new List<OrtValue> { OrtValue.CreateAllocatedTensorValue(OrtAllocator.DefaultInstance,
+                                                                                        TensorElementType.Float16, shape) };
+
+            var model = TestDataLoader.LoadModelFromEmbeddedResource("test_types_FLOAT16.onnx");
+            using (SessionOptions opt = new SessionOptions())
+            {
+                opt.IntraOpNumThreads = 2;
+                using (var session = new InferenceSession(model, opt))
+                {
+                    try
+                    {
+                        var task = session.RunAsync(null, inputNames, inputValues, outputNames, outputValues);
+                        var outputs = await task;
+                        var valueOut = outputs.ElementAt(0);
+                        var float16s = valueOut.GetTensorDataAsSpan<Float16>().ToArray();
+                        Assert.Equal(new Float16(16896), float16s[2]);
+                    }
+                    catch
+                    {
+                        Assert.True(false);
+                    }
+                }
+            }
+        }
 #endif
 
         [Fact(DisplayName = "TestModelRunAsyncTaskFail")]