diff --git a/.lintrunner.toml b/.lintrunner.toml index be95e03479cf9..ad782079bf76e 100644 --- a/.lintrunner.toml +++ b/.lintrunner.toml @@ -130,6 +130,7 @@ exclude_patterns = [ 'js/**', 'onnxruntime/contrib_ops/cuda/bert/tensorrt_fused_multihead_attention/**', # Contains data chunks 'onnxruntime/core/flatbuffers/schema/*.fbs.h', # Generated code + 'onnxruntime/test/flatbuffers/*.fbs.h', # Generated code 'onnxruntime/core/graph/contrib_ops/quantization_defs.cc', 'onnxruntime/core/mlas/**', # Contains assembly code 'onnxruntime/core/mickey/cutlass_ext/**', # CUTLASS lib recommends NO automatic code formatting diff --git a/cmake/onnxruntime_unittests.cmake b/cmake/onnxruntime_unittests.cmake index 1b9a7c9b5163b..4b4d5df47f330 100644 --- a/cmake/onnxruntime_unittests.cmake +++ b/cmake/onnxruntime_unittests.cmake @@ -250,11 +250,16 @@ file(GLOB onnxruntime_test_common_src CONFIGURE_DEPENDS "${TEST_SRC_DIR}/common/logging/*.h" ) -file(GLOB onnxruntime_test_quantiztion_src CONFIGURE_DEPENDS +file(GLOB onnxruntime_test_quantization_src CONFIGURE_DEPENDS "${TEST_SRC_DIR}/quantization/*.cc" "${TEST_SRC_DIR}/quantization/*.h" ) +file(GLOB onnxruntime_test_flatbuffers_src CONFIGURE_DEPENDS + "${TEST_SRC_DIR}/flatbuffers/*.cc" + "${TEST_SRC_DIR}/flatbuffers/*.h" +) + if(NOT onnxruntime_MINIMAL_BUILD AND NOT onnxruntime_REDUCED_OPS_BUILD) file(GLOB onnxruntime_test_ir_src CONFIGURE_DEPENDS @@ -767,7 +772,8 @@ if(NOT IOS) endif() set(all_tests ${onnxruntime_test_common_src} ${onnxruntime_test_ir_src} ${onnxruntime_test_optimizer_src} - ${onnxruntime_test_framework_src} ${onnxruntime_test_providers_src} ${onnxruntime_test_quantiztion_src}) + ${onnxruntime_test_framework_src} ${onnxruntime_test_providers_src} ${onnxruntime_test_quantization_src} + ${onnxruntime_test_flatbuffers_src}) if (onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS) file(GLOB onnxruntime_test_providers_cuda_ut_src CONFIGURE_DEPENDS diff --git a/onnxruntime/core/flatbuffers/flatbuffers_utils.cc b/onnxruntime/core/flatbuffers/flatbuffers_utils.cc index fbfcfed4b71ff..06b5a7cceb9f2 100644 --- a/onnxruntime/core/flatbuffers/flatbuffers_utils.cc +++ b/onnxruntime/core/flatbuffers/flatbuffers_utils.cc @@ -315,5 +315,4 @@ bool IsOrtFormatModelBytes(const void* bytes, int num_bytes) { return num_bytes > 8 && // check buffer is large enough to contain identifier so we don't read random memory fbs::InferenceSessionBufferHasIdentifier(bytes); } - } // namespace onnxruntime::fbs::utils diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ArgType.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ArgType.py index a0328a9f469e7..3dc50a9eb6b9f 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ArgType.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ArgType.py @@ -5,4 +5,3 @@ class ArgType(object): INPUT = 0 OUTPUT = 1 - diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ArgTypeAndIndex.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ArgTypeAndIndex.py index 32aaa298dd99a..9b9a2bf51b8f9 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ArgTypeAndIndex.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ArgTypeAndIndex.py @@ -10,13 +10,17 @@ class ArgTypeAndIndex(object): __slots__ = ['_tab'] @classmethod - def GetRootAsArgTypeAndIndex(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArgTypeAndIndex() x.Init(buf, n + offset) return x @classmethod + def GetRootAsArgTypeAndIndex(cls, buf, 
offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def ArgTypeAndIndexBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -38,7 +42,26 @@ def Index(self): return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 -def ArgTypeAndIndexStart(builder): builder.StartObject(2) -def ArgTypeAndIndexAddArgType(builder, argType): builder.PrependInt8Slot(0, argType, 0) -def ArgTypeAndIndexAddIndex(builder, index): builder.PrependUint32Slot(1, index, 0) -def ArgTypeAndIndexEnd(builder): return builder.EndObject() +def ArgTypeAndIndexStart(builder): + builder.StartObject(2) + +def Start(builder): + ArgTypeAndIndexStart(builder) + +def ArgTypeAndIndexAddArgType(builder, argType): + builder.PrependInt8Slot(0, argType, 0) + +def AddArgType(builder, argType): + ArgTypeAndIndexAddArgType(builder, argType) + +def ArgTypeAndIndexAddIndex(builder, index): + builder.PrependUint32Slot(1, index, 0) + +def AddIndex(builder, index): + ArgTypeAndIndexAddIndex(builder, index) + +def ArgTypeAndIndexEnd(builder): + return builder.EndObject() + +def End(builder): + return ArgTypeAndIndexEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Attribute.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Attribute.py index a2e496b2c0654..d9dccd5c506fb 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Attribute.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Attribute.py @@ -10,13 +10,17 @@ class Attribute(object): __slots__ = ['_tab'] @classmethod - def GetRootAsAttribute(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Attribute() x.Init(buf, n + offset) return x @classmethod + def GetRootAsAttribute(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def AttributeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -212,23 +216,122 @@ def GraphsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28)) return o == 0 -def AttributeStart(builder): builder.StartObject(13) -def AttributeAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def AttributeAddDocString(builder, docString): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) -def AttributeAddType(builder, type): builder.PrependInt32Slot(2, type, 0) -def AttributeAddF(builder, f): builder.PrependFloat32Slot(3, f, 0.0) -def AttributeAddI(builder, i): builder.PrependInt64Slot(4, i, 0) -def AttributeAddS(builder, s): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(s), 0) -def AttributeAddT(builder, t): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(t), 0) -def AttributeAddG(builder, g): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(g), 0) -def AttributeAddFloats(builder, floats): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(floats), 0) -def AttributeStartFloatsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def AttributeAddInts(builder, ints): builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(ints), 0) -def AttributeStartIntsVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def AttributeAddStrings(builder, strings): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(strings), 0) -def AttributeStartStringsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def AttributeAddTensors(builder, tensors): builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(tensors), 0) -def AttributeStartTensorsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def AttributeAddGraphs(builder, graphs): builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(graphs), 0) -def AttributeStartGraphsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def AttributeEnd(builder): return builder.EndObject() +def AttributeStart(builder): + builder.StartObject(13) + +def Start(builder): + AttributeStart(builder) + +def AttributeAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def AddName(builder, name): + AttributeAddName(builder, name) + +def AttributeAddDocString(builder, docString): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) + +def AddDocString(builder, docString): + AttributeAddDocString(builder, docString) + +def AttributeAddType(builder, type): + builder.PrependInt32Slot(2, type, 0) + +def AddType(builder, type): + AttributeAddType(builder, type) + +def AttributeAddF(builder, f): + builder.PrependFloat32Slot(3, f, 0.0) + +def AddF(builder, f): + AttributeAddF(builder, f) + +def AttributeAddI(builder, i): + builder.PrependInt64Slot(4, i, 0) + +def AddI(builder, i): + AttributeAddI(builder, i) + +def AttributeAddS(builder, s): + 
builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(s), 0) + +def AddS(builder, s): + AttributeAddS(builder, s) + +def AttributeAddT(builder, t): + builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(t), 0) + +def AddT(builder, t): + AttributeAddT(builder, t) + +def AttributeAddG(builder, g): + builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(g), 0) + +def AddG(builder, g): + AttributeAddG(builder, g) + +def AttributeAddFloats(builder, floats): + builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(floats), 0) + +def AddFloats(builder, floats): + AttributeAddFloats(builder, floats) + +def AttributeStartFloatsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartFloatsVector(builder, numElems: int) -> int: + return AttributeStartFloatsVector(builder, numElems) + +def AttributeAddInts(builder, ints): + builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(ints), 0) + +def AddInts(builder, ints): + AttributeAddInts(builder, ints) + +def AttributeStartIntsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def StartIntsVector(builder, numElems: int) -> int: + return AttributeStartIntsVector(builder, numElems) + +def AttributeAddStrings(builder, strings): + builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(strings), 0) + +def AddStrings(builder, strings): + AttributeAddStrings(builder, strings) + +def AttributeStartStringsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartStringsVector(builder, numElems: int) -> int: + return AttributeStartStringsVector(builder, numElems) + +def AttributeAddTensors(builder, tensors): + builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(tensors), 0) + +def AddTensors(builder, tensors): + AttributeAddTensors(builder, tensors) + +def AttributeStartTensorsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartTensorsVector(builder, numElems: int) -> int: + return AttributeStartTensorsVector(builder, numElems) + +def AttributeAddGraphs(builder, graphs): + builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(graphs), 0) + +def AddGraphs(builder, graphs): + AttributeAddGraphs(builder, graphs) + +def AttributeStartGraphsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartGraphsVector(builder, numElems: int) -> int: + return AttributeStartGraphsVector(builder, numElems) + +def AttributeEnd(builder): + return builder.EndObject() + +def End(builder): + return AttributeEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/AttributeType.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/AttributeType.py index 2548a53bce61e..9046cf73574a3 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/AttributeType.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/AttributeType.py @@ -16,4 +16,3 @@ class AttributeType(object): GRAPHS = 10 SPARSE_TENSOR = 11 SPARSE_TENSORS = 12 - diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Checkpoint.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Checkpoint.py index ec68fd373cf91..9d5f5febd4762 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Checkpoint.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Checkpoint.py @@ -10,13 
+10,17 @@ class Checkpoint(object): __slots__ = ['_tab'] @classmethod - def GetRootAsCheckpoint(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Checkpoint() x.Init(buf, n + offset) return x @classmethod + def GetRootAsCheckpoint(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def CheckpointBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x44\x54\x43", size_prefixed=size_prefixed) @@ -78,10 +82,44 @@ def PropertyBag(self): return obj return None -def CheckpointStart(builder): builder.StartObject(4) -def CheckpointAddVersion(builder, version): builder.PrependInt32Slot(0, version, 0) -def CheckpointAddModuleState(builder, moduleState): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(moduleState), 0) -def CheckpointAddOptimizerGroups(builder, optimizerGroups): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(optimizerGroups), 0) -def CheckpointStartOptimizerGroupsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def CheckpointAddPropertyBag(builder, propertyBag): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(propertyBag), 0) -def CheckpointEnd(builder): return builder.EndObject() +def CheckpointStart(builder): + builder.StartObject(4) + +def Start(builder): + CheckpointStart(builder) + +def CheckpointAddVersion(builder, version): + builder.PrependInt32Slot(0, version, 0) + +def AddVersion(builder, version): + CheckpointAddVersion(builder, version) + +def CheckpointAddModuleState(builder, moduleState): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(moduleState), 0) + +def AddModuleState(builder, moduleState): + CheckpointAddModuleState(builder, moduleState) + +def CheckpointAddOptimizerGroups(builder, optimizerGroups): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(optimizerGroups), 0) + +def AddOptimizerGroups(builder, optimizerGroups): + CheckpointAddOptimizerGroups(builder, optimizerGroups) + +def CheckpointStartOptimizerGroupsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartOptimizerGroupsVector(builder, numElems: int) -> int: + return CheckpointStartOptimizerGroupsVector(builder, numElems) + +def CheckpointAddPropertyBag(builder, propertyBag): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(propertyBag), 0) + +def AddPropertyBag(builder, propertyBag): + CheckpointAddPropertyBag(builder, propertyBag) + +def CheckpointEnd(builder): + return builder.EndObject() + +def End(builder): + return CheckpointEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedKernelCreateInfos.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedKernelCreateInfos.py index 9f93bffa499d0..50bf8819e057f 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedKernelCreateInfos.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedKernelCreateInfos.py @@ -11,13 +11,17 @@ class DeprecatedKernelCreateInfos(object): __slots__ = ['_tab'] @classmethod - def GetRootAsDeprecatedKernelCreateInfos(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x 
= DeprecatedKernelCreateInfos() x.Init(buf, n + offset) return x @classmethod + def GetRootAsDeprecatedKernelCreateInfos(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def DeprecatedKernelCreateInfosBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -79,9 +83,38 @@ def KernelDefHashesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 -def DeprecatedKernelCreateInfosStart(builder): builder.StartObject(2) -def DeprecatedKernelCreateInfosAddNodeIndices(builder, nodeIndices): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(nodeIndices), 0) -def DeprecatedKernelCreateInfosStartNodeIndicesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def DeprecatedKernelCreateInfosAddKernelDefHashes(builder, kernelDefHashes): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(kernelDefHashes), 0) -def DeprecatedKernelCreateInfosStartKernelDefHashesVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def DeprecatedKernelCreateInfosEnd(builder): return builder.EndObject() +def DeprecatedKernelCreateInfosStart(builder): + builder.StartObject(2) + +def Start(builder): + DeprecatedKernelCreateInfosStart(builder) + +def DeprecatedKernelCreateInfosAddNodeIndices(builder, nodeIndices): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(nodeIndices), 0) + +def AddNodeIndices(builder, nodeIndices): + DeprecatedKernelCreateInfosAddNodeIndices(builder, nodeIndices) + +def DeprecatedKernelCreateInfosStartNodeIndicesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartNodeIndicesVector(builder, numElems: int) -> int: + return DeprecatedKernelCreateInfosStartNodeIndicesVector(builder, numElems) + +def DeprecatedKernelCreateInfosAddKernelDefHashes(builder, kernelDefHashes): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(kernelDefHashes), 0) + +def AddKernelDefHashes(builder, kernelDefHashes): + DeprecatedKernelCreateInfosAddKernelDefHashes(builder, kernelDefHashes) + +def DeprecatedKernelCreateInfosStartKernelDefHashesVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def StartKernelDefHashesVector(builder, numElems: int) -> int: + return DeprecatedKernelCreateInfosStartKernelDefHashesVector(builder, numElems) + +def DeprecatedKernelCreateInfosEnd(builder): + return builder.EndObject() + +def End(builder): + return DeprecatedKernelCreateInfosEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedNodeIndexAndKernelDefHash.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedNodeIndexAndKernelDefHash.py index 7137233a9e726..bab036a30702f 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedNodeIndexAndKernelDefHash.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedNodeIndexAndKernelDefHash.py @@ -11,13 +11,17 @@ class DeprecatedNodeIndexAndKernelDefHash(object): __slots__ = ['_tab'] @classmethod - def GetRootAsDeprecatedNodeIndexAndKernelDefHash(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = DeprecatedNodeIndexAndKernelDefHash() x.Init(buf, n + offset) return x 
@classmethod + def GetRootAsDeprecatedNodeIndexAndKernelDefHash(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def DeprecatedNodeIndexAndKernelDefHashBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -39,7 +43,26 @@ def KernelDefHash(self): return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 -def DeprecatedNodeIndexAndKernelDefHashStart(builder): builder.StartObject(2) -def DeprecatedNodeIndexAndKernelDefHashAddNodeIndex(builder, nodeIndex): builder.PrependUint32Slot(0, nodeIndex, 0) -def DeprecatedNodeIndexAndKernelDefHashAddKernelDefHash(builder, kernelDefHash): builder.PrependUint64Slot(1, kernelDefHash, 0) -def DeprecatedNodeIndexAndKernelDefHashEnd(builder): return builder.EndObject() +def DeprecatedNodeIndexAndKernelDefHashStart(builder): + builder.StartObject(2) + +def Start(builder): + DeprecatedNodeIndexAndKernelDefHashStart(builder) + +def DeprecatedNodeIndexAndKernelDefHashAddNodeIndex(builder, nodeIndex): + builder.PrependUint32Slot(0, nodeIndex, 0) + +def AddNodeIndex(builder, nodeIndex): + DeprecatedNodeIndexAndKernelDefHashAddNodeIndex(builder, nodeIndex) + +def DeprecatedNodeIndexAndKernelDefHashAddKernelDefHash(builder, kernelDefHash): + builder.PrependUint64Slot(1, kernelDefHash, 0) + +def AddKernelDefHash(builder, kernelDefHash): + DeprecatedNodeIndexAndKernelDefHashAddKernelDefHash(builder, kernelDefHash) + +def DeprecatedNodeIndexAndKernelDefHashEnd(builder): + return builder.EndObject() + +def End(builder): + return DeprecatedNodeIndexAndKernelDefHashEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedSessionState.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedSessionState.py index fbf21a38c2f5d..485346f92f37c 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedSessionState.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedSessionState.py @@ -11,13 +11,17 @@ class DeprecatedSessionState(object): __slots__ = ['_tab'] @classmethod - def GetRootAsDeprecatedSessionState(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = DeprecatedSessionState() x.Init(buf, n + offset) return x @classmethod + def GetRootAsDeprecatedSessionState(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def DeprecatedSessionStateBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -61,8 +65,32 @@ def SubGraphSessionStatesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 -def DeprecatedSessionStateStart(builder): builder.StartObject(2) -def DeprecatedSessionStateAddKernels(builder, kernels): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(kernels), 0) -def DeprecatedSessionStateAddSubGraphSessionStates(builder, subGraphSessionStates): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(subGraphSessionStates), 0) -def DeprecatedSessionStateStartSubGraphSessionStatesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def DeprecatedSessionStateEnd(builder): return builder.EndObject() +def DeprecatedSessionStateStart(builder): + builder.StartObject(2) + +def Start(builder): + DeprecatedSessionStateStart(builder) + +def DeprecatedSessionStateAddKernels(builder, kernels): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(kernels), 0) + +def AddKernels(builder, kernels): + DeprecatedSessionStateAddKernels(builder, kernels) + +def DeprecatedSessionStateAddSubGraphSessionStates(builder, subGraphSessionStates): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(subGraphSessionStates), 0) + +def AddSubGraphSessionStates(builder, subGraphSessionStates): + DeprecatedSessionStateAddSubGraphSessionStates(builder, subGraphSessionStates) + +def DeprecatedSessionStateStartSubGraphSessionStatesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartSubGraphSessionStatesVector(builder, numElems: int) -> int: + return DeprecatedSessionStateStartSubGraphSessionStatesVector(builder, numElems) + +def DeprecatedSessionStateEnd(builder): + return builder.EndObject() + +def End(builder): + return DeprecatedSessionStateEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedSubGraphSessionState.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedSubGraphSessionState.py index 52b450408632c..1dd8b9ec6e777 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedSubGraphSessionState.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DeprecatedSubGraphSessionState.py @@ -11,13 +11,17 @@ class DeprecatedSubGraphSessionState(object): __slots__ = ['_tab'] @classmethod - def GetRootAsDeprecatedSubGraphSessionState(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = DeprecatedSubGraphSessionState() x.Init(buf, n + offset) return x @classmethod + def GetRootAsDeprecatedSubGraphSessionState(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def DeprecatedSubGraphSessionStateBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -43,7 +47,26 @@ def SessionState(self): return obj return None -def DeprecatedSubGraphSessionStateStart(builder): builder.StartObject(2) -def DeprecatedSubGraphSessionStateAddGraphId(builder, graphId): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(graphId), 0) -def DeprecatedSubGraphSessionStateAddSessionState(builder, sessionState): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(sessionState), 0) -def DeprecatedSubGraphSessionStateEnd(builder): return builder.EndObject() +def DeprecatedSubGraphSessionStateStart(builder): + builder.StartObject(2) + +def Start(builder): + DeprecatedSubGraphSessionStateStart(builder) + +def DeprecatedSubGraphSessionStateAddGraphId(builder, graphId): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(graphId), 0) + +def AddGraphId(builder, graphId): + DeprecatedSubGraphSessionStateAddGraphId(builder, graphId) + +def DeprecatedSubGraphSessionStateAddSessionState(builder, sessionState): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(sessionState), 0) + +def AddSessionState(builder, sessionState): + DeprecatedSubGraphSessionStateAddSessionState(builder, sessionState) + +def DeprecatedSubGraphSessionStateEnd(builder): + return builder.EndObject() + +def End(builder): + return DeprecatedSubGraphSessionStateEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Dimension.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Dimension.py index d7c278fbaf0e8..7b9b61c7ef688 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Dimension.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Dimension.py @@ -10,13 +10,17 @@ class Dimension(object): __slots__ = ['_tab'] @classmethod - def GetRootAsDimension(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Dimension() x.Init(buf, n + offset) return x @classmethod + def GetRootAsDimension(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def DimensionBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -42,7 +46,26 @@ def Denotation(self): return self._tab.String(o + self._tab.Pos) return None -def DimensionStart(builder): builder.StartObject(2) -def DimensionAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def DimensionAddDenotation(builder, denotation): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(denotation), 0) -def DimensionEnd(builder): return builder.EndObject() +def DimensionStart(builder): + builder.StartObject(2) + +def Start(builder): + DimensionStart(builder) + +def DimensionAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + +def AddValue(builder, value): + DimensionAddValue(builder, value) + +def DimensionAddDenotation(builder, denotation): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(denotation), 0) + +def AddDenotation(builder, denotation): + DimensionAddDenotation(builder, denotation) + +def DimensionEnd(builder): + return builder.EndObject() + +def End(builder): + return DimensionEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DimensionValue.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DimensionValue.py index 0ca271b9f1703..4aa5f0fddc20e 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DimensionValue.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DimensionValue.py @@ -10,13 +10,17 @@ class DimensionValue(object): __slots__ = ['_tab'] @classmethod - def GetRootAsDimensionValue(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = DimensionValue() x.Init(buf, n + offset) return x @classmethod + def GetRootAsDimensionValue(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def DimensionValueBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -45,8 +49,32 @@ def DimParam(self): return self._tab.String(o + self._tab.Pos) return None -def DimensionValueStart(builder): builder.StartObject(3) -def DimensionValueAddDimType(builder, dimType): builder.PrependInt8Slot(0, dimType, 0) -def DimensionValueAddDimValue(builder, dimValue): builder.PrependInt64Slot(1, dimValue, 0) -def DimensionValueAddDimParam(builder, dimParam): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dimParam), 0) -def DimensionValueEnd(builder): return builder.EndObject() +def DimensionValueStart(builder): + builder.StartObject(3) + +def Start(builder): + DimensionValueStart(builder) + +def DimensionValueAddDimType(builder, dimType): + builder.PrependInt8Slot(0, dimType, 0) + +def AddDimType(builder, dimType): + DimensionValueAddDimType(builder, dimType) + +def DimensionValueAddDimValue(builder, dimValue): + builder.PrependInt64Slot(1, dimValue, 0) + +def AddDimValue(builder, dimValue): + DimensionValueAddDimValue(builder, dimValue) + +def DimensionValueAddDimParam(builder, dimParam): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dimParam), 0) + +def AddDimParam(builder, dimParam): + DimensionValueAddDimParam(builder, dimParam) + +def DimensionValueEnd(builder): + return builder.EndObject() + +def End(builder): + return DimensionValueEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DimensionValueType.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DimensionValueType.py index aae61bf4cf27c..eac437ec87024 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DimensionValueType.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/DimensionValueType.py @@ -6,4 +6,3 @@ class DimensionValueType(object): UNKNOWN = 0 VALUE = 1 PARAM = 2 - diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/EdgeEnd.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/EdgeEnd.py index 5ecb37013e7ac..34237bdf527ba 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/EdgeEnd.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/EdgeEnd.py @@ -9,6 +9,10 @@ class EdgeEnd(object): __slots__ = ['_tab'] + @classmethod + def SizeOf(cls): + return 12 + # EdgeEnd def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/FloatProperty.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/FloatProperty.py index 49c5c6d4725c9..a7fd3043f5dc8 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/FloatProperty.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/FloatProperty.py @@ -10,13 +10,17 @@ class FloatProperty(object): __slots__ = ['_tab'] @classmethod - def GetRootAsFloatProperty(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = FloatProperty() x.Init(buf, n + offset) return x @classmethod + def GetRootAsFloatProperty(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def FloatPropertyBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x44\x54\x43", size_prefixed=size_prefixed) @@ -38,7 +42,26 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 -def FloatPropertyStart(builder): builder.StartObject(2) -def FloatPropertyAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def FloatPropertyAddValue(builder, value): builder.PrependFloat32Slot(1, value, 0.0) -def FloatPropertyEnd(builder): return builder.EndObject() +def FloatPropertyStart(builder): + builder.StartObject(2) + +def Start(builder): + FloatPropertyStart(builder) + +def FloatPropertyAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def AddName(builder, name): + FloatPropertyAddName(builder, name) + +def FloatPropertyAddValue(builder, value): + builder.PrependFloat32Slot(1, value, 0.0) + +def AddValue(builder, value): + FloatPropertyAddValue(builder, value) + +def FloatPropertyEnd(builder): + return builder.EndObject() + +def End(builder): + return FloatPropertyEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Graph.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Graph.py index 55394ef2ab769..98cac5edb3356 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Graph.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Graph.py @@ -10,13 +10,17 @@ class Graph(object): __slots__ = ['_tab'] @classmethod - def GetRootAsGraph(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Graph() x.Init(buf, n + offset) return x @classmethod + def GetRootAsGraph(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def GraphBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -207,21 +211,110 @@ def RuntimeOptimizations(self): return obj return None -def GraphStart(builder): builder.StartObject(9) -def GraphAddInitializers(builder, initializers): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(initializers), 0) -def GraphStartInitializersVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def GraphAddNodeArgs(builder, nodeArgs): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(nodeArgs), 0) -def GraphStartNodeArgsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def GraphAddNodes(builder, nodes): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(nodes), 0) -def GraphStartNodesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def GraphAddMaxNodeIndex(builder, maxNodeIndex): builder.PrependUint32Slot(3, maxNodeIndex, 0) -def GraphAddNodeEdges(builder, nodeEdges): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(nodeEdges), 0) -def GraphStartNodeEdgesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def GraphAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) -def GraphStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def GraphAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) -def GraphStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def GraphAddSparseInitializers(builder, sparseInitializers): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(sparseInitializers), 0) -def GraphStartSparseInitializersVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def GraphAddRuntimeOptimizations(builder, runtimeOptimizations): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(runtimeOptimizations), 0) -def GraphEnd(builder): return builder.EndObject() +def GraphStart(builder): + builder.StartObject(9) + +def Start(builder): + GraphStart(builder) + +def GraphAddInitializers(builder, initializers): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(initializers), 0) + +def AddInitializers(builder, initializers): + GraphAddInitializers(builder, initializers) + +def GraphStartInitializersVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartInitializersVector(builder, numElems: int) -> int: + return GraphStartInitializersVector(builder, numElems) + +def GraphAddNodeArgs(builder, nodeArgs): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(nodeArgs), 0) + +def AddNodeArgs(builder, nodeArgs): + GraphAddNodeArgs(builder, nodeArgs) + +def GraphStartNodeArgsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartNodeArgsVector(builder, numElems: int) -> int: + return GraphStartNodeArgsVector(builder, numElems) + +def GraphAddNodes(builder, nodes): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(nodes), 0) + +def AddNodes(builder, 
nodes): + GraphAddNodes(builder, nodes) + +def GraphStartNodesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartNodesVector(builder, numElems: int) -> int: + return GraphStartNodesVector(builder, numElems) + +def GraphAddMaxNodeIndex(builder, maxNodeIndex): + builder.PrependUint32Slot(3, maxNodeIndex, 0) + +def AddMaxNodeIndex(builder, maxNodeIndex): + GraphAddMaxNodeIndex(builder, maxNodeIndex) + +def GraphAddNodeEdges(builder, nodeEdges): + builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(nodeEdges), 0) + +def AddNodeEdges(builder, nodeEdges): + GraphAddNodeEdges(builder, nodeEdges) + +def GraphStartNodeEdgesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartNodeEdgesVector(builder, numElems: int) -> int: + return GraphStartNodeEdgesVector(builder, numElems) + +def GraphAddInputs(builder, inputs): + builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) + +def AddInputs(builder, inputs): + GraphAddInputs(builder, inputs) + +def GraphStartInputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartInputsVector(builder, numElems: int) -> int: + return GraphStartInputsVector(builder, numElems) + +def GraphAddOutputs(builder, outputs): + builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) + +def AddOutputs(builder, outputs): + GraphAddOutputs(builder, outputs) + +def GraphStartOutputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartOutputsVector(builder, numElems: int) -> int: + return GraphStartOutputsVector(builder, numElems) + +def GraphAddSparseInitializers(builder, sparseInitializers): + builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(sparseInitializers), 0) + +def AddSparseInitializers(builder, sparseInitializers): + GraphAddSparseInitializers(builder, sparseInitializers) + +def GraphStartSparseInitializersVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartSparseInitializersVector(builder, numElems: int) -> int: + return GraphStartSparseInitializersVector(builder, numElems) + +def GraphAddRuntimeOptimizations(builder, runtimeOptimizations): + builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(runtimeOptimizations), 0) + +def AddRuntimeOptimizations(builder, runtimeOptimizations): + GraphAddRuntimeOptimizations(builder, runtimeOptimizations) + +def GraphEnd(builder): + return builder.EndObject() + +def End(builder): + return GraphEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/InferenceSession.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/InferenceSession.py index d5a67bf8b8c61..5f6fc22adf3ee 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/InferenceSession.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/InferenceSession.py @@ -10,13 +10,17 @@ class InferenceSession(object): __slots__ = ['_tab'] @classmethod - def GetRootAsInferenceSession(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = InferenceSession() x.Init(buf, n + offset) return x @classmethod + def GetRootAsInferenceSession(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def InferenceSessionBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -53,8 +57,32 @@ def KernelTypeStrResolver(self): return obj return None -def InferenceSessionStart(builder): builder.StartObject(4) -def InferenceSessionAddOrtVersion(builder, ortVersion): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(ortVersion), 0) -def InferenceSessionAddModel(builder, model): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(model), 0) -def InferenceSessionAddKernelTypeStrResolver(builder, kernelTypeStrResolver): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(kernelTypeStrResolver), 0) -def InferenceSessionEnd(builder): return builder.EndObject() +def InferenceSessionStart(builder): + builder.StartObject(4) + +def Start(builder): + InferenceSessionStart(builder) + +def InferenceSessionAddOrtVersion(builder, ortVersion): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(ortVersion), 0) + +def AddOrtVersion(builder, ortVersion): + InferenceSessionAddOrtVersion(builder, ortVersion) + +def InferenceSessionAddModel(builder, model): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(model), 0) + +def AddModel(builder, model): + InferenceSessionAddModel(builder, model) + +def InferenceSessionAddKernelTypeStrResolver(builder, kernelTypeStrResolver): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(kernelTypeStrResolver), 0) + +def AddKernelTypeStrResolver(builder, kernelTypeStrResolver): + InferenceSessionAddKernelTypeStrResolver(builder, kernelTypeStrResolver) + +def InferenceSessionEnd(builder): + return builder.EndObject() + +def End(builder): + return InferenceSessionEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/IntProperty.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/IntProperty.py index 195a1547fe7cf..6195a0a5bb105 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/IntProperty.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/IntProperty.py @@ -10,13 +10,17 @@ class IntProperty(object): __slots__ = ['_tab'] @classmethod - def GetRootAsIntProperty(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = IntProperty() x.Init(buf, n + offset) return x @classmethod + def GetRootAsIntProperty(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def IntPropertyBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x44\x54\x43", size_prefixed=size_prefixed) @@ -38,7 +42,26 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) return 0 -def IntPropertyStart(builder): builder.StartObject(2) -def IntPropertyAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def IntPropertyAddValue(builder, value): builder.PrependInt64Slot(1, value, 0) -def IntPropertyEnd(builder): return builder.EndObject() +def IntPropertyStart(builder): + builder.StartObject(2) + +def Start(builder): + IntPropertyStart(builder) + +def IntPropertyAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def AddName(builder, name): + IntPropertyAddName(builder, name) + +def IntPropertyAddValue(builder, value): + builder.PrependInt64Slot(1, value, 0) + +def AddValue(builder, value): + IntPropertyAddValue(builder, value) + +def IntPropertyEnd(builder): + return builder.EndObject() + +def End(builder): + return IntPropertyEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/KernelTypeStrArgsEntry.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/KernelTypeStrArgsEntry.py index 94f37b38481fd..467102d24ceac 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/KernelTypeStrArgsEntry.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/KernelTypeStrArgsEntry.py @@ -10,13 +10,17 @@ class KernelTypeStrArgsEntry(object): __slots__ = ['_tab'] @classmethod - def GetRootAsKernelTypeStrArgsEntry(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = KernelTypeStrArgsEntry() x.Init(buf, n + offset) return x @classmethod + def GetRootAsKernelTypeStrArgsEntry(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def KernelTypeStrArgsEntryBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -56,8 +60,32 @@ def ArgsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 -def KernelTypeStrArgsEntryStart(builder): builder.StartObject(2) -def KernelTypeStrArgsEntryAddKernelTypeStr(builder, kernelTypeStr): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(kernelTypeStr), 0) -def KernelTypeStrArgsEntryAddArgs(builder, args): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(args), 0) -def KernelTypeStrArgsEntryStartArgsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def KernelTypeStrArgsEntryEnd(builder): return builder.EndObject() +def KernelTypeStrArgsEntryStart(builder): + builder.StartObject(2) + +def Start(builder): + KernelTypeStrArgsEntryStart(builder) + +def KernelTypeStrArgsEntryAddKernelTypeStr(builder, kernelTypeStr): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(kernelTypeStr), 0) + +def AddKernelTypeStr(builder, kernelTypeStr): + KernelTypeStrArgsEntryAddKernelTypeStr(builder, kernelTypeStr) + +def KernelTypeStrArgsEntryAddArgs(builder, args): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(args), 0) + +def AddArgs(builder, args): + KernelTypeStrArgsEntryAddArgs(builder, args) + +def KernelTypeStrArgsEntryStartArgsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartArgsVector(builder, numElems: int) -> int: + return KernelTypeStrArgsEntryStartArgsVector(builder, numElems) + +def KernelTypeStrArgsEntryEnd(builder): + return builder.EndObject() + +def End(builder): + return KernelTypeStrArgsEntryEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/KernelTypeStrResolver.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/KernelTypeStrResolver.py index ef2cd95df91f7..ff98bf0d5fe38 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/KernelTypeStrResolver.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/KernelTypeStrResolver.py @@ -10,13 +10,17 @@ class KernelTypeStrResolver(object): __slots__ = ['_tab'] @classmethod - def GetRootAsKernelTypeStrResolver(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = KernelTypeStrResolver() x.Init(buf, n + offset) return x @classmethod + def GetRootAsKernelTypeStrResolver(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def KernelTypeStrResolverBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -49,7 +53,26 @@ def OpKernelTypeStrArgsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def KernelTypeStrResolverStart(builder): builder.StartObject(1) -def KernelTypeStrResolverAddOpKernelTypeStrArgs(builder, opKernelTypeStrArgs): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(opKernelTypeStrArgs), 0) -def KernelTypeStrResolverStartOpKernelTypeStrArgsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def KernelTypeStrResolverEnd(builder): return builder.EndObject() +def KernelTypeStrResolverStart(builder): + builder.StartObject(1) + +def Start(builder): + KernelTypeStrResolverStart(builder) + +def KernelTypeStrResolverAddOpKernelTypeStrArgs(builder, opKernelTypeStrArgs): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(opKernelTypeStrArgs), 0) + +def AddOpKernelTypeStrArgs(builder, opKernelTypeStrArgs): + KernelTypeStrResolverAddOpKernelTypeStrArgs(builder, opKernelTypeStrArgs) + +def KernelTypeStrResolverStartOpKernelTypeStrArgsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartOpKernelTypeStrArgsVector(builder, numElems: int) -> int: + return KernelTypeStrResolverStartOpKernelTypeStrArgsVector(builder, numElems) + +def KernelTypeStrResolverEnd(builder): + return builder.EndObject() + +def End(builder): + return KernelTypeStrResolverEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/MapType.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/MapType.py index 2886de2759587..d78c0a02256a4 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/MapType.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/MapType.py @@ -10,13 +10,17 @@ class MapType(object): __slots__ = ['_tab'] @classmethod - def GetRootAsMapType(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = MapType() x.Init(buf, n + offset) return x @classmethod + def GetRootAsMapType(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def MapTypeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -42,7 +46,26 @@ def ValueType(self): return obj return None -def MapTypeStart(builder): builder.StartObject(2) -def MapTypeAddKeyType(builder, keyType): builder.PrependInt32Slot(0, keyType, 0) -def MapTypeAddValueType(builder, valueType): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(valueType), 0) -def MapTypeEnd(builder): return builder.EndObject() +def MapTypeStart(builder): + builder.StartObject(2) + +def Start(builder): + MapTypeStart(builder) + +def MapTypeAddKeyType(builder, keyType): + builder.PrependInt32Slot(0, keyType, 0) + +def AddKeyType(builder, keyType): + MapTypeAddKeyType(builder, keyType) + +def MapTypeAddValueType(builder, valueType): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(valueType), 0) + +def AddValueType(builder, valueType): + MapTypeAddValueType(builder, valueType) + +def MapTypeEnd(builder): + return builder.EndObject() + +def End(builder): + return MapTypeEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Model.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Model.py index 4e72568d13ef6..f414464090eca 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Model.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Model.py @@ -10,13 +10,17 @@ class Model(object): __slots__ = ['_tab'] @classmethod - def GetRootAsModel(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Model() x.Init(buf, n + offset) return x @classmethod + def GetRootAsModel(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def ModelBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -134,17 +138,86 @@ def MetadataPropsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) return o == 0 -def ModelStart(builder): builder.StartObject(10) -def ModelAddIrVersion(builder, irVersion): builder.PrependInt64Slot(0, irVersion, 0) -def ModelAddOpsetImport(builder, opsetImport): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(opsetImport), 0) -def ModelStartOpsetImportVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def ModelAddProducerName(builder, producerName): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(producerName), 0) -def ModelAddProducerVersion(builder, producerVersion): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(producerVersion), 0) -def ModelAddDomain(builder, domain): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(domain), 0) -def ModelAddModelVersion(builder, modelVersion): builder.PrependInt64Slot(5, modelVersion, 0) -def ModelAddDocString(builder, docString): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) -def ModelAddGraph(builder, graph): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(graph), 0) -def ModelAddGraphDocString(builder, graphDocString): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(graphDocString), 0) -def ModelAddMetadataProps(builder, metadataProps): builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(metadataProps), 0) -def ModelStartMetadataPropsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def ModelEnd(builder): return builder.EndObject() +def ModelStart(builder): + builder.StartObject(10) + +def Start(builder): + ModelStart(builder) + +def ModelAddIrVersion(builder, irVersion): + builder.PrependInt64Slot(0, irVersion, 0) + +def AddIrVersion(builder, irVersion): + ModelAddIrVersion(builder, irVersion) + +def ModelAddOpsetImport(builder, opsetImport): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(opsetImport), 0) + +def AddOpsetImport(builder, opsetImport): + ModelAddOpsetImport(builder, opsetImport) + +def ModelStartOpsetImportVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartOpsetImportVector(builder, numElems: int) -> int: + return ModelStartOpsetImportVector(builder, numElems) + +def ModelAddProducerName(builder, producerName): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(producerName), 0) + +def AddProducerName(builder, producerName): + ModelAddProducerName(builder, producerName) + +def ModelAddProducerVersion(builder, producerVersion): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(producerVersion), 0) + +def AddProducerVersion(builder, producerVersion): + ModelAddProducerVersion(builder, producerVersion) + +def ModelAddDomain(builder, domain): + builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(domain), 0) + +def AddDomain(builder, domain): + ModelAddDomain(builder, domain) + +def 
ModelAddModelVersion(builder, modelVersion): + builder.PrependInt64Slot(5, modelVersion, 0) + +def AddModelVersion(builder, modelVersion): + ModelAddModelVersion(builder, modelVersion) + +def ModelAddDocString(builder, docString): + builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) + +def AddDocString(builder, docString): + ModelAddDocString(builder, docString) + +def ModelAddGraph(builder, graph): + builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(graph), 0) + +def AddGraph(builder, graph): + ModelAddGraph(builder, graph) + +def ModelAddGraphDocString(builder, graphDocString): + builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(graphDocString), 0) + +def AddGraphDocString(builder, graphDocString): + ModelAddGraphDocString(builder, graphDocString) + +def ModelAddMetadataProps(builder, metadataProps): + builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(metadataProps), 0) + +def AddMetadataProps(builder, metadataProps): + ModelAddMetadataProps(builder, metadataProps) + +def ModelStartMetadataPropsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartMetadataPropsVector(builder, numElems: int) -> int: + return ModelStartMetadataPropsVector(builder, numElems) + +def ModelEnd(builder): + return builder.EndObject() + +def End(builder): + return ModelEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ModuleState.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ModuleState.py index 19c6b1b6f2753..6c997ef8c1637 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ModuleState.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ModuleState.py @@ -10,13 +10,17 @@ class ModuleState(object): __slots__ = ['_tab'] @classmethod - def GetRootAsModuleState(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ModuleState() x.Init(buf, n + offset) return x @classmethod + def GetRootAsModuleState(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def ModuleStateBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x44\x54\x43", size_prefixed=size_prefixed) @@ -81,10 +85,57 @@ def IsNominalState(self): return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False -def ModuleStateStart(builder): builder.StartObject(3) -def ModuleStateAddRequiresGradParams(builder, requiresGradParams): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(requiresGradParams), 0) -def ModuleStateStartRequiresGradParamsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def ModuleStateAddFrozenParams(builder, frozenParams): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(frozenParams), 0) -def ModuleStateStartFrozenParamsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def ModuleStateAddIsNominalState(builder, isNominalState): builder.PrependBoolSlot(2, isNominalState, 0) -def ModuleStateEnd(builder): return builder.EndObject() + # ModuleState + def HasExternalData(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def ModuleStateStart(builder): + builder.StartObject(4) + +def Start(builder): + ModuleStateStart(builder) + +def ModuleStateAddRequiresGradParams(builder, requiresGradParams): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(requiresGradParams), 0) + +def AddRequiresGradParams(builder, requiresGradParams): + ModuleStateAddRequiresGradParams(builder, requiresGradParams) + +def ModuleStateStartRequiresGradParamsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartRequiresGradParamsVector(builder, numElems: int) -> int: + return ModuleStateStartRequiresGradParamsVector(builder, numElems) + +def ModuleStateAddFrozenParams(builder, frozenParams): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(frozenParams), 0) + +def AddFrozenParams(builder, frozenParams): + ModuleStateAddFrozenParams(builder, frozenParams) + +def ModuleStateStartFrozenParamsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartFrozenParamsVector(builder, numElems: int) -> int: + return ModuleStateStartFrozenParamsVector(builder, numElems) + +def ModuleStateAddIsNominalState(builder, isNominalState): + builder.PrependBoolSlot(2, isNominalState, 0) + +def AddIsNominalState(builder, isNominalState): + ModuleStateAddIsNominalState(builder, isNominalState) + +def ModuleStateAddHasExternalData(builder, hasExternalData): + builder.PrependBoolSlot(3, hasExternalData, 0) + +def AddHasExternalData(builder, hasExternalData): + ModuleStateAddHasExternalData(builder, hasExternalData) + +def ModuleStateEnd(builder): + return builder.EndObject() + +def End(builder): + return ModuleStateEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Node.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Node.py index c7126779abf76..85ea3b5c91c90 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Node.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Node.py @@ -10,13 +10,17 @@ class Node(object): __slots__ = ['_tab'] @classmethod - def GetRootAsNode(cls, buf, offset): + def 
GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Node() x.Init(buf, n + offset) return x @classmethod + def GetRootAsNode(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def NodeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -192,23 +196,122 @@ def ImplicitInputsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28)) return o == 0 -def NodeStart(builder): builder.StartObject(13) -def NodeAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def NodeAddDocString(builder, docString): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) -def NodeAddDomain(builder, domain): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(domain), 0) -def NodeAddSinceVersion(builder, sinceVersion): builder.PrependInt32Slot(3, sinceVersion, 0) -def NodeAddIndex(builder, index): builder.PrependUint32Slot(4, index, 0) -def NodeAddOpType(builder, opType): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(opType), 0) -def NodeAddType(builder, type): builder.PrependInt32Slot(6, type, 0) -def NodeAddExecutionProviderType(builder, executionProviderType): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(executionProviderType), 0) -def NodeAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) -def NodeStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def NodeAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) -def NodeStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def NodeAddAttributes(builder, attributes): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0) -def NodeStartAttributesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def NodeAddInputArgCounts(builder, inputArgCounts): builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(inputArgCounts), 0) -def NodeStartInputArgCountsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def NodeAddImplicitInputs(builder, implicitInputs): builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(implicitInputs), 0) -def NodeStartImplicitInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def NodeEnd(builder): return builder.EndObject() +def NodeStart(builder): + builder.StartObject(13) + +def Start(builder): + NodeStart(builder) + +def NodeAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def AddName(builder, name): + NodeAddName(builder, name) + +def NodeAddDocString(builder, docString): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) + +def AddDocString(builder, docString): + NodeAddDocString(builder, docString) + +def NodeAddDomain(builder, domain): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(domain), 0) 
+ +def AddDomain(builder, domain): + NodeAddDomain(builder, domain) + +def NodeAddSinceVersion(builder, sinceVersion): + builder.PrependInt32Slot(3, sinceVersion, 0) + +def AddSinceVersion(builder, sinceVersion): + NodeAddSinceVersion(builder, sinceVersion) + +def NodeAddIndex(builder, index): + builder.PrependUint32Slot(4, index, 0) + +def AddIndex(builder, index): + NodeAddIndex(builder, index) + +def NodeAddOpType(builder, opType): + builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(opType), 0) + +def AddOpType(builder, opType): + NodeAddOpType(builder, opType) + +def NodeAddType(builder, type): + builder.PrependInt32Slot(6, type, 0) + +def AddType(builder, type): + NodeAddType(builder, type) + +def NodeAddExecutionProviderType(builder, executionProviderType): + builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(executionProviderType), 0) + +def AddExecutionProviderType(builder, executionProviderType): + NodeAddExecutionProviderType(builder, executionProviderType) + +def NodeAddInputs(builder, inputs): + builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) + +def AddInputs(builder, inputs): + NodeAddInputs(builder, inputs) + +def NodeStartInputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartInputsVector(builder, numElems: int) -> int: + return NodeStartInputsVector(builder, numElems) + +def NodeAddOutputs(builder, outputs): + builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) + +def AddOutputs(builder, outputs): + NodeAddOutputs(builder, outputs) + +def NodeStartOutputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartOutputsVector(builder, numElems: int) -> int: + return NodeStartOutputsVector(builder, numElems) + +def NodeAddAttributes(builder, attributes): + builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0) + +def AddAttributes(builder, attributes): + NodeAddAttributes(builder, attributes) + +def NodeStartAttributesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartAttributesVector(builder, numElems: int) -> int: + return NodeStartAttributesVector(builder, numElems) + +def NodeAddInputArgCounts(builder, inputArgCounts): + builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(inputArgCounts), 0) + +def AddInputArgCounts(builder, inputArgCounts): + NodeAddInputArgCounts(builder, inputArgCounts) + +def NodeStartInputArgCountsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartInputArgCountsVector(builder, numElems: int) -> int: + return NodeStartInputArgCountsVector(builder, numElems) + +def NodeAddImplicitInputs(builder, implicitInputs): + builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(implicitInputs), 0) + +def AddImplicitInputs(builder, implicitInputs): + NodeAddImplicitInputs(builder, implicitInputs) + +def NodeStartImplicitInputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartImplicitInputsVector(builder, numElems: int) -> int: + return NodeStartImplicitInputsVector(builder, numElems) + +def NodeEnd(builder): + return builder.EndObject() + +def End(builder): + return NodeEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodeEdge.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodeEdge.py 
index dc7e72c24cb8e..b2fa4a582d5d6 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodeEdge.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodeEdge.py @@ -10,13 +10,17 @@ class NodeEdge(object): __slots__ = ['_tab'] @classmethod - def GetRootAsNodeEdge(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = NodeEdge() x.Init(buf, n + offset) return x @classmethod + def GetRootAsNodeEdge(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def NodeEdgeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -79,10 +83,44 @@ def OutputEdgesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) return o == 0 -def NodeEdgeStart(builder): builder.StartObject(3) -def NodeEdgeAddNodeIndex(builder, nodeIndex): builder.PrependUint32Slot(0, nodeIndex, 0) -def NodeEdgeAddInputEdges(builder, inputEdges): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(inputEdges), 0) -def NodeEdgeStartInputEdgesVector(builder, numElems): return builder.StartVector(12, numElems, 4) -def NodeEdgeAddOutputEdges(builder, outputEdges): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(outputEdges), 0) -def NodeEdgeStartOutputEdgesVector(builder, numElems): return builder.StartVector(12, numElems, 4) -def NodeEdgeEnd(builder): return builder.EndObject() +def NodeEdgeStart(builder): + builder.StartObject(3) + +def Start(builder): + NodeEdgeStart(builder) + +def NodeEdgeAddNodeIndex(builder, nodeIndex): + builder.PrependUint32Slot(0, nodeIndex, 0) + +def AddNodeIndex(builder, nodeIndex): + NodeEdgeAddNodeIndex(builder, nodeIndex) + +def NodeEdgeAddInputEdges(builder, inputEdges): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(inputEdges), 0) + +def AddInputEdges(builder, inputEdges): + NodeEdgeAddInputEdges(builder, inputEdges) + +def NodeEdgeStartInputEdgesVector(builder, numElems): + return builder.StartVector(12, numElems, 4) + +def StartInputEdgesVector(builder, numElems: int) -> int: + return NodeEdgeStartInputEdgesVector(builder, numElems) + +def NodeEdgeAddOutputEdges(builder, outputEdges): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(outputEdges), 0) + +def AddOutputEdges(builder, outputEdges): + NodeEdgeAddOutputEdges(builder, outputEdges) + +def NodeEdgeStartOutputEdgesVector(builder, numElems): + return builder.StartVector(12, numElems, 4) + +def StartOutputEdgesVector(builder, numElems: int) -> int: + return NodeEdgeStartOutputEdgesVector(builder, numElems) + +def NodeEdgeEnd(builder): + return builder.EndObject() + +def End(builder): + return NodeEdgeEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodeType.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodeType.py index 9d104c07e0586..a691b2e7efb50 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodeType.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodeType.py @@ -5,4 +5,3 @@ class NodeType(object): Primitive = 0 Fused = 1 - diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodesToOptimizeIndices.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodesToOptimizeIndices.py index 
be8c02240ab2d..00d9fe4dec6d8 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodesToOptimizeIndices.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/NodesToOptimizeIndices.py @@ -12,13 +12,17 @@ class NodesToOptimizeIndices(object): __slots__ = ['_tab'] @classmethod - def GetRootAsNodesToOptimizeIndices(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = NodesToOptimizeIndices() x.Init(buf, n + offset) return x @classmethod + def GetRootAsNodesToOptimizeIndices(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def NodesToOptimizeIndicesBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -95,13 +99,62 @@ def NumVariadicOutputs(self): return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 -def NodesToOptimizeIndicesStart(builder): builder.StartObject(7) -def NodesToOptimizeIndicesAddNodeIndices(builder, nodeIndices): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(nodeIndices), 0) -def NodesToOptimizeIndicesStartNodeIndicesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def NodesToOptimizeIndicesAddNumInputs(builder, numInputs): builder.PrependUint32Slot(1, numInputs, 0) -def NodesToOptimizeIndicesAddNumOutputs(builder, numOutputs): builder.PrependUint32Slot(2, numOutputs, 0) -def NodesToOptimizeIndicesAddHasVariadicInput(builder, hasVariadicInput): builder.PrependBoolSlot(3, hasVariadicInput, 0) -def NodesToOptimizeIndicesAddHasVariadicOutput(builder, hasVariadicOutput): builder.PrependBoolSlot(4, hasVariadicOutput, 0) -def NodesToOptimizeIndicesAddNumVariadicInputs(builder, numVariadicInputs): builder.PrependUint32Slot(5, numVariadicInputs, 0) -def NodesToOptimizeIndicesAddNumVariadicOutputs(builder, numVariadicOutputs): builder.PrependUint32Slot(6, numVariadicOutputs, 0) -def NodesToOptimizeIndicesEnd(builder): return builder.EndObject() +def NodesToOptimizeIndicesStart(builder): + builder.StartObject(7) + +def Start(builder): + NodesToOptimizeIndicesStart(builder) + +def NodesToOptimizeIndicesAddNodeIndices(builder, nodeIndices): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(nodeIndices), 0) + +def AddNodeIndices(builder, nodeIndices): + NodesToOptimizeIndicesAddNodeIndices(builder, nodeIndices) + +def NodesToOptimizeIndicesStartNodeIndicesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartNodeIndicesVector(builder, numElems: int) -> int: + return NodesToOptimizeIndicesStartNodeIndicesVector(builder, numElems) + +def NodesToOptimizeIndicesAddNumInputs(builder, numInputs): + builder.PrependUint32Slot(1, numInputs, 0) + +def AddNumInputs(builder, numInputs): + NodesToOptimizeIndicesAddNumInputs(builder, numInputs) + +def NodesToOptimizeIndicesAddNumOutputs(builder, numOutputs): + builder.PrependUint32Slot(2, numOutputs, 0) + +def AddNumOutputs(builder, numOutputs): + NodesToOptimizeIndicesAddNumOutputs(builder, numOutputs) + +def NodesToOptimizeIndicesAddHasVariadicInput(builder, hasVariadicInput): + builder.PrependBoolSlot(3, hasVariadicInput, 0) + +def AddHasVariadicInput(builder, hasVariadicInput): + NodesToOptimizeIndicesAddHasVariadicInput(builder, hasVariadicInput) + +def 
NodesToOptimizeIndicesAddHasVariadicOutput(builder, hasVariadicOutput): + builder.PrependBoolSlot(4, hasVariadicOutput, 0) + +def AddHasVariadicOutput(builder, hasVariadicOutput): + NodesToOptimizeIndicesAddHasVariadicOutput(builder, hasVariadicOutput) + +def NodesToOptimizeIndicesAddNumVariadicInputs(builder, numVariadicInputs): + builder.PrependUint32Slot(5, numVariadicInputs, 0) + +def AddNumVariadicInputs(builder, numVariadicInputs): + NodesToOptimizeIndicesAddNumVariadicInputs(builder, numVariadicInputs) + +def NodesToOptimizeIndicesAddNumVariadicOutputs(builder, numVariadicOutputs): + builder.PrependUint32Slot(6, numVariadicOutputs, 0) + +def AddNumVariadicOutputs(builder, numVariadicOutputs): + NodesToOptimizeIndicesAddNumVariadicOutputs(builder, numVariadicOutputs) + +def NodesToOptimizeIndicesEnd(builder): + return builder.EndObject() + +def End(builder): + return NodesToOptimizeIndicesEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OpIdKernelTypeStrArgsEntry.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OpIdKernelTypeStrArgsEntry.py index 97eea172b786b..33c028d39e3df 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OpIdKernelTypeStrArgsEntry.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OpIdKernelTypeStrArgsEntry.py @@ -10,13 +10,17 @@ class OpIdKernelTypeStrArgsEntry(object): __slots__ = ['_tab'] @classmethod - def GetRootAsOpIdKernelTypeStrArgsEntry(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = OpIdKernelTypeStrArgsEntry() x.Init(buf, n + offset) return x @classmethod + def GetRootAsOpIdKernelTypeStrArgsEntry(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def OpIdKernelTypeStrArgsEntryBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -56,8 +60,32 @@ def KernelTypeStrArgsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 -def OpIdKernelTypeStrArgsEntryStart(builder): builder.StartObject(2) -def OpIdKernelTypeStrArgsEntryAddOpId(builder, opId): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(opId), 0) -def OpIdKernelTypeStrArgsEntryAddKernelTypeStrArgs(builder, kernelTypeStrArgs): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(kernelTypeStrArgs), 0) -def OpIdKernelTypeStrArgsEntryStartKernelTypeStrArgsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def OpIdKernelTypeStrArgsEntryEnd(builder): return builder.EndObject() +def OpIdKernelTypeStrArgsEntryStart(builder): + builder.StartObject(2) + +def Start(builder): + OpIdKernelTypeStrArgsEntryStart(builder) + +def OpIdKernelTypeStrArgsEntryAddOpId(builder, opId): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(opId), 0) + +def AddOpId(builder, opId): + OpIdKernelTypeStrArgsEntryAddOpId(builder, opId) + +def OpIdKernelTypeStrArgsEntryAddKernelTypeStrArgs(builder, kernelTypeStrArgs): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(kernelTypeStrArgs), 0) + +def AddKernelTypeStrArgs(builder, kernelTypeStrArgs): + OpIdKernelTypeStrArgsEntryAddKernelTypeStrArgs(builder, kernelTypeStrArgs) + +def OpIdKernelTypeStrArgsEntryStartKernelTypeStrArgsVector(builder, 
numElems): + return builder.StartVector(4, numElems, 4) + +def StartKernelTypeStrArgsVector(builder, numElems: int) -> int: + return OpIdKernelTypeStrArgsEntryStartKernelTypeStrArgsVector(builder, numElems) + +def OpIdKernelTypeStrArgsEntryEnd(builder): + return builder.EndObject() + +def End(builder): + return OpIdKernelTypeStrArgsEntryEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OperatorSetId.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OperatorSetId.py index 5168df8f5df61..21e153b130cec 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OperatorSetId.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OperatorSetId.py @@ -10,13 +10,17 @@ class OperatorSetId(object): __slots__ = ['_tab'] @classmethod - def GetRootAsOperatorSetId(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = OperatorSetId() x.Init(buf, n + offset) return x @classmethod + def GetRootAsOperatorSetId(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def OperatorSetIdBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -38,7 +42,26 @@ def Version(self): return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) return 0 -def OperatorSetIdStart(builder): builder.StartObject(2) -def OperatorSetIdAddDomain(builder, domain): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(domain), 0) -def OperatorSetIdAddVersion(builder, version): builder.PrependInt64Slot(1, version, 0) -def OperatorSetIdEnd(builder): return builder.EndObject() +def OperatorSetIdStart(builder): + builder.StartObject(2) + +def Start(builder): + OperatorSetIdStart(builder) + +def OperatorSetIdAddDomain(builder, domain): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(domain), 0) + +def AddDomain(builder, domain): + OperatorSetIdAddDomain(builder, domain) + +def OperatorSetIdAddVersion(builder, version): + builder.PrependInt64Slot(1, version, 0) + +def AddVersion(builder, version): + OperatorSetIdAddVersion(builder, version) + +def OperatorSetIdEnd(builder): + return builder.EndObject() + +def End(builder): + return OperatorSetIdEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OptimizerGroup.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OptimizerGroup.py index d56069660aee3..70c8a6d0091ff 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OptimizerGroup.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/OptimizerGroup.py @@ -10,13 +10,17 @@ class OptimizerGroup(object): __slots__ = ['_tab'] @classmethod - def GetRootAsOptimizerGroup(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = OptimizerGroup() x.Init(buf, n + offset) return x @classmethod + def GetRootAsOptimizerGroup(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def OptimizerGroupBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x44\x54\x43", size_prefixed=size_prefixed) @@ -70,10 +74,44 @@ def OptimizerStatesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) return o == 0 -def OptimizerGroupStart(builder): builder.StartObject(4) -def OptimizerGroupAddGroupName(builder, groupName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(groupName), 0) -def OptimizerGroupAddStep(builder, step): builder.PrependInt64Slot(1, step, 0) -def OptimizerGroupAddInitialLearningRate(builder, initialLearningRate): builder.PrependFloat32Slot(2, initialLearningRate, 0.0) -def OptimizerGroupAddOptimizerStates(builder, optimizerStates): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(optimizerStates), 0) -def OptimizerGroupStartOptimizerStatesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def OptimizerGroupEnd(builder): return builder.EndObject() +def OptimizerGroupStart(builder): + builder.StartObject(4) + +def Start(builder): + OptimizerGroupStart(builder) + +def OptimizerGroupAddGroupName(builder, groupName): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(groupName), 0) + +def AddGroupName(builder, groupName): + OptimizerGroupAddGroupName(builder, groupName) + +def OptimizerGroupAddStep(builder, step): + builder.PrependInt64Slot(1, step, 0) + +def AddStep(builder, step): + OptimizerGroupAddStep(builder, step) + +def OptimizerGroupAddInitialLearningRate(builder, initialLearningRate): + builder.PrependFloat32Slot(2, initialLearningRate, 0.0) + +def AddInitialLearningRate(builder, initialLearningRate): + OptimizerGroupAddInitialLearningRate(builder, initialLearningRate) + +def OptimizerGroupAddOptimizerStates(builder, optimizerStates): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(optimizerStates), 0) + +def AddOptimizerStates(builder, optimizerStates): + OptimizerGroupAddOptimizerStates(builder, optimizerStates) + +def OptimizerGroupStartOptimizerStatesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartOptimizerStatesVector(builder, numElems: int) -> int: + return OptimizerGroupStartOptimizerStatesVector(builder, numElems) + +def OptimizerGroupEnd(builder): + return builder.EndObject() + +def End(builder): + return OptimizerGroupEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ParameterOptimizerState.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ParameterOptimizerState.py index 8e7cf8963c6a8..f8a31ff92bb55 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ParameterOptimizerState.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ParameterOptimizerState.py @@ -10,13 +10,17 @@ class ParameterOptimizerState(object): __slots__ = ['_tab'] @classmethod - def GetRootAsParameterOptimizerState(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ParameterOptimizerState() x.Init(buf, n + offset) return x @classmethod + def GetRootAsParameterOptimizerState(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def ParameterOptimizerStateBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x44\x54\x43", size_prefixed=size_prefixed) @@ -56,8 +60,32 @@ def MomentumsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 -def ParameterOptimizerStateStart(builder): builder.StartObject(2) -def ParameterOptimizerStateAddParamName(builder, paramName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(paramName), 0) -def ParameterOptimizerStateAddMomentums(builder, momentums): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(momentums), 0) -def ParameterOptimizerStateStartMomentumsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def ParameterOptimizerStateEnd(builder): return builder.EndObject() +def ParameterOptimizerStateStart(builder): + builder.StartObject(2) + +def Start(builder): + ParameterOptimizerStateStart(builder) + +def ParameterOptimizerStateAddParamName(builder, paramName): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(paramName), 0) + +def AddParamName(builder, paramName): + ParameterOptimizerStateAddParamName(builder, paramName) + +def ParameterOptimizerStateAddMomentums(builder, momentums): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(momentums), 0) + +def AddMomentums(builder, momentums): + ParameterOptimizerStateAddMomentums(builder, momentums) + +def ParameterOptimizerStateStartMomentumsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartMomentumsVector(builder, numElems: int) -> int: + return ParameterOptimizerStateStartMomentumsVector(builder, numElems) + +def ParameterOptimizerStateEnd(builder): + return builder.EndObject() + +def End(builder): + return ParameterOptimizerStateEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/PropertyBag.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/PropertyBag.py index 17849f72d326b..adfb0785ad1c4 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/PropertyBag.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/PropertyBag.py @@ -10,13 +10,17 @@ class PropertyBag(object): __slots__ = ['_tab'] @classmethod - def GetRootAsPropertyBag(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = PropertyBag() x.Init(buf, n + offset) return x @classmethod + def GetRootAsPropertyBag(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def PropertyBagBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x44\x54\x43", size_prefixed=size_prefixed) @@ -99,11 +103,50 @@ def StringsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) return o == 0 -def PropertyBagStart(builder): builder.StartObject(3) -def PropertyBagAddInts(builder, ints): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(ints), 0) -def PropertyBagStartIntsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def PropertyBagAddFloats(builder, floats): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(floats), 0) -def PropertyBagStartFloatsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def PropertyBagAddStrings(builder, strings): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(strings), 0) -def PropertyBagStartStringsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def PropertyBagEnd(builder): return builder.EndObject() +def PropertyBagStart(builder): + builder.StartObject(3) + +def Start(builder): + PropertyBagStart(builder) + +def PropertyBagAddInts(builder, ints): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(ints), 0) + +def AddInts(builder, ints): + PropertyBagAddInts(builder, ints) + +def PropertyBagStartIntsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartIntsVector(builder, numElems: int) -> int: + return PropertyBagStartIntsVector(builder, numElems) + +def PropertyBagAddFloats(builder, floats): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(floats), 0) + +def AddFloats(builder, floats): + PropertyBagAddFloats(builder, floats) + +def PropertyBagStartFloatsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartFloatsVector(builder, numElems: int) -> int: + return PropertyBagStartFloatsVector(builder, numElems) + +def PropertyBagAddStrings(builder, strings): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(strings), 0) + +def AddStrings(builder, strings): + PropertyBagAddStrings(builder, strings) + +def PropertyBagStartStringsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartStringsVector(builder, numElems: int) -> int: + return PropertyBagStartStringsVector(builder, numElems) + +def PropertyBagEnd(builder): + return builder.EndObject() + +def End(builder): + return PropertyBagEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizationRecord.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizationRecord.py index 7880cc565f69d..ecfaaf85e6e09 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizationRecord.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizationRecord.py @@ -12,13 +12,17 @@ class RuntimeOptimizationRecord(object): __slots__ = ['_tab'] @classmethod - def GetRootAsRuntimeOptimizationRecord(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = RuntimeOptimizationRecord() x.Init(buf, n + offset) return x @classmethod + def GetRootAsRuntimeOptimizationRecord(cls, buf, offset=0): + """This method is 
deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def RuntimeOptimizationRecordBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -64,9 +68,38 @@ def ProducedOpIdsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) return o == 0 -def RuntimeOptimizationRecordStart(builder): builder.StartObject(4) -def RuntimeOptimizationRecordAddActionId(builder, actionId): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(actionId), 0) -def RuntimeOptimizationRecordAddNodesToOptimizeIndices(builder, nodesToOptimizeIndices): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(nodesToOptimizeIndices), 0) -def RuntimeOptimizationRecordAddProducedOpIds(builder, producedOpIds): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(producedOpIds), 0) -def RuntimeOptimizationRecordStartProducedOpIdsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def RuntimeOptimizationRecordEnd(builder): return builder.EndObject() +def RuntimeOptimizationRecordStart(builder): + builder.StartObject(4) + +def Start(builder): + RuntimeOptimizationRecordStart(builder) + +def RuntimeOptimizationRecordAddActionId(builder, actionId): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(actionId), 0) + +def AddActionId(builder, actionId): + RuntimeOptimizationRecordAddActionId(builder, actionId) + +def RuntimeOptimizationRecordAddNodesToOptimizeIndices(builder, nodesToOptimizeIndices): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(nodesToOptimizeIndices), 0) + +def AddNodesToOptimizeIndices(builder, nodesToOptimizeIndices): + RuntimeOptimizationRecordAddNodesToOptimizeIndices(builder, nodesToOptimizeIndices) + +def RuntimeOptimizationRecordAddProducedOpIds(builder, producedOpIds): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(producedOpIds), 0) + +def AddProducedOpIds(builder, producedOpIds): + RuntimeOptimizationRecordAddProducedOpIds(builder, producedOpIds) + +def RuntimeOptimizationRecordStartProducedOpIdsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartProducedOpIdsVector(builder, numElems: int) -> int: + return RuntimeOptimizationRecordStartProducedOpIdsVector(builder, numElems) + +def RuntimeOptimizationRecordEnd(builder): + return builder.EndObject() + +def End(builder): + return RuntimeOptimizationRecordEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizationRecordContainerEntry.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizationRecordContainerEntry.py index f764b76ff11fd..01851121f46b6 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizationRecordContainerEntry.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizationRecordContainerEntry.py @@ -10,13 +10,17 @@ class RuntimeOptimizationRecordContainerEntry(object): __slots__ = ['_tab'] @classmethod - def GetRootAsRuntimeOptimizationRecordContainerEntry(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = RuntimeOptimizationRecordContainerEntry() x.Init(buf, n + offset) return x @classmethod + def 
GetRootAsRuntimeOptimizationRecordContainerEntry(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def RuntimeOptimizationRecordContainerEntryBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -56,8 +60,32 @@ def RuntimeOptimizationRecordsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 -def RuntimeOptimizationRecordContainerEntryStart(builder): builder.StartObject(2) -def RuntimeOptimizationRecordContainerEntryAddOptimizerName(builder, optimizerName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(optimizerName), 0) -def RuntimeOptimizationRecordContainerEntryAddRuntimeOptimizationRecords(builder, runtimeOptimizationRecords): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(runtimeOptimizationRecords), 0) -def RuntimeOptimizationRecordContainerEntryStartRuntimeOptimizationRecordsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def RuntimeOptimizationRecordContainerEntryEnd(builder): return builder.EndObject() +def RuntimeOptimizationRecordContainerEntryStart(builder): + builder.StartObject(2) + +def Start(builder): + RuntimeOptimizationRecordContainerEntryStart(builder) + +def RuntimeOptimizationRecordContainerEntryAddOptimizerName(builder, optimizerName): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(optimizerName), 0) + +def AddOptimizerName(builder, optimizerName): + RuntimeOptimizationRecordContainerEntryAddOptimizerName(builder, optimizerName) + +def RuntimeOptimizationRecordContainerEntryAddRuntimeOptimizationRecords(builder, runtimeOptimizationRecords): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(runtimeOptimizationRecords), 0) + +def AddRuntimeOptimizationRecords(builder, runtimeOptimizationRecords): + RuntimeOptimizationRecordContainerEntryAddRuntimeOptimizationRecords(builder, runtimeOptimizationRecords) + +def RuntimeOptimizationRecordContainerEntryStartRuntimeOptimizationRecordsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartRuntimeOptimizationRecordsVector(builder, numElems: int) -> int: + return RuntimeOptimizationRecordContainerEntryStartRuntimeOptimizationRecordsVector(builder, numElems) + +def RuntimeOptimizationRecordContainerEntryEnd(builder): + return builder.EndObject() + +def End(builder): + return RuntimeOptimizationRecordContainerEntryEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizations.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizations.py index 7ead46d950110..6f1591691ad90 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizations.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/RuntimeOptimizations.py @@ -10,13 +10,17 @@ class RuntimeOptimizations(object): __slots__ = ['_tab'] @classmethod - def GetRootAsRuntimeOptimizations(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = RuntimeOptimizations() x.Init(buf, n + offset) return x @classmethod + def GetRootAsRuntimeOptimizations(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def RuntimeOptimizationsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -50,7 +54,26 @@ def RecordsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def RuntimeOptimizationsStart(builder): builder.StartObject(1) -def RuntimeOptimizationsAddRecords(builder, records): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(records), 0) -def RuntimeOptimizationsStartRecordsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def RuntimeOptimizationsEnd(builder): return builder.EndObject() +def RuntimeOptimizationsStart(builder): + builder.StartObject(1) + +def Start(builder): + RuntimeOptimizationsStart(builder) + +def RuntimeOptimizationsAddRecords(builder, records): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(records), 0) + +def AddRecords(builder, records): + RuntimeOptimizationsAddRecords(builder, records) + +def RuntimeOptimizationsStartRecordsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartRecordsVector(builder, numElems: int) -> int: + return RuntimeOptimizationsStartRecordsVector(builder, numElems) + +def RuntimeOptimizationsEnd(builder): + return builder.EndObject() + +def End(builder): + return RuntimeOptimizationsEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/SequenceType.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/SequenceType.py index 1c9f6fdc88554..007fc31ad8786 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/SequenceType.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/SequenceType.py @@ -10,13 +10,17 @@ class SequenceType(object): __slots__ = ['_tab'] @classmethod - def GetRootAsSequenceType(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = SequenceType() x.Init(buf, n + offset) return x @classmethod + def GetRootAsSequenceType(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def SequenceTypeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -35,6 +39,20 @@ def ElemType(self): return obj return None -def SequenceTypeStart(builder): builder.StartObject(1) -def SequenceTypeAddElemType(builder, elemType): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(elemType), 0) -def SequenceTypeEnd(builder): return builder.EndObject() +def SequenceTypeStart(builder): + builder.StartObject(1) + +def Start(builder): + SequenceTypeStart(builder) + +def SequenceTypeAddElemType(builder, elemType): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(elemType), 0) + +def AddElemType(builder, elemType): + SequenceTypeAddElemType(builder, elemType) + +def SequenceTypeEnd(builder): + return builder.EndObject() + +def End(builder): + return SequenceTypeEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Shape.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Shape.py index 9cbb2113e84b3..0603ef3cd05da 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Shape.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Shape.py @@ -10,13 +10,17 @@ class Shape(object): __slots__ = ['_tab'] @classmethod - def GetRootAsShape(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Shape() x.Init(buf, n + offset) return x @classmethod + def GetRootAsShape(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def ShapeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -49,7 +53,26 @@ def DimIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ShapeStart(builder): builder.StartObject(1) -def ShapeAddDim(builder, dim): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(dim), 0) -def ShapeStartDimVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def ShapeEnd(builder): return builder.EndObject() +def ShapeStart(builder): + builder.StartObject(1) + +def Start(builder): + ShapeStart(builder) + +def ShapeAddDim(builder, dim): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(dim), 0) + +def AddDim(builder, dim): + ShapeAddDim(builder, dim) + +def ShapeStartDimVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartDimVector(builder, numElems: int) -> int: + return ShapeStartDimVector(builder, numElems) + +def ShapeEnd(builder): + return builder.EndObject() + +def End(builder): + return ShapeEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/SparseTensor.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/SparseTensor.py index 4191c1d493ad9..48da36c9d9879 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/SparseTensor.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/SparseTensor.py @@ -10,13 +10,17 @@ class SparseTensor(object): __slots__ = ['_tab'] @classmethod - def GetRootAsSparseTensor(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = SparseTensor() x.Init(buf, n + offset) return x @classmethod + def GetRootAsSparseTensor(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def SparseTensorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -73,9 +77,38 @@ def DimsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) return o == 0 -def SparseTensorStart(builder): builder.StartObject(3) -def SparseTensorAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) -def SparseTensorAddIndices(builder, indices): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(indices), 0) -def SparseTensorAddDims(builder, dims): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0) -def SparseTensorStartDimsVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def SparseTensorEnd(builder): return builder.EndObject() +def SparseTensorStart(builder): + builder.StartObject(3) + +def Start(builder): + SparseTensorStart(builder) + +def SparseTensorAddValues(builder, values): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) + +def AddValues(builder, values): + SparseTensorAddValues(builder, values) + +def SparseTensorAddIndices(builder, indices): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(indices), 0) + +def AddIndices(builder, indices): + SparseTensorAddIndices(builder, indices) + +def SparseTensorAddDims(builder, dims): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0) + +def AddDims(builder, dims): + SparseTensorAddDims(builder, dims) + +def SparseTensorStartDimsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def StartDimsVector(builder, numElems: int) -> int: + return SparseTensorStartDimsVector(builder, numElems) + +def SparseTensorEnd(builder): + return builder.EndObject() + +def End(builder): + return SparseTensorEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/StringProperty.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/StringProperty.py index 97c46ec1a4777..ded97ebfb0de4 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/StringProperty.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/StringProperty.py @@ -10,13 +10,17 @@ class StringProperty(object): __slots__ = ['_tab'] @classmethod - def GetRootAsStringProperty(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = StringProperty() x.Init(buf, n + offset) return x @classmethod + def GetRootAsStringProperty(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def StringPropertyBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x44\x54\x43", size_prefixed=size_prefixed) @@ -38,7 +42,26 @@ def Value(self): return self._tab.String(o + self._tab.Pos) return None -def StringPropertyStart(builder): builder.StartObject(2) -def StringPropertyAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def StringPropertyAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def StringPropertyEnd(builder): return builder.EndObject() +def StringPropertyStart(builder): + builder.StartObject(2) + +def Start(builder): + StringPropertyStart(builder) + +def StringPropertyAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def AddName(builder, name): + StringPropertyAddName(builder, name) + +def StringPropertyAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + +def AddValue(builder, value): + StringPropertyAddValue(builder, value) + +def StringPropertyEnd(builder): + return builder.EndObject() + +def End(builder): + return StringPropertyEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/StringStringEntry.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/StringStringEntry.py index 4363d07588718..1a492ca8e5e8a 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/StringStringEntry.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/StringStringEntry.py @@ -10,13 +10,17 @@ class StringStringEntry(object): __slots__ = ['_tab'] @classmethod - def GetRootAsStringStringEntry(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = StringStringEntry() x.Init(buf, n + offset) return x @classmethod + def GetRootAsStringStringEntry(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def StringStringEntryBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -38,7 +42,26 @@ def Value(self): return self._tab.String(o + self._tab.Pos) return None -def StringStringEntryStart(builder): builder.StartObject(2) -def StringStringEntryAddKey(builder, key): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(key), 0) -def StringStringEntryAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def StringStringEntryEnd(builder): return builder.EndObject() +def StringStringEntryStart(builder): + builder.StartObject(2) + +def Start(builder): + StringStringEntryStart(builder) + +def StringStringEntryAddKey(builder, key): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(key), 0) + +def AddKey(builder, key): + StringStringEntryAddKey(builder, key) + +def StringStringEntryAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + +def AddValue(builder, value): + StringStringEntryAddValue(builder, value) + +def StringStringEntryEnd(builder): + return builder.EndObject() + +def End(builder): + return StringStringEntryEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Tensor.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Tensor.py index 5705ac9379684..6b745c2a67fa7 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Tensor.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/Tensor.py @@ -10,13 +10,17 @@ class Tensor(object): __slots__ = ['_tab'] @classmethod - def GetRootAsTensor(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Tensor() x.Init(buf, n + offset) return x @classmethod + def GetRootAsTensor(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def TensorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -119,14 +123,81 @@ def StringDataIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) return o == 0 -def TensorStart(builder): builder.StartObject(6) -def TensorAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def TensorAddDocString(builder, docString): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) -def TensorAddDims(builder, dims): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0) -def TensorStartDimsVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def TensorAddDataType(builder, dataType): builder.PrependInt32Slot(3, dataType, 0) -def TensorAddRawData(builder, rawData): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(rawData), 0) -def TensorStartRawDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def TensorAddStringData(builder, stringData): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(stringData), 0) -def TensorStartStringDataVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def TensorEnd(builder): return builder.EndObject() + # Tensor + def ExternalDataOffset(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return -1 + +def TensorStart(builder): + builder.StartObject(7) + +def Start(builder): + TensorStart(builder) + +def TensorAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def AddName(builder, name): + TensorAddName(builder, name) + +def TensorAddDocString(builder, docString): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) + +def AddDocString(builder, docString): + TensorAddDocString(builder, docString) + +def TensorAddDims(builder, dims): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0) + +def AddDims(builder, dims): + TensorAddDims(builder, dims) + +def TensorStartDimsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def StartDimsVector(builder, numElems: int) -> int: + return TensorStartDimsVector(builder, numElems) + +def TensorAddDataType(builder, dataType): + builder.PrependInt32Slot(3, dataType, 0) + +def AddDataType(builder, dataType): + TensorAddDataType(builder, dataType) + +def TensorAddRawData(builder, rawData): + builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(rawData), 0) + +def AddRawData(builder, rawData): + TensorAddRawData(builder, rawData) + +def TensorStartRawDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + +def StartRawDataVector(builder, numElems: int) -> int: + return TensorStartRawDataVector(builder, numElems) + +def TensorAddStringData(builder, stringData): + builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(stringData), 0) + +def AddStringData(builder, stringData): + TensorAddStringData(builder, stringData) + +def 
TensorStartStringDataVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def StartStringDataVector(builder, numElems: int) -> int: + return TensorStartStringDataVector(builder, numElems) + +def TensorAddExternalDataOffset(builder, externalDataOffset): + builder.PrependInt64Slot(6, externalDataOffset, -1) + +def AddExternalDataOffset(builder, externalDataOffset): + TensorAddExternalDataOffset(builder, externalDataOffset) + +def TensorEnd(builder): + return builder.EndObject() + +def End(builder): + return TensorEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TensorDataType.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TensorDataType.py index aa97e56e7869f..f588bd8999fed 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TensorDataType.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TensorDataType.py @@ -24,4 +24,3 @@ class TensorDataType(object): FLOAT8E4M3FNUZ = 18 FLOAT8E5M2 = 19 FLOAT8E5M2FNUZ = 20 - diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TensorTypeAndShape.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TensorTypeAndShape.py index 7371d65b850cd..21c6e79fb9898 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TensorTypeAndShape.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TensorTypeAndShape.py @@ -10,13 +10,17 @@ class TensorTypeAndShape(object): __slots__ = ['_tab'] @classmethod - def GetRootAsTensorTypeAndShape(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = TensorTypeAndShape() x.Init(buf, n + offset) return x @classmethod + def GetRootAsTensorTypeAndShape(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def TensorTypeAndShapeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -42,7 +46,26 @@ def Shape(self): return obj return None -def TensorTypeAndShapeStart(builder): builder.StartObject(2) -def TensorTypeAndShapeAddElemType(builder, elemType): builder.PrependInt32Slot(0, elemType, 0) -def TensorTypeAndShapeAddShape(builder, shape): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0) -def TensorTypeAndShapeEnd(builder): return builder.EndObject() +def TensorTypeAndShapeStart(builder): + builder.StartObject(2) + +def Start(builder): + TensorTypeAndShapeStart(builder) + +def TensorTypeAndShapeAddElemType(builder, elemType): + builder.PrependInt32Slot(0, elemType, 0) + +def AddElemType(builder, elemType): + TensorTypeAndShapeAddElemType(builder, elemType) + +def TensorTypeAndShapeAddShape(builder, shape): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0) + +def AddShape(builder, shape): + TensorTypeAndShapeAddShape(builder, shape) + +def TensorTypeAndShapeEnd(builder): + return builder.EndObject() + +def End(builder): + return TensorTypeAndShapeEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TypeInfo.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TypeInfo.py index f28be247a5e54..b83c037fc48f7 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TypeInfo.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TypeInfo.py @@ -10,13 +10,17 @@ class TypeInfo(object): __slots__ = ['_tab'] @classmethod - def 
GetRootAsTypeInfo(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = TypeInfo() x.Init(buf, n + offset) return x @classmethod + def GetRootAsTypeInfo(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def TypeInfoBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -48,8 +52,32 @@ def Value(self): return obj return None -def TypeInfoStart(builder): builder.StartObject(3) -def TypeInfoAddDenotation(builder, denotation): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(denotation), 0) -def TypeInfoAddValueType(builder, valueType): builder.PrependUint8Slot(1, valueType, 0) -def TypeInfoAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def TypeInfoEnd(builder): return builder.EndObject() +def TypeInfoStart(builder): + builder.StartObject(3) + +def Start(builder): + TypeInfoStart(builder) + +def TypeInfoAddDenotation(builder, denotation): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(denotation), 0) + +def AddDenotation(builder, denotation): + TypeInfoAddDenotation(builder, denotation) + +def TypeInfoAddValueType(builder, valueType): + builder.PrependUint8Slot(1, valueType, 0) + +def AddValueType(builder, valueType): + TypeInfoAddValueType(builder, valueType) + +def TypeInfoAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + +def AddValue(builder, value): + TypeInfoAddValue(builder, value) + +def TypeInfoEnd(builder): + return builder.EndObject() + +def End(builder): + return TypeInfoEnd(builder) diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TypeInfoValue.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TypeInfoValue.py index 0bc18dce2e8fb..3698ae462ab76 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TypeInfoValue.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/TypeInfoValue.py @@ -7,4 +7,3 @@ class TypeInfoValue(object): tensor_type = 1 sequence_type = 2 map_type = 3 - diff --git a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ValueInfo.py b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ValueInfo.py index 41c81844ddd38..529f21fc4685d 100644 --- a/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ValueInfo.py +++ b/onnxruntime/core/flatbuffers/ort_flatbuffers_py/fbs/ValueInfo.py @@ -10,13 +10,17 @@ class ValueInfo(object): __slots__ = ['_tab'] @classmethod - def GetRootAsValueInfo(cls, buf, offset): + def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ValueInfo() x.Init(buf, n + offset) return x @classmethod + def GetRootAsValueInfo(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def ValueInfoBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed) @@ -49,8 +53,32 @@ def Type(self): return obj return None -def ValueInfoStart(builder): builder.StartObject(3) -def ValueInfoAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def ValueInfoAddDocString(builder, docString): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) -def ValueInfoAddType(builder, type): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(type), 0) -def ValueInfoEnd(builder): return builder.EndObject() +def ValueInfoStart(builder): + builder.StartObject(3) + +def Start(builder): + ValueInfoStart(builder) + +def ValueInfoAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def AddName(builder, name): + ValueInfoAddName(builder, name) + +def ValueInfoAddDocString(builder, docString): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(docString), 0) + +def AddDocString(builder, docString): + ValueInfoAddDocString(builder, docString) + +def ValueInfoAddType(builder, type): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(type), 0) + +def AddType(builder, type): + ValueInfoAddType(builder, type) + +def ValueInfoEnd(builder): + return builder.EndObject() + +def End(builder): + return ValueInfoEnd(builder) diff --git a/onnxruntime/core/flatbuffers/schema/compile_schema.py b/onnxruntime/core/flatbuffers/schema/compile_schema.py index e9b090c237815..bf367221ae945 100644 --- a/onnxruntime/core/flatbuffers/schema/compile_schema.py +++ b/onnxruntime/core/flatbuffers/schema/compile_schema.py @@ -92,6 +92,10 @@ def main(): schema_path = SCRIPT_DIR / "ort.fbs" training_schema_path = SCRIPT_DIR / "ort_training_checkpoint.fbs" + test_dir = SCRIPT_DIR.parents[2] / "test" / "flatbuffers" + test_schema = "flatbuffers_utils_test.fbs" + test_schema_path = test_dir / test_schema + if "python" in languages: with tempfile.TemporaryDirectory() as temp_dir_name: temp_dir = pathlib.Path(temp_dir_name).resolve() @@ -134,6 +138,12 @@ def main(): generate_cpp(flatc, schema_path) generate_cpp(flatc, training_schema_path) + # generate schema used in unit tests and move to the test dir. file is generated by generate_cpp in SCRIPT_DIR + generate_cpp(flatc, test_schema_path) + src_test_schema_h = str(SCRIPT_DIR / test_schema) + ".h" + dest_test_schema_h = str(test_dir / test_schema) + ".h" + shutil.move(src_test_schema_h, dest_test_schema_h) + if __name__ == "__main__": main() diff --git a/onnxruntime/core/flatbuffers/schema/ort.fbs b/onnxruntime/core/flatbuffers/schema/ort.fbs index 937d59f605627..4d883e87b00f9 100644 --- a/onnxruntime/core/flatbuffers/schema/ort.fbs +++ b/onnxruntime/core/flatbuffers/schema/ort.fbs @@ -158,6 +158,11 @@ table Tensor { // string_data is least used string_data:[string]; + + // offset into external data file to allow data >2GB to be handled. not used for string data. + // an external file writer/reader needs to be provided when serializing. + // int64 (vs uint64) so we can explicitly set to -1 when not used. 
+ external_data_offset:int64 = -1; } table SparseTensor { diff --git a/onnxruntime/core/flatbuffers/schema/ort.fbs.h b/onnxruntime/core/flatbuffers/schema/ort.fbs.h index dc8a471f2d81f..50fc1db8621a4 100644 --- a/onnxruntime/core/flatbuffers/schema/ort.fbs.h +++ b/onnxruntime/core/flatbuffers/schema/ort.fbs.h @@ -6,6 +6,13 @@ #include "core/common/flatbuffers.h" +// Ensure the included flatbuffers.h is the same version as when this file was +// generated, otherwise it may not be compatible. +static_assert(FLATBUFFERS_VERSION_MAJOR == 23 && + FLATBUFFERS_VERSION_MINOR == 5 && + FLATBUFFERS_VERSION_REVISION == 26, + "Non-compatible flatbuffers version included"); + namespace onnxruntime { namespace fbs { @@ -159,7 +166,7 @@ inline const char * const *EnumNamesAttributeType() { } inline const char *EnumNameAttributeType(AttributeType e) { - if (flatbuffers::IsOutRange(e, AttributeType::UNDEFINED, AttributeType::SPARSE_TENSORS)) return ""; + if (::flatbuffers::IsOutRange(e, AttributeType::UNDEFINED, AttributeType::SPARSE_TENSORS)) return ""; const size_t index = static_cast(e); return EnumNamesAttributeType()[index]; } @@ -192,7 +199,7 @@ inline const char * const *EnumNamesDimensionValueType() { } inline const char *EnumNameDimensionValueType(DimensionValueType e) { - if (flatbuffers::IsOutRange(e, DimensionValueType::UNKNOWN, DimensionValueType::PARAM)) return ""; + if (::flatbuffers::IsOutRange(e, DimensionValueType::UNKNOWN, DimensionValueType::PARAM)) return ""; const size_t index = static_cast(e); return EnumNamesDimensionValueType()[index]; } @@ -279,7 +286,7 @@ inline const char * const *EnumNamesTensorDataType() { } inline const char *EnumNameTensorDataType(TensorDataType e) { - if (flatbuffers::IsOutRange(e, TensorDataType::UNDEFINED, TensorDataType::FLOAT8E5M2FNUZ)) return ""; + if (::flatbuffers::IsOutRange(e, TensorDataType::UNDEFINED, TensorDataType::FLOAT8E5M2FNUZ)) return ""; const size_t index = static_cast(e); return EnumNamesTensorDataType()[index]; } @@ -309,7 +316,7 @@ inline const char * const *EnumNamesNodeType() { } inline const char *EnumNameNodeType(NodeType e) { - if (flatbuffers::IsOutRange(e, NodeType::Primitive, NodeType::Fused)) return ""; + if (::flatbuffers::IsOutRange(e, NodeType::Primitive, NodeType::Fused)) return ""; const size_t index = static_cast(e); return EnumNamesNodeType()[index]; } @@ -345,7 +352,7 @@ inline const char * const *EnumNamesTypeInfoValue() { } inline const char *EnumNameTypeInfoValue(TypeInfoValue e) { - if (flatbuffers::IsOutRange(e, TypeInfoValue::NONE, TypeInfoValue::map_type)) return ""; + if (::flatbuffers::IsOutRange(e, TypeInfoValue::NONE, TypeInfoValue::map_type)) return ""; const size_t index = static_cast(e); return EnumNamesTypeInfoValue()[index]; } @@ -366,8 +373,8 @@ template<> struct TypeInfoValueTraits { static const TypeInfoValue enum_value = TypeInfoValue::map_type; }; -bool VerifyTypeInfoValue(flatbuffers::Verifier &verifier, const void *obj, TypeInfoValue type); -bool VerifyTypeInfoValueVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types); +bool VerifyTypeInfoValue(::flatbuffers::Verifier &verifier, const void *obj, TypeInfoValue type); +bool VerifyTypeInfoValueVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset> *values, const ::flatbuffers::Vector *types); enum class ArgType : int8_t { INPUT = 0, @@ -394,7 +401,7 @@ inline const char * const *EnumNamesArgType() { } inline const char *EnumNameArgType(ArgType e) { - if 
(flatbuffers::IsOutRange(e, ArgType::INPUT, ArgType::OUTPUT)) return ""; + if (::flatbuffers::IsOutRange(e, ArgType::INPUT, ArgType::OUTPUT)) return ""; const size_t index = static_cast(e); return EnumNamesArgType()[index]; } @@ -406,35 +413,37 @@ FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(4) EdgeEnd FLATBUFFERS_FINAL_CLASS { int32_t dst_arg_index_; public: - EdgeEnd() { - memset(static_cast(this), 0, sizeof(EdgeEnd)); + EdgeEnd() + : node_index_(0), + src_arg_index_(0), + dst_arg_index_(0) { } EdgeEnd(uint32_t _node_index, int32_t _src_arg_index, int32_t _dst_arg_index) - : node_index_(flatbuffers::EndianScalar(_node_index)), - src_arg_index_(flatbuffers::EndianScalar(_src_arg_index)), - dst_arg_index_(flatbuffers::EndianScalar(_dst_arg_index)) { + : node_index_(::flatbuffers::EndianScalar(_node_index)), + src_arg_index_(::flatbuffers::EndianScalar(_src_arg_index)), + dst_arg_index_(::flatbuffers::EndianScalar(_dst_arg_index)) { } uint32_t node_index() const { - return flatbuffers::EndianScalar(node_index_); + return ::flatbuffers::EndianScalar(node_index_); } int32_t src_arg_index() const { - return flatbuffers::EndianScalar(src_arg_index_); + return ::flatbuffers::EndianScalar(src_arg_index_); } int32_t dst_arg_index() const { - return flatbuffers::EndianScalar(dst_arg_index_); + return ::flatbuffers::EndianScalar(dst_arg_index_); } }; FLATBUFFERS_STRUCT_END(EdgeEnd, 12); -struct Shape FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct Shape FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef ShapeBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_DIM = 4 }; - const flatbuffers::Vector> *dim() const { - return GetPointer> *>(VT_DIM); + const ::flatbuffers::Vector<::flatbuffers::Offset> *dim() const { + return GetPointer> *>(VT_DIM); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_DIM) && verifier.VerifyVector(dim()) && @@ -445,41 +454,40 @@ struct Shape FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct ShapeBuilder { typedef Shape Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_dim(flatbuffers::Offset>> dim) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_dim(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> dim) { fbb_.AddOffset(Shape::VT_DIM, dim); } - explicit ShapeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit ShapeBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - ShapeBuilder &operator=(const ShapeBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateShape( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset>> dim = 0) { +inline ::flatbuffers::Offset CreateShape( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> dim = 0) { ShapeBuilder builder_(_fbb); builder_.add_dim(dim); return builder_.Finish(); } -inline flatbuffers::Offset CreateShapeDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const std::vector> *dim = nullptr) { - auto dim__ = dim ? 
_fbb.CreateVector>(*dim) : 0; +inline ::flatbuffers::Offset CreateShapeDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector<::flatbuffers::Offset> *dim = nullptr) { + auto dim__ = dim ? _fbb.CreateVector<::flatbuffers::Offset>(*dim) : 0; return onnxruntime::fbs::CreateShape( _fbb, dim__); } -struct Dimension FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct Dimension FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef DimensionBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_VALUE = 4, @@ -488,10 +496,10 @@ struct Dimension FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const onnxruntime::fbs::DimensionValue *value() const { return GetPointer(VT_VALUE); } - const flatbuffers::String *denotation() const { - return GetPointer(VT_DENOTATION); + const ::flatbuffers::String *denotation() const { + return GetPointer(VT_DENOTATION); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_VALUE) && verifier.VerifyTable(value()) && @@ -503,39 +511,38 @@ struct Dimension FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct DimensionBuilder { typedef Dimension Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_value(flatbuffers::Offset value) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_value(::flatbuffers::Offset value) { fbb_.AddOffset(Dimension::VT_VALUE, value); } - void add_denotation(flatbuffers::Offset denotation) { + void add_denotation(::flatbuffers::Offset<::flatbuffers::String> denotation) { fbb_.AddOffset(Dimension::VT_DENOTATION, denotation); } - explicit DimensionBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit DimensionBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - DimensionBuilder &operator=(const DimensionBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateDimension( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset value = 0, - flatbuffers::Offset denotation = 0) { +inline ::flatbuffers::Offset CreateDimension( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset value = 0, + ::flatbuffers::Offset<::flatbuffers::String> denotation = 0) { DimensionBuilder builder_(_fbb); builder_.add_denotation(denotation); builder_.add_value(value); return builder_.Finish(); } -inline flatbuffers::Offset CreateDimensionDirect( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset value = 0, +inline ::flatbuffers::Offset CreateDimensionDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset value = 0, const char *denotation = nullptr) { auto denotation__ = denotation ? 
_fbb.CreateString(denotation) : 0; return onnxruntime::fbs::CreateDimension( @@ -544,7 +551,7 @@ inline flatbuffers::Offset CreateDimensionDirect( denotation__); } -struct DimensionValue FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct DimensionValue FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef DimensionValueBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_DIM_TYPE = 4, @@ -557,10 +564,10 @@ struct DimensionValue FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { int64_t dim_value() const { return GetField(VT_DIM_VALUE, 0); } - const flatbuffers::String *dim_param() const { - return GetPointer(VT_DIM_PARAM); + const ::flatbuffers::String *dim_param() const { + return GetPointer(VT_DIM_PARAM); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_DIM_TYPE, 1) && VerifyField(verifier, VT_DIM_VALUE, 8) && @@ -572,34 +579,33 @@ struct DimensionValue FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct DimensionValueBuilder { typedef DimensionValue Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_dim_type(onnxruntime::fbs::DimensionValueType dim_type) { fbb_.AddElement(DimensionValue::VT_DIM_TYPE, static_cast(dim_type), 0); } void add_dim_value(int64_t dim_value) { fbb_.AddElement(DimensionValue::VT_DIM_VALUE, dim_value, 0); } - void add_dim_param(flatbuffers::Offset dim_param) { + void add_dim_param(::flatbuffers::Offset<::flatbuffers::String> dim_param) { fbb_.AddOffset(DimensionValue::VT_DIM_PARAM, dim_param); } - explicit DimensionValueBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit DimensionValueBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - DimensionValueBuilder &operator=(const DimensionValueBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateDimensionValue( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateDimensionValue( + ::flatbuffers::FlatBufferBuilder &_fbb, onnxruntime::fbs::DimensionValueType dim_type = onnxruntime::fbs::DimensionValueType::UNKNOWN, int64_t dim_value = 0, - flatbuffers::Offset dim_param = 0) { + ::flatbuffers::Offset<::flatbuffers::String> dim_param = 0) { DimensionValueBuilder builder_(_fbb); builder_.add_dim_value(dim_value); builder_.add_dim_param(dim_param); @@ -607,8 +613,8 @@ inline flatbuffers::Offset CreateDimensionValue( return builder_.Finish(); } -inline flatbuffers::Offset CreateDimensionValueDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateDimensionValueDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, onnxruntime::fbs::DimensionValueType dim_type = onnxruntime::fbs::DimensionValueType::UNKNOWN, int64_t dim_value = 0, const char *dim_param = nullptr) { @@ -620,7 +626,7 @@ inline flatbuffers::Offset CreateDimensionValueDirect( dim_param__); } -struct TensorTypeAndShape FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct TensorTypeAndShape FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef TensorTypeAndShapeBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_ELEM_TYPE = 4, 
@@ -632,7 +638,7 @@ struct TensorTypeAndShape FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const onnxruntime::fbs::Shape *shape() const { return GetPointer(VT_SHAPE); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_ELEM_TYPE, 4) && VerifyOffset(verifier, VT_SHAPE) && @@ -643,37 +649,36 @@ struct TensorTypeAndShape FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct TensorTypeAndShapeBuilder { typedef TensorTypeAndShape Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_elem_type(onnxruntime::fbs::TensorDataType elem_type) { fbb_.AddElement(TensorTypeAndShape::VT_ELEM_TYPE, static_cast(elem_type), 0); } - void add_shape(flatbuffers::Offset shape) { + void add_shape(::flatbuffers::Offset shape) { fbb_.AddOffset(TensorTypeAndShape::VT_SHAPE, shape); } - explicit TensorTypeAndShapeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit TensorTypeAndShapeBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - TensorTypeAndShapeBuilder &operator=(const TensorTypeAndShapeBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateTensorTypeAndShape( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateTensorTypeAndShape( + ::flatbuffers::FlatBufferBuilder &_fbb, onnxruntime::fbs::TensorDataType elem_type = onnxruntime::fbs::TensorDataType::UNDEFINED, - flatbuffers::Offset shape = 0) { + ::flatbuffers::Offset shape = 0) { TensorTypeAndShapeBuilder builder_(_fbb); builder_.add_shape(shape); builder_.add_elem_type(elem_type); return builder_.Finish(); } -struct MapType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct MapType FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef MapTypeBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_KEY_TYPE = 4, @@ -685,7 +690,7 @@ struct MapType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const onnxruntime::fbs::TypeInfo *value_type() const { return GetPointer(VT_VALUE_TYPE); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_KEY_TYPE, 4) && VerifyOffset(verifier, VT_VALUE_TYPE) && @@ -696,37 +701,36 @@ struct MapType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct MapTypeBuilder { typedef MapType Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_key_type(onnxruntime::fbs::TensorDataType key_type) { fbb_.AddElement(MapType::VT_KEY_TYPE, static_cast(key_type), 0); } - void add_value_type(flatbuffers::Offset value_type) { + void add_value_type(::flatbuffers::Offset value_type) { fbb_.AddOffset(MapType::VT_VALUE_TYPE, value_type); } - explicit MapTypeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit MapTypeBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - MapTypeBuilder &operator=(const MapTypeBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = 
fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateMapType( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateMapType( + ::flatbuffers::FlatBufferBuilder &_fbb, onnxruntime::fbs::TensorDataType key_type = onnxruntime::fbs::TensorDataType::UNDEFINED, - flatbuffers::Offset value_type = 0) { + ::flatbuffers::Offset value_type = 0) { MapTypeBuilder builder_(_fbb); builder_.add_value_type(value_type); builder_.add_key_type(key_type); return builder_.Finish(); } -struct SequenceType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct SequenceType FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef SequenceTypeBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_ELEM_TYPE = 4 @@ -734,7 +738,7 @@ struct SequenceType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const onnxruntime::fbs::TypeInfo *elem_type() const { return GetPointer(VT_ELEM_TYPE); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_ELEM_TYPE) && verifier.VerifyTable(elem_type()) && @@ -744,32 +748,31 @@ struct SequenceType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct SequenceTypeBuilder { typedef SequenceType Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_elem_type(flatbuffers::Offset elem_type) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_elem_type(::flatbuffers::Offset elem_type) { fbb_.AddOffset(SequenceType::VT_ELEM_TYPE, elem_type); } - explicit SequenceTypeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit SequenceTypeBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - SequenceTypeBuilder &operator=(const SequenceTypeBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSequenceType( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset elem_type = 0) { +inline ::flatbuffers::Offset CreateSequenceType( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset elem_type = 0) { SequenceTypeBuilder builder_(_fbb); builder_.add_elem_type(elem_type); return builder_.Finish(); } -struct NodeEdge FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct NodeEdge FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef NodeEdgeBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NODE_INDEX = 4, @@ -779,13 +782,13 @@ struct NodeEdge FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { uint32_t node_index() const { return GetField(VT_NODE_INDEX, 0); } - const flatbuffers::Vector *input_edges() const { - return GetPointer *>(VT_INPUT_EDGES); + const ::flatbuffers::Vector *input_edges() const { + return GetPointer *>(VT_INPUT_EDGES); } - const flatbuffers::Vector *output_edges() const { - return GetPointer *>(VT_OUTPUT_EDGES); + const ::flatbuffers::Vector *output_edges() const { + return GetPointer *>(VT_OUTPUT_EDGES); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_NODE_INDEX, 4) && 
VerifyOffset(verifier, VT_INPUT_EDGES) && @@ -798,34 +801,33 @@ struct NodeEdge FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct NodeEdgeBuilder { typedef NodeEdge Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_node_index(uint32_t node_index) { fbb_.AddElement(NodeEdge::VT_NODE_INDEX, node_index, 0); } - void add_input_edges(flatbuffers::Offset> input_edges) { + void add_input_edges(::flatbuffers::Offset<::flatbuffers::Vector> input_edges) { fbb_.AddOffset(NodeEdge::VT_INPUT_EDGES, input_edges); } - void add_output_edges(flatbuffers::Offset> output_edges) { + void add_output_edges(::flatbuffers::Offset<::flatbuffers::Vector> output_edges) { fbb_.AddOffset(NodeEdge::VT_OUTPUT_EDGES, output_edges); } - explicit NodeEdgeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit NodeEdgeBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - NodeEdgeBuilder &operator=(const NodeEdgeBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateNodeEdge( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateNodeEdge( + ::flatbuffers::FlatBufferBuilder &_fbb, uint32_t node_index = 0, - flatbuffers::Offset> input_edges = 0, - flatbuffers::Offset> output_edges = 0) { + ::flatbuffers::Offset<::flatbuffers::Vector> input_edges = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> output_edges = 0) { NodeEdgeBuilder builder_(_fbb); builder_.add_output_edges(output_edges); builder_.add_input_edges(input_edges); @@ -833,8 +835,8 @@ inline flatbuffers::Offset CreateNodeEdge( return builder_.Finish(); } -inline flatbuffers::Offset CreateNodeEdgeDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateNodeEdgeDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, uint32_t node_index = 0, const std::vector *input_edges = nullptr, const std::vector *output_edges = nullptr) { @@ -847,7 +849,7 @@ inline flatbuffers::Offset CreateNodeEdgeDirect( output_edges__); } -struct Node FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct Node FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef NodeBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NAME = 4, @@ -864,14 +866,14 @@ struct Node FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { VT_INPUT_ARG_COUNTS = 26, VT_IMPLICIT_INPUTS = 28 }; - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); } - const flatbuffers::String *doc_string() const { - return GetPointer(VT_DOC_STRING); + const ::flatbuffers::String *doc_string() const { + return GetPointer(VT_DOC_STRING); } - const flatbuffers::String *domain() const { - return GetPointer(VT_DOMAIN); + const ::flatbuffers::String *domain() const { + return GetPointer(VT_DOMAIN); } int32_t since_version() const { return GetField(VT_SINCE_VERSION, 0); @@ -879,31 +881,31 @@ struct Node FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { uint32_t index() const { return GetField(VT_INDEX, 0); } - const flatbuffers::String *op_type() const { - return GetPointer(VT_OP_TYPE); + const ::flatbuffers::String *op_type() const { + return GetPointer(VT_OP_TYPE); } onnxruntime::fbs::NodeType 
type() const { return static_cast(GetField(VT_TYPE, 0)); } - const flatbuffers::String *execution_provider_type() const { - return GetPointer(VT_EXECUTION_PROVIDER_TYPE); + const ::flatbuffers::String *execution_provider_type() const { + return GetPointer(VT_EXECUTION_PROVIDER_TYPE); } - const flatbuffers::Vector> *inputs() const { - return GetPointer> *>(VT_INPUTS); + const ::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>> *inputs() const { + return GetPointer> *>(VT_INPUTS); } - const flatbuffers::Vector> *outputs() const { - return GetPointer> *>(VT_OUTPUTS); + const ::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>> *outputs() const { + return GetPointer> *>(VT_OUTPUTS); } - const flatbuffers::Vector> *attributes() const { - return GetPointer> *>(VT_ATTRIBUTES); + const ::flatbuffers::Vector<::flatbuffers::Offset> *attributes() const { + return GetPointer> *>(VT_ATTRIBUTES); } - const flatbuffers::Vector *input_arg_counts() const { - return GetPointer *>(VT_INPUT_ARG_COUNTS); + const ::flatbuffers::Vector *input_arg_counts() const { + return GetPointer *>(VT_INPUT_ARG_COUNTS); } - const flatbuffers::Vector> *implicit_inputs() const { - return GetPointer> *>(VT_IMPLICIT_INPUTS); + const ::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>> *implicit_inputs() const { + return GetPointer> *>(VT_IMPLICIT_INPUTS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NAME) && verifier.VerifyString(name()) && @@ -938,15 +940,15 @@ struct Node FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct NodeBuilder { typedef Node Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_name(flatbuffers::Offset name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { fbb_.AddOffset(Node::VT_NAME, name); } - void add_doc_string(flatbuffers::Offset doc_string) { + void add_doc_string(::flatbuffers::Offset<::flatbuffers::String> doc_string) { fbb_.AddOffset(Node::VT_DOC_STRING, doc_string); } - void add_domain(flatbuffers::Offset domain) { + void add_domain(::flatbuffers::Offset<::flatbuffers::String> domain) { fbb_.AddOffset(Node::VT_DOMAIN, domain); } void add_since_version(int32_t since_version) { @@ -955,57 +957,56 @@ struct NodeBuilder { void add_index(uint32_t index) { fbb_.AddElement(Node::VT_INDEX, index, 0); } - void add_op_type(flatbuffers::Offset op_type) { + void add_op_type(::flatbuffers::Offset<::flatbuffers::String> op_type) { fbb_.AddOffset(Node::VT_OP_TYPE, op_type); } void add_type(onnxruntime::fbs::NodeType type) { fbb_.AddElement(Node::VT_TYPE, static_cast(type), 0); } - void add_execution_provider_type(flatbuffers::Offset execution_provider_type) { + void add_execution_provider_type(::flatbuffers::Offset<::flatbuffers::String> execution_provider_type) { fbb_.AddOffset(Node::VT_EXECUTION_PROVIDER_TYPE, execution_provider_type); } - void add_inputs(flatbuffers::Offset>> inputs) { + void add_inputs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> inputs) { fbb_.AddOffset(Node::VT_INPUTS, inputs); } - void add_outputs(flatbuffers::Offset>> outputs) { + void add_outputs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> outputs) { fbb_.AddOffset(Node::VT_OUTPUTS, outputs); } - void 
add_attributes(flatbuffers::Offset>> attributes) { + void add_attributes(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> attributes) { fbb_.AddOffset(Node::VT_ATTRIBUTES, attributes); } - void add_input_arg_counts(flatbuffers::Offset> input_arg_counts) { + void add_input_arg_counts(::flatbuffers::Offset<::flatbuffers::Vector> input_arg_counts) { fbb_.AddOffset(Node::VT_INPUT_ARG_COUNTS, input_arg_counts); } - void add_implicit_inputs(flatbuffers::Offset>> implicit_inputs) { + void add_implicit_inputs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> implicit_inputs) { fbb_.AddOffset(Node::VT_IMPLICIT_INPUTS, implicit_inputs); } - explicit NodeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit NodeBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - NodeBuilder &operator=(const NodeBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateNode( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset name = 0, - flatbuffers::Offset doc_string = 0, - flatbuffers::Offset domain = 0, +inline ::flatbuffers::Offset CreateNode( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, + ::flatbuffers::Offset<::flatbuffers::String> doc_string = 0, + ::flatbuffers::Offset<::flatbuffers::String> domain = 0, int32_t since_version = 0, uint32_t index = 0, - flatbuffers::Offset op_type = 0, + ::flatbuffers::Offset<::flatbuffers::String> op_type = 0, onnxruntime::fbs::NodeType type = onnxruntime::fbs::NodeType::Primitive, - flatbuffers::Offset execution_provider_type = 0, - flatbuffers::Offset>> inputs = 0, - flatbuffers::Offset>> outputs = 0, - flatbuffers::Offset>> attributes = 0, - flatbuffers::Offset> input_arg_counts = 0, - flatbuffers::Offset>> implicit_inputs = 0) { + ::flatbuffers::Offset<::flatbuffers::String> execution_provider_type = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> inputs = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> outputs = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> attributes = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> input_arg_counts = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> implicit_inputs = 0) { NodeBuilder builder_(_fbb); builder_.add_implicit_inputs(implicit_inputs); builder_.add_input_arg_counts(input_arg_counts); @@ -1023,8 +1024,8 @@ inline flatbuffers::Offset CreateNode( return builder_.Finish(); } -inline flatbuffers::Offset CreateNodeDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateNodeDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, const char *doc_string = nullptr, const char *domain = nullptr, @@ -1033,21 +1034,21 @@ inline flatbuffers::Offset CreateNodeDirect( const char *op_type = nullptr, onnxruntime::fbs::NodeType type = onnxruntime::fbs::NodeType::Primitive, const char *execution_provider_type = nullptr, - const std::vector> *inputs = nullptr, - const std::vector> *outputs = nullptr, - const std::vector> *attributes = nullptr, + const std::vector<::flatbuffers::Offset<::flatbuffers::String>> *inputs = nullptr, + const 
std::vector<::flatbuffers::Offset<::flatbuffers::String>> *outputs = nullptr, + const std::vector<::flatbuffers::Offset> *attributes = nullptr, const std::vector *input_arg_counts = nullptr, - const std::vector> *implicit_inputs = nullptr) { + const std::vector<::flatbuffers::Offset<::flatbuffers::String>> *implicit_inputs = nullptr) { auto name__ = name ? _fbb.CreateString(name) : 0; auto doc_string__ = doc_string ? _fbb.CreateString(doc_string) : 0; auto domain__ = domain ? _fbb.CreateString(domain) : 0; auto op_type__ = op_type ? _fbb.CreateString(op_type) : 0; auto execution_provider_type__ = execution_provider_type ? _fbb.CreateString(execution_provider_type) : 0; - auto inputs__ = inputs ? _fbb.CreateVector>(*inputs) : 0; - auto outputs__ = outputs ? _fbb.CreateVector>(*outputs) : 0; - auto attributes__ = attributes ? _fbb.CreateVector>(*attributes) : 0; + auto inputs__ = inputs ? _fbb.CreateVector<::flatbuffers::Offset<::flatbuffers::String>>(*inputs) : 0; + auto outputs__ = outputs ? _fbb.CreateVector<::flatbuffers::Offset<::flatbuffers::String>>(*outputs) : 0; + auto attributes__ = attributes ? _fbb.CreateVector<::flatbuffers::Offset>(*attributes) : 0; auto input_arg_counts__ = input_arg_counts ? _fbb.CreateVector(*input_arg_counts) : 0; - auto implicit_inputs__ = implicit_inputs ? _fbb.CreateVector>(*implicit_inputs) : 0; + auto implicit_inputs__ = implicit_inputs ? _fbb.CreateVector<::flatbuffers::Offset<::flatbuffers::String>>(*implicit_inputs) : 0; return onnxruntime::fbs::CreateNode( _fbb, name__, @@ -1065,23 +1066,23 @@ inline flatbuffers::Offset CreateNodeDirect( implicit_inputs__); } -struct ValueInfo FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct ValueInfo FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef ValueInfoBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NAME = 4, VT_DOC_STRING = 6, VT_TYPE = 8 }; - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); } - const flatbuffers::String *doc_string() const { - return GetPointer(VT_DOC_STRING); + const ::flatbuffers::String *doc_string() const { + return GetPointer(VT_DOC_STRING); } const onnxruntime::fbs::TypeInfo *type() const { return GetPointer(VT_TYPE); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NAME) && verifier.VerifyString(name()) && @@ -1095,34 +1096,33 @@ struct ValueInfo FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct ValueInfoBuilder { typedef ValueInfo Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_name(flatbuffers::Offset name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { fbb_.AddOffset(ValueInfo::VT_NAME, name); } - void add_doc_string(flatbuffers::Offset doc_string) { + void add_doc_string(::flatbuffers::Offset<::flatbuffers::String> doc_string) { fbb_.AddOffset(ValueInfo::VT_DOC_STRING, doc_string); } - void add_type(flatbuffers::Offset type) { + void add_type(::flatbuffers::Offset type) { fbb_.AddOffset(ValueInfo::VT_TYPE, type); } - explicit ValueInfoBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit ValueInfoBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - ValueInfoBuilder &operator=(const 
ValueInfoBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateValueInfo( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset name = 0, - flatbuffers::Offset doc_string = 0, - flatbuffers::Offset type = 0) { +inline ::flatbuffers::Offset CreateValueInfo( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, + ::flatbuffers::Offset<::flatbuffers::String> doc_string = 0, + ::flatbuffers::Offset type = 0) { ValueInfoBuilder builder_(_fbb); builder_.add_type(type); builder_.add_doc_string(doc_string); @@ -1130,11 +1130,11 @@ inline flatbuffers::Offset CreateValueInfo( return builder_.Finish(); } -inline flatbuffers::Offset CreateValueInfoDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateValueInfoDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, const char *doc_string = nullptr, - flatbuffers::Offset type = 0) { + ::flatbuffers::Offset type = 0) { auto name__ = name ? _fbb.CreateString(name) : 0; auto doc_string__ = doc_string ? _fbb.CreateString(doc_string) : 0; return onnxruntime::fbs::CreateValueInfo( @@ -1144,15 +1144,15 @@ inline flatbuffers::Offset CreateValueInfoDirect( type); } -struct TypeInfo FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct TypeInfo FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef TypeInfoBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_DENOTATION = 4, VT_VALUE_TYPE = 6, VT_VALUE = 8 }; - const flatbuffers::String *denotation() const { - return GetPointer(VT_DENOTATION); + const ::flatbuffers::String *denotation() const { + return GetPointer(VT_DENOTATION); } onnxruntime::fbs::TypeInfoValue value_type() const { return static_cast(GetField(VT_VALUE_TYPE, 0)); @@ -1170,7 +1170,7 @@ struct TypeInfo FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const onnxruntime::fbs::MapType *value_as_map_type() const { return value_type() == onnxruntime::fbs::TypeInfoValue::map_type ? 
static_cast(value()) : nullptr; } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_DENOTATION) && verifier.VerifyString(denotation()) && @@ -1195,34 +1195,33 @@ template<> inline const onnxruntime::fbs::MapType *TypeInfo::value_as denotation) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_denotation(::flatbuffers::Offset<::flatbuffers::String> denotation) { fbb_.AddOffset(TypeInfo::VT_DENOTATION, denotation); } void add_value_type(onnxruntime::fbs::TypeInfoValue value_type) { fbb_.AddElement(TypeInfo::VT_VALUE_TYPE, static_cast(value_type), 0); } - void add_value(flatbuffers::Offset value) { + void add_value(::flatbuffers::Offset value) { fbb_.AddOffset(TypeInfo::VT_VALUE, value); } - explicit TypeInfoBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit TypeInfoBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - TypeInfoBuilder &operator=(const TypeInfoBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateTypeInfo( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset denotation = 0, +inline ::flatbuffers::Offset CreateTypeInfo( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> denotation = 0, onnxruntime::fbs::TypeInfoValue value_type = onnxruntime::fbs::TypeInfoValue::NONE, - flatbuffers::Offset value = 0) { + ::flatbuffers::Offset value = 0) { TypeInfoBuilder builder_(_fbb); builder_.add_value(value); builder_.add_denotation(denotation); @@ -1230,11 +1229,11 @@ inline flatbuffers::Offset CreateTypeInfo( return builder_.Finish(); } -inline flatbuffers::Offset CreateTypeInfoDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateTypeInfoDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *denotation = nullptr, onnxruntime::fbs::TypeInfoValue value_type = onnxruntime::fbs::TypeInfoValue::NONE, - flatbuffers::Offset value = 0) { + ::flatbuffers::Offset value = 0) { auto denotation__ = denotation ? 
_fbb.CreateString(denotation) : 0; return onnxruntime::fbs::CreateTypeInfo( _fbb, @@ -1243,19 +1242,19 @@ inline flatbuffers::Offset CreateTypeInfoDirect( value); } -struct OperatorSetId FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct OperatorSetId FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef OperatorSetIdBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_DOMAIN = 4, VT_VERSION = 6 }; - const flatbuffers::String *domain() const { - return GetPointer(VT_DOMAIN); + const ::flatbuffers::String *domain() const { + return GetPointer(VT_DOMAIN); } int64_t version() const { return GetField(VT_VERSION, 0); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_DOMAIN) && verifier.VerifyString(domain()) && @@ -1266,29 +1265,28 @@ struct OperatorSetId FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct OperatorSetIdBuilder { typedef OperatorSetId Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_domain(flatbuffers::Offset domain) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_domain(::flatbuffers::Offset<::flatbuffers::String> domain) { fbb_.AddOffset(OperatorSetId::VT_DOMAIN, domain); } void add_version(int64_t version) { fbb_.AddElement(OperatorSetId::VT_VERSION, version, 0); } - explicit OperatorSetIdBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit OperatorSetIdBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - OperatorSetIdBuilder &operator=(const OperatorSetIdBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateOperatorSetId( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset domain = 0, +inline ::flatbuffers::Offset CreateOperatorSetId( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> domain = 0, int64_t version = 0) { OperatorSetIdBuilder builder_(_fbb); builder_.add_version(version); @@ -1296,8 +1294,8 @@ inline flatbuffers::Offset CreateOperatorSetId( return builder_.Finish(); } -inline flatbuffers::Offset CreateOperatorSetIdDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateOperatorSetIdDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *domain = nullptr, int64_t version = 0) { auto domain__ = domain ? 
_fbb.CreateString(domain) : 0; @@ -1307,7 +1305,7 @@ inline flatbuffers::Offset CreateOperatorSetIdDirect( version); } -struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct Tensor FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef TensorBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NAME = 4, @@ -1315,27 +1313,31 @@ struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { VT_DIMS = 8, VT_DATA_TYPE = 10, VT_RAW_DATA = 12, - VT_STRING_DATA = 14 + VT_STRING_DATA = 14, + VT_EXTERNAL_DATA_OFFSET = 16 }; - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); } - const flatbuffers::String *doc_string() const { - return GetPointer(VT_DOC_STRING); + const ::flatbuffers::String *doc_string() const { + return GetPointer(VT_DOC_STRING); } - const flatbuffers::Vector *dims() const { - return GetPointer *>(VT_DIMS); + const ::flatbuffers::Vector *dims() const { + return GetPointer *>(VT_DIMS); } onnxruntime::fbs::TensorDataType data_type() const { return static_cast(GetField(VT_DATA_TYPE, 0)); } - const flatbuffers::Vector *raw_data() const { - return GetPointer *>(VT_RAW_DATA); + const ::flatbuffers::Vector *raw_data() const { + return GetPointer *>(VT_RAW_DATA); + } + const ::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>> *string_data() const { + return GetPointer> *>(VT_STRING_DATA); } - const flatbuffers::Vector> *string_data() const { - return GetPointer> *>(VT_STRING_DATA); + int64_t external_data_offset() const { + return GetField(VT_EXTERNAL_DATA_OFFSET, -1LL); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NAME) && verifier.VerifyString(name()) && @@ -1349,53 +1351,58 @@ struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { VerifyOffset(verifier, VT_STRING_DATA) && verifier.VerifyVector(string_data()) && verifier.VerifyVectorOfStrings(string_data()) && + VerifyField(verifier, VT_EXTERNAL_DATA_OFFSET, 8) && verifier.EndTable(); } }; struct TensorBuilder { typedef Tensor Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_name(flatbuffers::Offset name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { fbb_.AddOffset(Tensor::VT_NAME, name); } - void add_doc_string(flatbuffers::Offset doc_string) { + void add_doc_string(::flatbuffers::Offset<::flatbuffers::String> doc_string) { fbb_.AddOffset(Tensor::VT_DOC_STRING, doc_string); } - void add_dims(flatbuffers::Offset> dims) { + void add_dims(::flatbuffers::Offset<::flatbuffers::Vector> dims) { fbb_.AddOffset(Tensor::VT_DIMS, dims); } void add_data_type(onnxruntime::fbs::TensorDataType data_type) { fbb_.AddElement(Tensor::VT_DATA_TYPE, static_cast(data_type), 0); } - void add_raw_data(flatbuffers::Offset> raw_data) { + void add_raw_data(::flatbuffers::Offset<::flatbuffers::Vector> raw_data) { fbb_.AddOffset(Tensor::VT_RAW_DATA, raw_data); } - void add_string_data(flatbuffers::Offset>> string_data) { + void add_string_data(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> string_data) { fbb_.AddOffset(Tensor::VT_STRING_DATA, string_data); } - explicit TensorBuilder(flatbuffers::FlatBufferBuilder &_fbb) + void 
add_external_data_offset(int64_t external_data_offset) { + fbb_.AddElement(Tensor::VT_EXTERNAL_DATA_OFFSET, external_data_offset, -1LL); + } + explicit TensorBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - TensorBuilder &operator=(const TensorBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateTensor( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset name = 0, - flatbuffers::Offset doc_string = 0, - flatbuffers::Offset> dims = 0, +inline ::flatbuffers::Offset CreateTensor( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, + ::flatbuffers::Offset<::flatbuffers::String> doc_string = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> dims = 0, onnxruntime::fbs::TensorDataType data_type = onnxruntime::fbs::TensorDataType::UNDEFINED, - flatbuffers::Offset> raw_data = 0, - flatbuffers::Offset>> string_data = 0) { + ::flatbuffers::Offset<::flatbuffers::Vector> raw_data = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> string_data = 0, + int64_t external_data_offset = -1LL) { TensorBuilder builder_(_fbb); + builder_.add_external_data_offset(external_data_offset); builder_.add_string_data(string_data); builder_.add_raw_data(raw_data); builder_.add_data_type(data_type); @@ -1405,19 +1412,20 @@ inline flatbuffers::Offset CreateTensor( return builder_.Finish(); } -inline flatbuffers::Offset CreateTensorDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateTensorDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, const char *doc_string = nullptr, const std::vector *dims = nullptr, onnxruntime::fbs::TensorDataType data_type = onnxruntime::fbs::TensorDataType::UNDEFINED, const std::vector *raw_data = nullptr, - const std::vector> *string_data = nullptr) { + const std::vector<::flatbuffers::Offset<::flatbuffers::String>> *string_data = nullptr, + int64_t external_data_offset = -1LL) { auto name__ = name ? _fbb.CreateString(name) : 0; auto doc_string__ = doc_string ? _fbb.CreateString(doc_string) : 0; auto dims__ = dims ? _fbb.CreateVector(*dims) : 0; auto raw_data__ = raw_data ? _fbb.CreateVector(*raw_data) : 0; - auto string_data__ = string_data ? _fbb.CreateVector>(*string_data) : 0; + auto string_data__ = string_data ? 
_fbb.CreateVector<::flatbuffers::Offset<::flatbuffers::String>>(*string_data) : 0; return onnxruntime::fbs::CreateTensor( _fbb, name__, @@ -1425,10 +1433,11 @@ inline flatbuffers::Offset CreateTensorDirect( dims__, data_type, raw_data__, - string_data__); + string_data__, + external_data_offset); } -struct SparseTensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct SparseTensor FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef SparseTensorBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_VALUES = 4, @@ -1441,10 +1450,10 @@ struct SparseTensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const onnxruntime::fbs::Tensor *indices() const { return GetPointer(VT_INDICES); } - const flatbuffers::Vector *dims() const { - return GetPointer *>(VT_DIMS); + const ::flatbuffers::Vector *dims() const { + return GetPointer *>(VT_DIMS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_VALUES) && verifier.VerifyTable(values()) && @@ -1458,34 +1467,33 @@ struct SparseTensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct SparseTensorBuilder { typedef SparseTensor Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_values(flatbuffers::Offset values) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_values(::flatbuffers::Offset values) { fbb_.AddOffset(SparseTensor::VT_VALUES, values); } - void add_indices(flatbuffers::Offset indices) { + void add_indices(::flatbuffers::Offset indices) { fbb_.AddOffset(SparseTensor::VT_INDICES, indices); } - void add_dims(flatbuffers::Offset> dims) { + void add_dims(::flatbuffers::Offset<::flatbuffers::Vector> dims) { fbb_.AddOffset(SparseTensor::VT_DIMS, dims); } - explicit SparseTensorBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit SparseTensorBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - SparseTensorBuilder &operator=(const SparseTensorBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSparseTensor( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset values = 0, - flatbuffers::Offset indices = 0, - flatbuffers::Offset> dims = 0) { +inline ::flatbuffers::Offset CreateSparseTensor( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset values = 0, + ::flatbuffers::Offset indices = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> dims = 0) { SparseTensorBuilder builder_(_fbb); builder_.add_dims(dims); builder_.add_indices(indices); @@ -1493,10 +1501,10 @@ inline flatbuffers::Offset CreateSparseTensor( return builder_.Finish(); } -inline flatbuffers::Offset CreateSparseTensorDirect( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset values = 0, - flatbuffers::Offset indices = 0, +inline ::flatbuffers::Offset CreateSparseTensorDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset values = 0, + ::flatbuffers::Offset indices = 0, const std::vector *dims = nullptr) { auto dims__ = dims ? 
_fbb.CreateVector(*dims) : 0; return onnxruntime::fbs::CreateSparseTensor( @@ -1506,7 +1514,7 @@ inline flatbuffers::Offset CreateSparseTensorDirect( dims__); } -struct Attribute FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct Attribute FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef AttributeBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NAME = 4, @@ -1523,11 +1531,11 @@ struct Attribute FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { VT_TENSORS = 26, VT_GRAPHS = 28 }; - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); } - const flatbuffers::String *doc_string() const { - return GetPointer(VT_DOC_STRING); + const ::flatbuffers::String *doc_string() const { + return GetPointer(VT_DOC_STRING); } onnxruntime::fbs::AttributeType type() const { return static_cast(GetField(VT_TYPE, 0)); @@ -1538,8 +1546,8 @@ struct Attribute FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { int64_t i() const { return GetField(VT_I, 0); } - const flatbuffers::String *s() const { - return GetPointer(VT_S); + const ::flatbuffers::String *s() const { + return GetPointer(VT_S); } const onnxruntime::fbs::Tensor *t() const { return GetPointer(VT_T); @@ -1547,22 +1555,22 @@ struct Attribute FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const onnxruntime::fbs::Graph *g() const { return GetPointer(VT_G); } - const flatbuffers::Vector *floats() const { - return GetPointer *>(VT_FLOATS); + const ::flatbuffers::Vector *floats() const { + return GetPointer *>(VT_FLOATS); } - const flatbuffers::Vector *ints() const { - return GetPointer *>(VT_INTS); + const ::flatbuffers::Vector *ints() const { + return GetPointer *>(VT_INTS); } - const flatbuffers::Vector> *strings() const { - return GetPointer> *>(VT_STRINGS); + const ::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>> *strings() const { + return GetPointer> *>(VT_STRINGS); } - const flatbuffers::Vector> *tensors() const { - return GetPointer> *>(VT_TENSORS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *tensors() const { + return GetPointer> *>(VT_TENSORS); } - const flatbuffers::Vector> *graphs() const { - return GetPointer> *>(VT_GRAPHS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *graphs() const { + return GetPointer> *>(VT_GRAPHS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NAME) && verifier.VerifyString(name()) && @@ -1596,12 +1604,12 @@ struct Attribute FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct AttributeBuilder { typedef Attribute Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_name(flatbuffers::Offset name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { fbb_.AddOffset(Attribute::VT_NAME, name); } - void add_doc_string(flatbuffers::Offset doc_string) { + void add_doc_string(::flatbuffers::Offset<::flatbuffers::String> doc_string) { fbb_.AddOffset(Attribute::VT_DOC_STRING, doc_string); } void add_type(onnxruntime::fbs::AttributeType type) { @@ -1613,57 +1621,56 @@ struct AttributeBuilder { void add_i(int64_t i) { fbb_.AddElement(Attribute::VT_I, i, 0); } - void add_s(flatbuffers::Offset s) { + void add_s(::flatbuffers::Offset<::flatbuffers::String> s) { 
fbb_.AddOffset(Attribute::VT_S, s); } - void add_t(flatbuffers::Offset t) { + void add_t(::flatbuffers::Offset t) { fbb_.AddOffset(Attribute::VT_T, t); } - void add_g(flatbuffers::Offset g) { + void add_g(::flatbuffers::Offset g) { fbb_.AddOffset(Attribute::VT_G, g); } - void add_floats(flatbuffers::Offset> floats) { + void add_floats(::flatbuffers::Offset<::flatbuffers::Vector> floats) { fbb_.AddOffset(Attribute::VT_FLOATS, floats); } - void add_ints(flatbuffers::Offset> ints) { + void add_ints(::flatbuffers::Offset<::flatbuffers::Vector> ints) { fbb_.AddOffset(Attribute::VT_INTS, ints); } - void add_strings(flatbuffers::Offset>> strings) { + void add_strings(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> strings) { fbb_.AddOffset(Attribute::VT_STRINGS, strings); } - void add_tensors(flatbuffers::Offset>> tensors) { + void add_tensors(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> tensors) { fbb_.AddOffset(Attribute::VT_TENSORS, tensors); } - void add_graphs(flatbuffers::Offset>> graphs) { + void add_graphs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> graphs) { fbb_.AddOffset(Attribute::VT_GRAPHS, graphs); } - explicit AttributeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit AttributeBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - AttributeBuilder &operator=(const AttributeBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateAttribute( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset name = 0, - flatbuffers::Offset doc_string = 0, +inline ::flatbuffers::Offset CreateAttribute( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, + ::flatbuffers::Offset<::flatbuffers::String> doc_string = 0, onnxruntime::fbs::AttributeType type = onnxruntime::fbs::AttributeType::UNDEFINED, float f = 0.0f, int64_t i = 0, - flatbuffers::Offset s = 0, - flatbuffers::Offset t = 0, - flatbuffers::Offset g = 0, - flatbuffers::Offset> floats = 0, - flatbuffers::Offset> ints = 0, - flatbuffers::Offset>> strings = 0, - flatbuffers::Offset>> tensors = 0, - flatbuffers::Offset>> graphs = 0) { + ::flatbuffers::Offset<::flatbuffers::String> s = 0, + ::flatbuffers::Offset t = 0, + ::flatbuffers::Offset g = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> floats = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> ints = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> strings = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> tensors = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> graphs = 0) { AttributeBuilder builder_(_fbb); builder_.add_i(i); builder_.add_graphs(graphs); @@ -1681,29 +1688,29 @@ inline flatbuffers::Offset CreateAttribute( return builder_.Finish(); } -inline flatbuffers::Offset CreateAttributeDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateAttributeDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, const char *doc_string = nullptr, onnxruntime::fbs::AttributeType type = onnxruntime::fbs::AttributeType::UNDEFINED, float f = 0.0f, int64_t i = 0, const char *s = nullptr, - flatbuffers::Offset t = 0, - flatbuffers::Offset g = 0, + ::flatbuffers::Offset t = 0, + 
::flatbuffers::Offset g = 0, const std::vector *floats = nullptr, const std::vector *ints = nullptr, - const std::vector> *strings = nullptr, - const std::vector> *tensors = nullptr, - const std::vector> *graphs = nullptr) { + const std::vector<::flatbuffers::Offset<::flatbuffers::String>> *strings = nullptr, + const std::vector<::flatbuffers::Offset> *tensors = nullptr, + const std::vector<::flatbuffers::Offset> *graphs = nullptr) { auto name__ = name ? _fbb.CreateString(name) : 0; auto doc_string__ = doc_string ? _fbb.CreateString(doc_string) : 0; auto s__ = s ? _fbb.CreateString(s) : 0; auto floats__ = floats ? _fbb.CreateVector(*floats) : 0; auto ints__ = ints ? _fbb.CreateVector(*ints) : 0; - auto strings__ = strings ? _fbb.CreateVector>(*strings) : 0; - auto tensors__ = tensors ? _fbb.CreateVector>(*tensors) : 0; - auto graphs__ = graphs ? _fbb.CreateVector>(*graphs) : 0; + auto strings__ = strings ? _fbb.CreateVector<::flatbuffers::Offset<::flatbuffers::String>>(*strings) : 0; + auto tensors__ = tensors ? _fbb.CreateVector<::flatbuffers::Offset>(*tensors) : 0; + auto graphs__ = graphs ? _fbb.CreateVector<::flatbuffers::Offset>(*graphs) : 0; return onnxruntime::fbs::CreateAttribute( _fbb, name__, @@ -1723,7 +1730,7 @@ inline flatbuffers::Offset CreateAttributeDirect( /// nodes to consider for a runtime optimization /// see corresponding type in onnxruntime/core/graph/runtime_optimization_record.h -struct NodesToOptimizeIndices FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct NodesToOptimizeIndices FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef NodesToOptimizeIndicesBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NODE_INDICES = 4, @@ -1734,8 +1741,8 @@ struct NodesToOptimizeIndices FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tab VT_NUM_VARIADIC_INPUTS = 14, VT_NUM_VARIADIC_OUTPUTS = 16 }; - const flatbuffers::Vector *node_indices() const { - return GetPointer *>(VT_NODE_INDICES); + const ::flatbuffers::Vector *node_indices() const { + return GetPointer *>(VT_NODE_INDICES); } uint32_t num_inputs() const { return GetField(VT_NUM_INPUTS, 0); @@ -1755,7 +1762,7 @@ struct NodesToOptimizeIndices FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tab uint32_t num_variadic_outputs() const { return GetField(VT_NUM_VARIADIC_OUTPUTS, 0); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NODE_INDICES) && verifier.VerifyVector(node_indices()) && @@ -1771,9 +1778,9 @@ struct NodesToOptimizeIndices FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tab struct NodesToOptimizeIndicesBuilder { typedef NodesToOptimizeIndices Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_node_indices(flatbuffers::Offset> node_indices) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_node_indices(::flatbuffers::Offset<::flatbuffers::Vector> node_indices) { fbb_.AddOffset(NodesToOptimizeIndices::VT_NODE_INDICES, node_indices); } void add_num_inputs(uint32_t num_inputs) { @@ -1794,21 +1801,20 @@ struct NodesToOptimizeIndicesBuilder { void add_num_variadic_outputs(uint32_t num_variadic_outputs) { fbb_.AddElement(NodesToOptimizeIndices::VT_NUM_VARIADIC_OUTPUTS, num_variadic_outputs, 0); } - explicit NodesToOptimizeIndicesBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit NodesToOptimizeIndicesBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : 
fbb_(_fbb) { start_ = fbb_.StartTable(); } - NodesToOptimizeIndicesBuilder &operator=(const NodesToOptimizeIndicesBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateNodesToOptimizeIndices( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset> node_indices = 0, +inline ::flatbuffers::Offset CreateNodesToOptimizeIndices( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector> node_indices = 0, uint32_t num_inputs = 0, uint32_t num_outputs = 0, bool has_variadic_input = false, @@ -1826,8 +1832,8 @@ inline flatbuffers::Offset CreateNodesToOptimizeIndices( return builder_.Finish(); } -inline flatbuffers::Offset CreateNodesToOptimizeIndicesDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateNodesToOptimizeIndicesDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const std::vector *node_indices = nullptr, uint32_t num_inputs = 0, uint32_t num_outputs = 0, @@ -1848,7 +1854,7 @@ inline flatbuffers::Offset CreateNodesToOptimizeIndicesD } /// deprecated: no longer using kernel def hashes -struct DeprecatedNodeIndexAndKernelDefHash FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct DeprecatedNodeIndexAndKernelDefHash FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef DeprecatedNodeIndexAndKernelDefHashBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NODE_INDEX = 4, @@ -1860,7 +1866,7 @@ struct DeprecatedNodeIndexAndKernelDefHash FLATBUFFERS_FINAL_CLASS : private fla uint64_t kernel_def_hash() const { return GetField(VT_KERNEL_DEF_HASH, 0); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_NODE_INDEX, 4) && VerifyField(verifier, VT_KERNEL_DEF_HASH, 8) && @@ -1870,28 +1876,27 @@ struct DeprecatedNodeIndexAndKernelDefHash FLATBUFFERS_FINAL_CLASS : private fla struct DeprecatedNodeIndexAndKernelDefHashBuilder { typedef DeprecatedNodeIndexAndKernelDefHash Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_node_index(uint32_t node_index) { fbb_.AddElement(DeprecatedNodeIndexAndKernelDefHash::VT_NODE_INDEX, node_index, 0); } void add_kernel_def_hash(uint64_t kernel_def_hash) { fbb_.AddElement(DeprecatedNodeIndexAndKernelDefHash::VT_KERNEL_DEF_HASH, kernel_def_hash, 0); } - explicit DeprecatedNodeIndexAndKernelDefHashBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit DeprecatedNodeIndexAndKernelDefHashBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - DeprecatedNodeIndexAndKernelDefHashBuilder &operator=(const DeprecatedNodeIndexAndKernelDefHashBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateDeprecatedNodeIndexAndKernelDefHash( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateDeprecatedNodeIndexAndKernelDefHash( + ::flatbuffers::FlatBufferBuilder &_fbb, uint32_t node_index = 0, uint64_t kernel_def_hash = 0) { DeprecatedNodeIndexAndKernelDefHashBuilder builder_(_fbb); @@ -1902,23 
+1907,23 @@ inline flatbuffers::Offset CreateDeprecated /// a single runtime optimization /// see corresponding type in onnxruntime/core/graph/runtime_optimization_record.h -struct RuntimeOptimizationRecord FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct RuntimeOptimizationRecord FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef RuntimeOptimizationRecordBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_ACTION_ID = 4, VT_NODES_TO_OPTIMIZE_INDICES = 6, VT_PRODUCED_OP_IDS = 10 }; - const flatbuffers::String *action_id() const { - return GetPointer(VT_ACTION_ID); + const ::flatbuffers::String *action_id() const { + return GetPointer(VT_ACTION_ID); } const onnxruntime::fbs::NodesToOptimizeIndices *nodes_to_optimize_indices() const { return GetPointer(VT_NODES_TO_OPTIMIZE_INDICES); } - const flatbuffers::Vector> *produced_op_ids() const { - return GetPointer> *>(VT_PRODUCED_OP_IDS); + const ::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>> *produced_op_ids() const { + return GetPointer> *>(VT_PRODUCED_OP_IDS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_ACTION_ID) && verifier.VerifyString(action_id()) && @@ -1933,34 +1938,33 @@ struct RuntimeOptimizationRecord FLATBUFFERS_FINAL_CLASS : private flatbuffers:: struct RuntimeOptimizationRecordBuilder { typedef RuntimeOptimizationRecord Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_action_id(flatbuffers::Offset action_id) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_action_id(::flatbuffers::Offset<::flatbuffers::String> action_id) { fbb_.AddOffset(RuntimeOptimizationRecord::VT_ACTION_ID, action_id); } - void add_nodes_to_optimize_indices(flatbuffers::Offset nodes_to_optimize_indices) { + void add_nodes_to_optimize_indices(::flatbuffers::Offset nodes_to_optimize_indices) { fbb_.AddOffset(RuntimeOptimizationRecord::VT_NODES_TO_OPTIMIZE_INDICES, nodes_to_optimize_indices); } - void add_produced_op_ids(flatbuffers::Offset>> produced_op_ids) { + void add_produced_op_ids(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> produced_op_ids) { fbb_.AddOffset(RuntimeOptimizationRecord::VT_PRODUCED_OP_IDS, produced_op_ids); } - explicit RuntimeOptimizationRecordBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit RuntimeOptimizationRecordBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - RuntimeOptimizationRecordBuilder &operator=(const RuntimeOptimizationRecordBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateRuntimeOptimizationRecord( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset action_id = 0, - flatbuffers::Offset nodes_to_optimize_indices = 0, - flatbuffers::Offset>> produced_op_ids = 0) { +inline ::flatbuffers::Offset CreateRuntimeOptimizationRecord( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> action_id = 0, + ::flatbuffers::Offset nodes_to_optimize_indices = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> produced_op_ids = 0) { RuntimeOptimizationRecordBuilder 
builder_(_fbb); builder_.add_produced_op_ids(produced_op_ids); builder_.add_nodes_to_optimize_indices(nodes_to_optimize_indices); @@ -1968,13 +1972,13 @@ inline flatbuffers::Offset CreateRuntimeOptimizationR return builder_.Finish(); } -inline flatbuffers::Offset CreateRuntimeOptimizationRecordDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateRuntimeOptimizationRecordDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *action_id = nullptr, - flatbuffers::Offset nodes_to_optimize_indices = 0, - const std::vector> *produced_op_ids = nullptr) { + ::flatbuffers::Offset nodes_to_optimize_indices = 0, + const std::vector<::flatbuffers::Offset<::flatbuffers::String>> *produced_op_ids = nullptr) { auto action_id__ = action_id ? _fbb.CreateString(action_id) : 0; - auto produced_op_ids__ = produced_op_ids ? _fbb.CreateVector>(*produced_op_ids) : 0; + auto produced_op_ids__ = produced_op_ids ? _fbb.CreateVector<::flatbuffers::Offset<::flatbuffers::String>>(*produced_op_ids) : 0; return onnxruntime::fbs::CreateRuntimeOptimizationRecord( _fbb, action_id__, @@ -1982,25 +1986,25 @@ inline flatbuffers::Offset CreateRuntimeOptimizationR produced_op_ids__); } -struct RuntimeOptimizationRecordContainerEntry FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct RuntimeOptimizationRecordContainerEntry FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef RuntimeOptimizationRecordContainerEntryBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_OPTIMIZER_NAME = 4, VT_RUNTIME_OPTIMIZATION_RECORDS = 6 }; - const flatbuffers::String *optimizer_name() const { - return GetPointer(VT_OPTIMIZER_NAME); + const ::flatbuffers::String *optimizer_name() const { + return GetPointer(VT_OPTIMIZER_NAME); } - bool KeyCompareLessThan(const RuntimeOptimizationRecordContainerEntry *o) const { + bool KeyCompareLessThan(const RuntimeOptimizationRecordContainerEntry * const o) const { return *optimizer_name() < *o->optimizer_name(); } - int KeyCompareWithValue(const char *val) const { - return strcmp(optimizer_name()->c_str(), val); + int KeyCompareWithValue(const char *_optimizer_name) const { + return strcmp(optimizer_name()->c_str(), _optimizer_name); } - const flatbuffers::Vector> *runtime_optimization_records() const { - return GetPointer> *>(VT_RUNTIME_OPTIMIZATION_RECORDS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *runtime_optimization_records() const { + return GetPointer> *>(VT_RUNTIME_OPTIMIZATION_RECORDS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_OPTIMIZER_NAME) && verifier.VerifyString(optimizer_name()) && @@ -2013,59 +2017,58 @@ struct RuntimeOptimizationRecordContainerEntry FLATBUFFERS_FINAL_CLASS : private struct RuntimeOptimizationRecordContainerEntryBuilder { typedef RuntimeOptimizationRecordContainerEntry Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_optimizer_name(flatbuffers::Offset optimizer_name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_optimizer_name(::flatbuffers::Offset<::flatbuffers::String> optimizer_name) { fbb_.AddOffset(RuntimeOptimizationRecordContainerEntry::VT_OPTIMIZER_NAME, optimizer_name); } - void add_runtime_optimization_records(flatbuffers::Offset>> runtime_optimization_records) { + void 
add_runtime_optimization_records(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> runtime_optimization_records) { fbb_.AddOffset(RuntimeOptimizationRecordContainerEntry::VT_RUNTIME_OPTIMIZATION_RECORDS, runtime_optimization_records); } - explicit RuntimeOptimizationRecordContainerEntryBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit RuntimeOptimizationRecordContainerEntryBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - RuntimeOptimizationRecordContainerEntryBuilder &operator=(const RuntimeOptimizationRecordContainerEntryBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); fbb_.Required(o, RuntimeOptimizationRecordContainerEntry::VT_OPTIMIZER_NAME); return o; } }; -inline flatbuffers::Offset CreateRuntimeOptimizationRecordContainerEntry( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset optimizer_name = 0, - flatbuffers::Offset>> runtime_optimization_records = 0) { +inline ::flatbuffers::Offset CreateRuntimeOptimizationRecordContainerEntry( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> optimizer_name = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> runtime_optimization_records = 0) { RuntimeOptimizationRecordContainerEntryBuilder builder_(_fbb); builder_.add_runtime_optimization_records(runtime_optimization_records); builder_.add_optimizer_name(optimizer_name); return builder_.Finish(); } -inline flatbuffers::Offset CreateRuntimeOptimizationRecordContainerEntryDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateRuntimeOptimizationRecordContainerEntryDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *optimizer_name = nullptr, - const std::vector> *runtime_optimization_records = nullptr) { + const std::vector<::flatbuffers::Offset> *runtime_optimization_records = nullptr) { auto optimizer_name__ = optimizer_name ? _fbb.CreateString(optimizer_name) : 0; - auto runtime_optimization_records__ = runtime_optimization_records ? _fbb.CreateVector>(*runtime_optimization_records) : 0; + auto runtime_optimization_records__ = runtime_optimization_records ? 
_fbb.CreateVector<::flatbuffers::Offset>(*runtime_optimization_records) : 0; return onnxruntime::fbs::CreateRuntimeOptimizationRecordContainerEntry( _fbb, optimizer_name__, runtime_optimization_records__); } -struct RuntimeOptimizations FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct RuntimeOptimizations FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef RuntimeOptimizationsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_RECORDS = 4 }; /// mapping from optimizer name to [RuntimeOptimizationRecord] - const flatbuffers::Vector> *records() const { - return GetPointer> *>(VT_RECORDS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *records() const { + return GetPointer> *>(VT_RECORDS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_RECORDS) && verifier.VerifyVector(records()) && @@ -2076,41 +2079,40 @@ struct RuntimeOptimizations FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table struct RuntimeOptimizationsBuilder { typedef RuntimeOptimizations Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_records(flatbuffers::Offset>> records) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_records(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> records) { fbb_.AddOffset(RuntimeOptimizations::VT_RECORDS, records); } - explicit RuntimeOptimizationsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit RuntimeOptimizationsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - RuntimeOptimizationsBuilder &operator=(const RuntimeOptimizationsBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateRuntimeOptimizations( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset>> records = 0) { +inline ::flatbuffers::Offset CreateRuntimeOptimizations( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> records = 0) { RuntimeOptimizationsBuilder builder_(_fbb); builder_.add_records(records); return builder_.Finish(); } -inline flatbuffers::Offset CreateRuntimeOptimizationsDirect( - flatbuffers::FlatBufferBuilder &_fbb, - std::vector> *records = nullptr) { +inline ::flatbuffers::Offset CreateRuntimeOptimizationsDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + std::vector<::flatbuffers::Offset> *records = nullptr) { auto records__ = records ? 
_fbb.CreateVectorOfSortedTables(records) : 0; return onnxruntime::fbs::CreateRuntimeOptimizations( _fbb, records__); } -struct Graph FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct Graph FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef GraphBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_INITIALIZERS = 4, @@ -2123,34 +2125,34 @@ struct Graph FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { VT_SPARSE_INITIALIZERS = 18, VT_RUNTIME_OPTIMIZATIONS = 20 }; - const flatbuffers::Vector> *initializers() const { - return GetPointer> *>(VT_INITIALIZERS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *initializers() const { + return GetPointer> *>(VT_INITIALIZERS); } - const flatbuffers::Vector> *node_args() const { - return GetPointer> *>(VT_NODE_ARGS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *node_args() const { + return GetPointer> *>(VT_NODE_ARGS); } - const flatbuffers::Vector> *nodes() const { - return GetPointer> *>(VT_NODES); + const ::flatbuffers::Vector<::flatbuffers::Offset> *nodes() const { + return GetPointer> *>(VT_NODES); } uint32_t max_node_index() const { return GetField(VT_MAX_NODE_INDEX, 0); } - const flatbuffers::Vector> *node_edges() const { - return GetPointer> *>(VT_NODE_EDGES); + const ::flatbuffers::Vector<::flatbuffers::Offset> *node_edges() const { + return GetPointer> *>(VT_NODE_EDGES); } - const flatbuffers::Vector> *inputs() const { - return GetPointer> *>(VT_INPUTS); + const ::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>> *inputs() const { + return GetPointer> *>(VT_INPUTS); } - const flatbuffers::Vector> *outputs() const { - return GetPointer> *>(VT_OUTPUTS); + const ::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>> *outputs() const { + return GetPointer> *>(VT_OUTPUTS); } - const flatbuffers::Vector> *sparse_initializers() const { - return GetPointer> *>(VT_SPARSE_INITIALIZERS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *sparse_initializers() const { + return GetPointer> *>(VT_SPARSE_INITIALIZERS); } const onnxruntime::fbs::RuntimeOptimizations *runtime_optimizations() const { return GetPointer(VT_RUNTIME_OPTIMIZATIONS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_INITIALIZERS) && verifier.VerifyVector(initializers()) && @@ -2182,58 +2184,57 @@ struct Graph FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct GraphBuilder { typedef Graph Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_initializers(flatbuffers::Offset>> initializers) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_initializers(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> initializers) { fbb_.AddOffset(Graph::VT_INITIALIZERS, initializers); } - void add_node_args(flatbuffers::Offset>> node_args) { + void add_node_args(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> node_args) { fbb_.AddOffset(Graph::VT_NODE_ARGS, node_args); } - void add_nodes(flatbuffers::Offset>> nodes) { + void add_nodes(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> nodes) { fbb_.AddOffset(Graph::VT_NODES, nodes); } void add_max_node_index(uint32_t max_node_index) { fbb_.AddElement(Graph::VT_MAX_NODE_INDEX, max_node_index, 0); } - void add_node_edges(flatbuffers::Offset>> node_edges) { + void 
add_node_edges(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> node_edges) { fbb_.AddOffset(Graph::VT_NODE_EDGES, node_edges); } - void add_inputs(flatbuffers::Offset>> inputs) { + void add_inputs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> inputs) { fbb_.AddOffset(Graph::VT_INPUTS, inputs); } - void add_outputs(flatbuffers::Offset>> outputs) { + void add_outputs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> outputs) { fbb_.AddOffset(Graph::VT_OUTPUTS, outputs); } - void add_sparse_initializers(flatbuffers::Offset>> sparse_initializers) { + void add_sparse_initializers(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> sparse_initializers) { fbb_.AddOffset(Graph::VT_SPARSE_INITIALIZERS, sparse_initializers); } - void add_runtime_optimizations(flatbuffers::Offset runtime_optimizations) { + void add_runtime_optimizations(::flatbuffers::Offset runtime_optimizations) { fbb_.AddOffset(Graph::VT_RUNTIME_OPTIMIZATIONS, runtime_optimizations); } - explicit GraphBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit GraphBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - GraphBuilder &operator=(const GraphBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateGraph( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset>> initializers = 0, - flatbuffers::Offset>> node_args = 0, - flatbuffers::Offset>> nodes = 0, +inline ::flatbuffers::Offset CreateGraph( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> initializers = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> node_args = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> nodes = 0, uint32_t max_node_index = 0, - flatbuffers::Offset>> node_edges = 0, - flatbuffers::Offset>> inputs = 0, - flatbuffers::Offset>> outputs = 0, - flatbuffers::Offset>> sparse_initializers = 0, - flatbuffers::Offset runtime_optimizations = 0) { + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> node_edges = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> inputs = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<::flatbuffers::String>>> outputs = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> sparse_initializers = 0, + ::flatbuffers::Offset runtime_optimizations = 0) { GraphBuilder builder_(_fbb); builder_.add_runtime_optimizations(runtime_optimizations); builder_.add_sparse_initializers(sparse_initializers); @@ -2247,24 +2248,24 @@ inline flatbuffers::Offset CreateGraph( return builder_.Finish(); } -inline flatbuffers::Offset CreateGraphDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const std::vector> *initializers = nullptr, - const std::vector> *node_args = nullptr, - const std::vector> *nodes = nullptr, +inline ::flatbuffers::Offset CreateGraphDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector<::flatbuffers::Offset> *initializers = nullptr, + const std::vector<::flatbuffers::Offset> *node_args = nullptr, + const std::vector<::flatbuffers::Offset> *nodes = nullptr, uint32_t max_node_index = 0, - const std::vector> *node_edges = nullptr, - const std::vector> 
*inputs = nullptr, - const std::vector> *outputs = nullptr, - const std::vector> *sparse_initializers = nullptr, - flatbuffers::Offset runtime_optimizations = 0) { - auto initializers__ = initializers ? _fbb.CreateVector>(*initializers) : 0; - auto node_args__ = node_args ? _fbb.CreateVector>(*node_args) : 0; - auto nodes__ = nodes ? _fbb.CreateVector>(*nodes) : 0; - auto node_edges__ = node_edges ? _fbb.CreateVector>(*node_edges) : 0; - auto inputs__ = inputs ? _fbb.CreateVector>(*inputs) : 0; - auto outputs__ = outputs ? _fbb.CreateVector>(*outputs) : 0; - auto sparse_initializers__ = sparse_initializers ? _fbb.CreateVector>(*sparse_initializers) : 0; + const std::vector<::flatbuffers::Offset> *node_edges = nullptr, + const std::vector<::flatbuffers::Offset<::flatbuffers::String>> *inputs = nullptr, + const std::vector<::flatbuffers::Offset<::flatbuffers::String>> *outputs = nullptr, + const std::vector<::flatbuffers::Offset> *sparse_initializers = nullptr, + ::flatbuffers::Offset runtime_optimizations = 0) { + auto initializers__ = initializers ? _fbb.CreateVector<::flatbuffers::Offset>(*initializers) : 0; + auto node_args__ = node_args ? _fbb.CreateVector<::flatbuffers::Offset>(*node_args) : 0; + auto nodes__ = nodes ? _fbb.CreateVector<::flatbuffers::Offset>(*nodes) : 0; + auto node_edges__ = node_edges ? _fbb.CreateVector<::flatbuffers::Offset>(*node_edges) : 0; + auto inputs__ = inputs ? _fbb.CreateVector<::flatbuffers::Offset<::flatbuffers::String>>(*inputs) : 0; + auto outputs__ = outputs ? _fbb.CreateVector<::flatbuffers::Offset<::flatbuffers::String>>(*outputs) : 0; + auto sparse_initializers__ = sparse_initializers ? _fbb.CreateVector<::flatbuffers::Offset>(*sparse_initializers) : 0; return onnxruntime::fbs::CreateGraph( _fbb, initializers__, @@ -2278,19 +2279,19 @@ inline flatbuffers::Offset CreateGraphDirect( runtime_optimizations); } -struct StringStringEntry FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct StringStringEntry FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef StringStringEntryBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_KEY = 4, VT_VALUE = 6 }; - const flatbuffers::String *key() const { - return GetPointer(VT_KEY); + const ::flatbuffers::String *key() const { + return GetPointer(VT_KEY); } - const flatbuffers::String *value() const { - return GetPointer(VT_VALUE); + const ::flatbuffers::String *value() const { + return GetPointer(VT_VALUE); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_KEY) && verifier.VerifyString(key()) && @@ -2302,38 +2303,37 @@ struct StringStringEntry FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct StringStringEntryBuilder { typedef StringStringEntry Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_key(flatbuffers::Offset key) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_key(::flatbuffers::Offset<::flatbuffers::String> key) { fbb_.AddOffset(StringStringEntry::VT_KEY, key); } - void add_value(flatbuffers::Offset value) { + void add_value(::flatbuffers::Offset<::flatbuffers::String> value) { fbb_.AddOffset(StringStringEntry::VT_VALUE, value); } - explicit StringStringEntryBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit StringStringEntryBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - 
StringStringEntryBuilder &operator=(const StringStringEntryBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateStringStringEntry( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset key = 0, - flatbuffers::Offset value = 0) { +inline ::flatbuffers::Offset CreateStringStringEntry( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> key = 0, + ::flatbuffers::Offset<::flatbuffers::String> value = 0) { StringStringEntryBuilder builder_(_fbb); builder_.add_value(value); builder_.add_key(key); return builder_.Finish(); } -inline flatbuffers::Offset CreateStringStringEntryDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateStringStringEntryDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *key = nullptr, const char *value = nullptr) { auto key__ = key ? _fbb.CreateString(key) : 0; @@ -2344,7 +2344,7 @@ inline flatbuffers::Offset CreateStringStringEntryDirect( value__); } -struct Model FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct Model FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef ModelBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_IR_VERSION = 4, @@ -2361,34 +2361,34 @@ struct Model FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { int64_t ir_version() const { return GetField(VT_IR_VERSION, 0); } - const flatbuffers::Vector> *opset_import() const { - return GetPointer> *>(VT_OPSET_IMPORT); + const ::flatbuffers::Vector<::flatbuffers::Offset> *opset_import() const { + return GetPointer> *>(VT_OPSET_IMPORT); } - const flatbuffers::String *producer_name() const { - return GetPointer(VT_PRODUCER_NAME); + const ::flatbuffers::String *producer_name() const { + return GetPointer(VT_PRODUCER_NAME); } - const flatbuffers::String *producer_version() const { - return GetPointer(VT_PRODUCER_VERSION); + const ::flatbuffers::String *producer_version() const { + return GetPointer(VT_PRODUCER_VERSION); } - const flatbuffers::String *domain() const { - return GetPointer(VT_DOMAIN); + const ::flatbuffers::String *domain() const { + return GetPointer(VT_DOMAIN); } int64_t model_version() const { return GetField(VT_MODEL_VERSION, 0); } - const flatbuffers::String *doc_string() const { - return GetPointer(VT_DOC_STRING); + const ::flatbuffers::String *doc_string() const { + return GetPointer(VT_DOC_STRING); } const onnxruntime::fbs::Graph *graph() const { return GetPointer(VT_GRAPH); } - const flatbuffers::String *graph_doc_string() const { - return GetPointer(VT_GRAPH_DOC_STRING); + const ::flatbuffers::String *graph_doc_string() const { + return GetPointer(VT_GRAPH_DOC_STRING); } - const flatbuffers::Vector> *metadata_props() const { - return GetPointer> *>(VT_METADATA_PROPS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *metadata_props() const { + return GetPointer> *>(VT_METADATA_PROPS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_IR_VERSION, 8) && VerifyOffset(verifier, VT_OPSET_IMPORT) && @@ -2416,62 +2416,61 @@ struct Model FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct ModelBuilder { typedef Model Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; + 
::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_ir_version(int64_t ir_version) { fbb_.AddElement(Model::VT_IR_VERSION, ir_version, 0); } - void add_opset_import(flatbuffers::Offset>> opset_import) { + void add_opset_import(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> opset_import) { fbb_.AddOffset(Model::VT_OPSET_IMPORT, opset_import); } - void add_producer_name(flatbuffers::Offset producer_name) { + void add_producer_name(::flatbuffers::Offset<::flatbuffers::String> producer_name) { fbb_.AddOffset(Model::VT_PRODUCER_NAME, producer_name); } - void add_producer_version(flatbuffers::Offset producer_version) { + void add_producer_version(::flatbuffers::Offset<::flatbuffers::String> producer_version) { fbb_.AddOffset(Model::VT_PRODUCER_VERSION, producer_version); } - void add_domain(flatbuffers::Offset domain) { + void add_domain(::flatbuffers::Offset<::flatbuffers::String> domain) { fbb_.AddOffset(Model::VT_DOMAIN, domain); } void add_model_version(int64_t model_version) { fbb_.AddElement(Model::VT_MODEL_VERSION, model_version, 0); } - void add_doc_string(flatbuffers::Offset doc_string) { + void add_doc_string(::flatbuffers::Offset<::flatbuffers::String> doc_string) { fbb_.AddOffset(Model::VT_DOC_STRING, doc_string); } - void add_graph(flatbuffers::Offset graph) { + void add_graph(::flatbuffers::Offset graph) { fbb_.AddOffset(Model::VT_GRAPH, graph); } - void add_graph_doc_string(flatbuffers::Offset graph_doc_string) { + void add_graph_doc_string(::flatbuffers::Offset<::flatbuffers::String> graph_doc_string) { fbb_.AddOffset(Model::VT_GRAPH_DOC_STRING, graph_doc_string); } - void add_metadata_props(flatbuffers::Offset>> metadata_props) { + void add_metadata_props(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> metadata_props) { fbb_.AddOffset(Model::VT_METADATA_PROPS, metadata_props); } - explicit ModelBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit ModelBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - ModelBuilder &operator=(const ModelBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateModel( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateModel( + ::flatbuffers::FlatBufferBuilder &_fbb, int64_t ir_version = 0, - flatbuffers::Offset>> opset_import = 0, - flatbuffers::Offset producer_name = 0, - flatbuffers::Offset producer_version = 0, - flatbuffers::Offset domain = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> opset_import = 0, + ::flatbuffers::Offset<::flatbuffers::String> producer_name = 0, + ::flatbuffers::Offset<::flatbuffers::String> producer_version = 0, + ::flatbuffers::Offset<::flatbuffers::String> domain = 0, int64_t model_version = 0, - flatbuffers::Offset doc_string = 0, - flatbuffers::Offset graph = 0, - flatbuffers::Offset graph_doc_string = 0, - flatbuffers::Offset>> metadata_props = 0) { + ::flatbuffers::Offset<::flatbuffers::String> doc_string = 0, + ::flatbuffers::Offset graph = 0, + ::flatbuffers::Offset<::flatbuffers::String> graph_doc_string = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> metadata_props = 0) { ModelBuilder builder_(_fbb); builder_.add_model_version(model_version); builder_.add_ir_version(ir_version); @@ -2486,25 +2485,25 @@ inline flatbuffers::Offset 
CreateModel( return builder_.Finish(); } -inline flatbuffers::Offset CreateModelDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateModelDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, int64_t ir_version = 0, - const std::vector> *opset_import = nullptr, + const std::vector<::flatbuffers::Offset> *opset_import = nullptr, const char *producer_name = nullptr, const char *producer_version = nullptr, const char *domain = nullptr, int64_t model_version = 0, const char *doc_string = nullptr, - flatbuffers::Offset graph = 0, + ::flatbuffers::Offset graph = 0, const char *graph_doc_string = nullptr, - const std::vector> *metadata_props = nullptr) { - auto opset_import__ = opset_import ? _fbb.CreateVector>(*opset_import) : 0; + const std::vector<::flatbuffers::Offset> *metadata_props = nullptr) { + auto opset_import__ = opset_import ? _fbb.CreateVector<::flatbuffers::Offset>(*opset_import) : 0; auto producer_name__ = producer_name ? _fbb.CreateString(producer_name) : 0; auto producer_version__ = producer_version ? _fbb.CreateString(producer_version) : 0; auto domain__ = domain ? _fbb.CreateString(domain) : 0; auto doc_string__ = doc_string ? _fbb.CreateString(doc_string) : 0; auto graph_doc_string__ = graph_doc_string ? _fbb.CreateString(graph_doc_string) : 0; - auto metadata_props__ = metadata_props ? _fbb.CreateVector>(*metadata_props) : 0; + auto metadata_props__ = metadata_props ? _fbb.CreateVector<::flatbuffers::Offset>(*metadata_props) : 0; return onnxruntime::fbs::CreateModel( _fbb, ir_version, @@ -2520,19 +2519,19 @@ inline flatbuffers::Offset CreateModelDirect( } /// deprecated: no longer using kernel def hashes -struct DeprecatedKernelCreateInfos FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct DeprecatedKernelCreateInfos FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef DeprecatedKernelCreateInfosBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NODE_INDICES = 4, VT_KERNEL_DEF_HASHES = 6 }; - const flatbuffers::Vector *node_indices() const { - return GetPointer *>(VT_NODE_INDICES); + const ::flatbuffers::Vector *node_indices() const { + return GetPointer *>(VT_NODE_INDICES); } - const flatbuffers::Vector *kernel_def_hashes() const { - return GetPointer *>(VT_KERNEL_DEF_HASHES); + const ::flatbuffers::Vector *kernel_def_hashes() const { + return GetPointer *>(VT_KERNEL_DEF_HASHES); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NODE_INDICES) && verifier.VerifyVector(node_indices()) && @@ -2544,38 +2543,37 @@ struct DeprecatedKernelCreateInfos FLATBUFFERS_FINAL_CLASS : private flatbuffers struct DeprecatedKernelCreateInfosBuilder { typedef DeprecatedKernelCreateInfos Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_node_indices(flatbuffers::Offset> node_indices) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_node_indices(::flatbuffers::Offset<::flatbuffers::Vector> node_indices) { fbb_.AddOffset(DeprecatedKernelCreateInfos::VT_NODE_INDICES, node_indices); } - void add_kernel_def_hashes(flatbuffers::Offset> kernel_def_hashes) { + void add_kernel_def_hashes(::flatbuffers::Offset<::flatbuffers::Vector> kernel_def_hashes) { fbb_.AddOffset(DeprecatedKernelCreateInfos::VT_KERNEL_DEF_HASHES, kernel_def_hashes); } - explicit DeprecatedKernelCreateInfosBuilder(flatbuffers::FlatBufferBuilder 
&_fbb) + explicit DeprecatedKernelCreateInfosBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - DeprecatedKernelCreateInfosBuilder &operator=(const DeprecatedKernelCreateInfosBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateDeprecatedKernelCreateInfos( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset> node_indices = 0, - flatbuffers::Offset> kernel_def_hashes = 0) { +inline ::flatbuffers::Offset CreateDeprecatedKernelCreateInfos( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector> node_indices = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> kernel_def_hashes = 0) { DeprecatedKernelCreateInfosBuilder builder_(_fbb); builder_.add_kernel_def_hashes(kernel_def_hashes); builder_.add_node_indices(node_indices); return builder_.Finish(); } -inline flatbuffers::Offset CreateDeprecatedKernelCreateInfosDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateDeprecatedKernelCreateInfosDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const std::vector *node_indices = nullptr, const std::vector *kernel_def_hashes = nullptr) { auto node_indices__ = node_indices ? _fbb.CreateVector(*node_indices) : 0; @@ -2587,25 +2585,25 @@ inline flatbuffers::Offset CreateDeprecatedKernelCr } /// deprecated: no longer using kernel def hashes -struct DeprecatedSubGraphSessionState FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct DeprecatedSubGraphSessionState FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef DeprecatedSubGraphSessionStateBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_GRAPH_ID = 4, VT_SESSION_STATE = 6 }; - const flatbuffers::String *graph_id() const { - return GetPointer(VT_GRAPH_ID); + const ::flatbuffers::String *graph_id() const { + return GetPointer(VT_GRAPH_ID); } - bool KeyCompareLessThan(const DeprecatedSubGraphSessionState *o) const { + bool KeyCompareLessThan(const DeprecatedSubGraphSessionState * const o) const { return *graph_id() < *o->graph_id(); } - int KeyCompareWithValue(const char *val) const { - return strcmp(graph_id()->c_str(), val); + int KeyCompareWithValue(const char *_graph_id) const { + return strcmp(graph_id()->c_str(), _graph_id); } const onnxruntime::fbs::DeprecatedSessionState *session_state() const { return GetPointer(VT_SESSION_STATE); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_GRAPH_ID) && verifier.VerifyString(graph_id()) && @@ -2617,41 +2615,40 @@ struct DeprecatedSubGraphSessionState FLATBUFFERS_FINAL_CLASS : private flatbuff struct DeprecatedSubGraphSessionStateBuilder { typedef DeprecatedSubGraphSessionState Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_graph_id(flatbuffers::Offset graph_id) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_graph_id(::flatbuffers::Offset<::flatbuffers::String> graph_id) { fbb_.AddOffset(DeprecatedSubGraphSessionState::VT_GRAPH_ID, graph_id); } - void add_session_state(flatbuffers::Offset session_state) { + void add_session_state(::flatbuffers::Offset session_state) { 
fbb_.AddOffset(DeprecatedSubGraphSessionState::VT_SESSION_STATE, session_state); } - explicit DeprecatedSubGraphSessionStateBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit DeprecatedSubGraphSessionStateBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - DeprecatedSubGraphSessionStateBuilder &operator=(const DeprecatedSubGraphSessionStateBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); fbb_.Required(o, DeprecatedSubGraphSessionState::VT_GRAPH_ID); return o; } }; -inline flatbuffers::Offset CreateDeprecatedSubGraphSessionState( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset graph_id = 0, - flatbuffers::Offset session_state = 0) { +inline ::flatbuffers::Offset CreateDeprecatedSubGraphSessionState( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> graph_id = 0, + ::flatbuffers::Offset session_state = 0) { DeprecatedSubGraphSessionStateBuilder builder_(_fbb); builder_.add_session_state(session_state); builder_.add_graph_id(graph_id); return builder_.Finish(); } -inline flatbuffers::Offset CreateDeprecatedSubGraphSessionStateDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateDeprecatedSubGraphSessionStateDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *graph_id = nullptr, - flatbuffers::Offset session_state = 0) { + ::flatbuffers::Offset session_state = 0) { auto graph_id__ = graph_id ? _fbb.CreateString(graph_id) : 0; return onnxruntime::fbs::CreateDeprecatedSubGraphSessionState( _fbb, @@ -2660,7 +2657,7 @@ inline flatbuffers::Offset CreateDeprecatedSubGr } /// deprecated: no longer using kernel def hashes -struct DeprecatedSessionState FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct DeprecatedSessionState FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef DeprecatedSessionStateBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_KERNELS = 4, @@ -2669,10 +2666,10 @@ struct DeprecatedSessionState FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tab const onnxruntime::fbs::DeprecatedKernelCreateInfos *kernels() const { return GetPointer(VT_KERNELS); } - const flatbuffers::Vector> *sub_graph_session_states() const { - return GetPointer> *>(VT_SUB_GRAPH_SESSION_STATES); + const ::flatbuffers::Vector<::flatbuffers::Offset> *sub_graph_session_states() const { + return GetPointer> *>(VT_SUB_GRAPH_SESSION_STATES); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_KERNELS) && verifier.VerifyTable(kernels()) && @@ -2685,40 +2682,39 @@ struct DeprecatedSessionState FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tab struct DeprecatedSessionStateBuilder { typedef DeprecatedSessionState Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_kernels(flatbuffers::Offset kernels) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_kernels(::flatbuffers::Offset kernels) { fbb_.AddOffset(DeprecatedSessionState::VT_KERNELS, kernels); } - void add_sub_graph_session_states(flatbuffers::Offset>> sub_graph_session_states) { + void add_sub_graph_session_states(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> sub_graph_session_states) { 
fbb_.AddOffset(DeprecatedSessionState::VT_SUB_GRAPH_SESSION_STATES, sub_graph_session_states); } - explicit DeprecatedSessionStateBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit DeprecatedSessionStateBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - DeprecatedSessionStateBuilder &operator=(const DeprecatedSessionStateBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateDeprecatedSessionState( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset kernels = 0, - flatbuffers::Offset>> sub_graph_session_states = 0) { +inline ::flatbuffers::Offset CreateDeprecatedSessionState( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset kernels = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> sub_graph_session_states = 0) { DeprecatedSessionStateBuilder builder_(_fbb); builder_.add_sub_graph_session_states(sub_graph_session_states); builder_.add_kernels(kernels); return builder_.Finish(); } -inline flatbuffers::Offset CreateDeprecatedSessionStateDirect( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset kernels = 0, - std::vector> *sub_graph_session_states = nullptr) { +inline ::flatbuffers::Offset CreateDeprecatedSessionStateDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset kernels = 0, + std::vector<::flatbuffers::Offset> *sub_graph_session_states = nullptr) { auto sub_graph_session_states__ = sub_graph_session_states ? _fbb.CreateVectorOfSortedTables(sub_graph_session_states) : 0; return onnxruntime::fbs::CreateDeprecatedSessionState( _fbb, @@ -2726,7 +2722,7 @@ inline flatbuffers::Offset CreateDeprecatedSessionStateD sub_graph_session_states__); } -struct ArgTypeAndIndex FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct ArgTypeAndIndex FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef ArgTypeAndIndexBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_ARG_TYPE = 4, @@ -2738,7 +2734,7 @@ struct ArgTypeAndIndex FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { uint32_t index() const { return GetField(VT_INDEX, 0); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_ARG_TYPE, 1) && VerifyField(verifier, VT_INDEX, 4) && @@ -2748,28 +2744,27 @@ struct ArgTypeAndIndex FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct ArgTypeAndIndexBuilder { typedef ArgTypeAndIndex Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_arg_type(onnxruntime::fbs::ArgType arg_type) { fbb_.AddElement(ArgTypeAndIndex::VT_ARG_TYPE, static_cast(arg_type), 0); } void add_index(uint32_t index) { fbb_.AddElement(ArgTypeAndIndex::VT_INDEX, index, 0); } - explicit ArgTypeAndIndexBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit ArgTypeAndIndexBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - ArgTypeAndIndexBuilder &operator=(const ArgTypeAndIndexBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; 
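For readers of this regenerated header, a minimal usage sketch of the builder/accessor pattern above (illustrative only, not part of the diff; it assumes the generated ort.fbs.h is included and uses only the CreateArgTypeAndIndex helper and accessors declared here):

::flatbuffers::FlatBufferBuilder fbb;
// Create* helpers serialize a table and return an Offset into the builder's buffer.
auto arg = onnxruntime::fbs::CreateArgTypeAndIndex(fbb, onnxruntime::fbs::ArgType::INPUT, /*index*/ 1);
fbb.Finish(arg);

// Read the table back from the finished buffer.
const auto* parsed =
    ::flatbuffers::GetRoot<onnxruntime::fbs::ArgTypeAndIndex>(fbb.GetBufferPointer());
// parsed->arg_type() == ArgType::INPUT and parsed->index() == 1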
-inline flatbuffers::Offset CreateArgTypeAndIndex( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateArgTypeAndIndex( + ::flatbuffers::FlatBufferBuilder &_fbb, onnxruntime::fbs::ArgType arg_type = onnxruntime::fbs::ArgType::INPUT, uint32_t index = 0) { ArgTypeAndIndexBuilder builder_(_fbb); @@ -2778,25 +2773,25 @@ inline flatbuffers::Offset CreateArgTypeAndIndex( return builder_.Finish(); } -struct KernelTypeStrArgsEntry FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct KernelTypeStrArgsEntry FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef KernelTypeStrArgsEntryBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_KERNEL_TYPE_STR = 4, VT_ARGS = 6 }; - const flatbuffers::String *kernel_type_str() const { - return GetPointer(VT_KERNEL_TYPE_STR); + const ::flatbuffers::String *kernel_type_str() const { + return GetPointer(VT_KERNEL_TYPE_STR); } - bool KeyCompareLessThan(const KernelTypeStrArgsEntry *o) const { + bool KeyCompareLessThan(const KernelTypeStrArgsEntry * const o) const { return *kernel_type_str() < *o->kernel_type_str(); } - int KeyCompareWithValue(const char *val) const { - return strcmp(kernel_type_str()->c_str(), val); + int KeyCompareWithValue(const char *_kernel_type_str) const { + return strcmp(kernel_type_str()->c_str(), _kernel_type_str); } - const flatbuffers::Vector> *args() const { - return GetPointer> *>(VT_ARGS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *args() const { + return GetPointer> *>(VT_ARGS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_KERNEL_TYPE_STR) && verifier.VerifyString(kernel_type_str()) && @@ -2809,68 +2804,67 @@ struct KernelTypeStrArgsEntry FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tab struct KernelTypeStrArgsEntryBuilder { typedef KernelTypeStrArgsEntry Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_kernel_type_str(flatbuffers::Offset kernel_type_str) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_kernel_type_str(::flatbuffers::Offset<::flatbuffers::String> kernel_type_str) { fbb_.AddOffset(KernelTypeStrArgsEntry::VT_KERNEL_TYPE_STR, kernel_type_str); } - void add_args(flatbuffers::Offset>> args) { + void add_args(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> args) { fbb_.AddOffset(KernelTypeStrArgsEntry::VT_ARGS, args); } - explicit KernelTypeStrArgsEntryBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit KernelTypeStrArgsEntryBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - KernelTypeStrArgsEntryBuilder &operator=(const KernelTypeStrArgsEntryBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); fbb_.Required(o, KernelTypeStrArgsEntry::VT_KERNEL_TYPE_STR); return o; } }; -inline flatbuffers::Offset CreateKernelTypeStrArgsEntry( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset kernel_type_str = 0, - flatbuffers::Offset>> args = 0) { +inline ::flatbuffers::Offset CreateKernelTypeStrArgsEntry( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> kernel_type_str = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> args = 0) { 
KernelTypeStrArgsEntryBuilder builder_(_fbb); builder_.add_args(args); builder_.add_kernel_type_str(kernel_type_str); return builder_.Finish(); } -inline flatbuffers::Offset CreateKernelTypeStrArgsEntryDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateKernelTypeStrArgsEntryDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *kernel_type_str = nullptr, - const std::vector> *args = nullptr) { + const std::vector<::flatbuffers::Offset> *args = nullptr) { auto kernel_type_str__ = kernel_type_str ? _fbb.CreateString(kernel_type_str) : 0; - auto args__ = args ? _fbb.CreateVector>(*args) : 0; + auto args__ = args ? _fbb.CreateVector<::flatbuffers::Offset>(*args) : 0; return onnxruntime::fbs::CreateKernelTypeStrArgsEntry( _fbb, kernel_type_str__, args__); } -struct OpIdKernelTypeStrArgsEntry FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct OpIdKernelTypeStrArgsEntry FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef OpIdKernelTypeStrArgsEntryBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_OP_ID = 4, VT_KERNEL_TYPE_STR_ARGS = 6 }; - const flatbuffers::String *op_id() const { - return GetPointer(VT_OP_ID); + const ::flatbuffers::String *op_id() const { + return GetPointer(VT_OP_ID); } - bool KeyCompareLessThan(const OpIdKernelTypeStrArgsEntry *o) const { + bool KeyCompareLessThan(const OpIdKernelTypeStrArgsEntry * const o) const { return *op_id() < *o->op_id(); } - int KeyCompareWithValue(const char *val) const { - return strcmp(op_id()->c_str(), val); + int KeyCompareWithValue(const char *_op_id) const { + return strcmp(op_id()->c_str(), _op_id); } - const flatbuffers::Vector> *kernel_type_str_args() const { - return GetPointer> *>(VT_KERNEL_TYPE_STR_ARGS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *kernel_type_str_args() const { + return GetPointer> *>(VT_KERNEL_TYPE_STR_ARGS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_OP_ID) && verifier.VerifyString(op_id()) && @@ -2883,41 +2877,40 @@ struct OpIdKernelTypeStrArgsEntry FLATBUFFERS_FINAL_CLASS : private flatbuffers: struct OpIdKernelTypeStrArgsEntryBuilder { typedef OpIdKernelTypeStrArgsEntry Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_op_id(flatbuffers::Offset op_id) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_op_id(::flatbuffers::Offset<::flatbuffers::String> op_id) { fbb_.AddOffset(OpIdKernelTypeStrArgsEntry::VT_OP_ID, op_id); } - void add_kernel_type_str_args(flatbuffers::Offset>> kernel_type_str_args) { + void add_kernel_type_str_args(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> kernel_type_str_args) { fbb_.AddOffset(OpIdKernelTypeStrArgsEntry::VT_KERNEL_TYPE_STR_ARGS, kernel_type_str_args); } - explicit OpIdKernelTypeStrArgsEntryBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit OpIdKernelTypeStrArgsEntryBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - OpIdKernelTypeStrArgsEntryBuilder &operator=(const OpIdKernelTypeStrArgsEntryBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); fbb_.Required(o, OpIdKernelTypeStrArgsEntry::VT_OP_ID); return o; } }; -inline flatbuffers::Offset 
CreateOpIdKernelTypeStrArgsEntry( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset op_id = 0, - flatbuffers::Offset>> kernel_type_str_args = 0) { +inline ::flatbuffers::Offset CreateOpIdKernelTypeStrArgsEntry( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> op_id = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> kernel_type_str_args = 0) { OpIdKernelTypeStrArgsEntryBuilder builder_(_fbb); builder_.add_kernel_type_str_args(kernel_type_str_args); builder_.add_op_id(op_id); return builder_.Finish(); } -inline flatbuffers::Offset CreateOpIdKernelTypeStrArgsEntryDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateOpIdKernelTypeStrArgsEntryDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *op_id = nullptr, - std::vector> *kernel_type_str_args = nullptr) { + std::vector<::flatbuffers::Offset> *kernel_type_str_args = nullptr) { auto op_id__ = op_id ? _fbb.CreateString(op_id) : 0; auto kernel_type_str_args__ = kernel_type_str_args ? _fbb.CreateVectorOfSortedTables(kernel_type_str_args) : 0; return onnxruntime::fbs::CreateOpIdKernelTypeStrArgsEntry( @@ -2926,15 +2919,15 @@ inline flatbuffers::Offset CreateOpIdKernelTypeStrAr kernel_type_str_args__); } -struct KernelTypeStrResolver FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct KernelTypeStrResolver FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef KernelTypeStrResolverBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_OP_KERNEL_TYPE_STR_ARGS = 4 }; - const flatbuffers::Vector> *op_kernel_type_str_args() const { - return GetPointer> *>(VT_OP_KERNEL_TYPE_STR_ARGS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *op_kernel_type_str_args() const { + return GetPointer> *>(VT_OP_KERNEL_TYPE_STR_ARGS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_OP_KERNEL_TYPE_STR_ARGS) && verifier.VerifyVector(op_kernel_type_str_args()) && @@ -2945,49 +2938,48 @@ struct KernelTypeStrResolver FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tabl struct KernelTypeStrResolverBuilder { typedef KernelTypeStrResolver Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_op_kernel_type_str_args(flatbuffers::Offset>> op_kernel_type_str_args) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_op_kernel_type_str_args(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> op_kernel_type_str_args) { fbb_.AddOffset(KernelTypeStrResolver::VT_OP_KERNEL_TYPE_STR_ARGS, op_kernel_type_str_args); } - explicit KernelTypeStrResolverBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit KernelTypeStrResolverBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - KernelTypeStrResolverBuilder &operator=(const KernelTypeStrResolverBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateKernelTypeStrResolver( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset>> op_kernel_type_str_args = 0) { +inline ::flatbuffers::Offset CreateKernelTypeStrResolver( + ::flatbuffers::FlatBufferBuilder &_fbb, + 
::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> op_kernel_type_str_args = 0) { KernelTypeStrResolverBuilder builder_(_fbb); builder_.add_op_kernel_type_str_args(op_kernel_type_str_args); return builder_.Finish(); } -inline flatbuffers::Offset CreateKernelTypeStrResolverDirect( - flatbuffers::FlatBufferBuilder &_fbb, - std::vector> *op_kernel_type_str_args = nullptr) { +inline ::flatbuffers::Offset CreateKernelTypeStrResolverDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + std::vector<::flatbuffers::Offset> *op_kernel_type_str_args = nullptr) { auto op_kernel_type_str_args__ = op_kernel_type_str_args ? _fbb.CreateVectorOfSortedTables(op_kernel_type_str_args) : 0; return onnxruntime::fbs::CreateKernelTypeStrResolver( _fbb, op_kernel_type_str_args__); } -struct InferenceSession FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct InferenceSession FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef InferenceSessionBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_ORT_VERSION = 4, VT_MODEL = 6, VT_KERNEL_TYPE_STR_RESOLVER = 10 }; - const flatbuffers::String *ort_version() const { - return GetPointer(VT_ORT_VERSION); + const ::flatbuffers::String *ort_version() const { + return GetPointer(VT_ORT_VERSION); } const onnxruntime::fbs::Model *model() const { return GetPointer(VT_MODEL); @@ -2995,7 +2987,7 @@ struct InferenceSession FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const onnxruntime::fbs::KernelTypeStrResolver *kernel_type_str_resolver() const { return GetPointer(VT_KERNEL_TYPE_STR_RESOLVER); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_ORT_VERSION) && verifier.VerifyString(ort_version()) && @@ -3009,34 +3001,33 @@ struct InferenceSession FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct InferenceSessionBuilder { typedef InferenceSession Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_ort_version(flatbuffers::Offset ort_version) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_ort_version(::flatbuffers::Offset<::flatbuffers::String> ort_version) { fbb_.AddOffset(InferenceSession::VT_ORT_VERSION, ort_version); } - void add_model(flatbuffers::Offset model) { + void add_model(::flatbuffers::Offset model) { fbb_.AddOffset(InferenceSession::VT_MODEL, model); } - void add_kernel_type_str_resolver(flatbuffers::Offset kernel_type_str_resolver) { + void add_kernel_type_str_resolver(::flatbuffers::Offset kernel_type_str_resolver) { fbb_.AddOffset(InferenceSession::VT_KERNEL_TYPE_STR_RESOLVER, kernel_type_str_resolver); } - explicit InferenceSessionBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit InferenceSessionBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - InferenceSessionBuilder &operator=(const InferenceSessionBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateInferenceSession( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset ort_version = 0, - flatbuffers::Offset model = 0, - flatbuffers::Offset kernel_type_str_resolver = 0) { +inline ::flatbuffers::Offset CreateInferenceSession( + ::flatbuffers::FlatBufferBuilder &_fbb, + 
::flatbuffers::Offset<::flatbuffers::String> ort_version = 0, + ::flatbuffers::Offset model = 0, + ::flatbuffers::Offset kernel_type_str_resolver = 0) { InferenceSessionBuilder builder_(_fbb); builder_.add_kernel_type_str_resolver(kernel_type_str_resolver); builder_.add_model(model); @@ -3044,11 +3035,11 @@ inline flatbuffers::Offset CreateInferenceSession( return builder_.Finish(); } -inline flatbuffers::Offset CreateInferenceSessionDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateInferenceSessionDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *ort_version = nullptr, - flatbuffers::Offset model = 0, - flatbuffers::Offset kernel_type_str_resolver = 0) { + ::flatbuffers::Offset model = 0, + ::flatbuffers::Offset kernel_type_str_resolver = 0) { auto ort_version__ = ort_version ? _fbb.CreateString(ort_version) : 0; return onnxruntime::fbs::CreateInferenceSession( _fbb, @@ -3057,7 +3048,7 @@ inline flatbuffers::Offset CreateInferenceSessionDirect( kernel_type_str_resolver); } -inline bool VerifyTypeInfoValue(flatbuffers::Verifier &verifier, const void *obj, TypeInfoValue type) { +inline bool VerifyTypeInfoValue(::flatbuffers::Verifier &verifier, const void *obj, TypeInfoValue type) { switch (type) { case TypeInfoValue::NONE: { return true; @@ -3078,10 +3069,10 @@ inline bool VerifyTypeInfoValue(flatbuffers::Verifier &verifier, const void *obj } } -inline bool VerifyTypeInfoValueVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types) { +inline bool VerifyTypeInfoValueVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset> *values, const ::flatbuffers::Vector *types) { if (!values || !types) return !values && !types; if (values->size() != types->size()) return false; - for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { + for (::flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { if (!VerifyTypeInfoValue( verifier, values->Get(i), types->GetEnum(i))) { return false; @@ -3091,11 +3082,11 @@ inline bool VerifyTypeInfoValueVector(flatbuffers::Verifier &verifier, const fla } inline const onnxruntime::fbs::InferenceSession *GetInferenceSession(const void *buf) { - return flatbuffers::GetRoot(buf); + return ::flatbuffers::GetRoot(buf); } inline const onnxruntime::fbs::InferenceSession *GetSizePrefixedInferenceSession(const void *buf) { - return flatbuffers::GetSizePrefixedRoot(buf); + return ::flatbuffers::GetSizePrefixedRoot(buf); } inline const char *InferenceSessionIdentifier() { @@ -3103,29 +3094,34 @@ inline const char *InferenceSessionIdentifier() { } inline bool InferenceSessionBufferHasIdentifier(const void *buf) { - return flatbuffers::BufferHasIdentifier( + return ::flatbuffers::BufferHasIdentifier( buf, InferenceSessionIdentifier()); } +inline bool SizePrefixedInferenceSessionBufferHasIdentifier(const void *buf) { + return ::flatbuffers::BufferHasIdentifier( + buf, InferenceSessionIdentifier(), true); +} + inline bool VerifyInferenceSessionBuffer( - flatbuffers::Verifier &verifier) { + ::flatbuffers::Verifier &verifier) { return verifier.VerifyBuffer(InferenceSessionIdentifier()); } inline bool VerifySizePrefixedInferenceSessionBuffer( - flatbuffers::Verifier &verifier) { + ::flatbuffers::Verifier &verifier) { return verifier.VerifySizePrefixedBuffer(InferenceSessionIdentifier()); } inline void FinishInferenceSessionBuffer( - flatbuffers::FlatBufferBuilder &fbb, - flatbuffers::Offset root) { + ::flatbuffers::FlatBufferBuilder &fbb, + 
::flatbuffers::Offset root) { fbb.Finish(root, InferenceSessionIdentifier()); } inline void FinishSizePrefixedInferenceSessionBuffer( - flatbuffers::FlatBufferBuilder &fbb, - flatbuffers::Offset root) { + ::flatbuffers::FlatBufferBuilder &fbb, + ::flatbuffers::Offset root) { fbb.FinishSizePrefixed(root, InferenceSessionIdentifier()); } diff --git a/onnxruntime/core/flatbuffers/schema/ort_training_checkpoint.fbs b/onnxruntime/core/flatbuffers/schema/ort_training_checkpoint.fbs index 94757fa6d5bf5..811bd3c04585e 100644 --- a/onnxruntime/core/flatbuffers/schema/ort_training_checkpoint.fbs +++ b/onnxruntime/core/flatbuffers/schema/ort_training_checkpoint.fbs @@ -12,6 +12,8 @@ table ModuleState { // are empty. i.e. The tensors are treated as named entities // without any meaningful data. is_nominal_state:bool; + // Tensors use external data file + has_external_data:bool; } table ParameterOptimizerState { diff --git a/onnxruntime/core/flatbuffers/schema/ort_training_checkpoint.fbs.h b/onnxruntime/core/flatbuffers/schema/ort_training_checkpoint.fbs.h index 62e6cf74394e5..4b2c950d03363 100644 --- a/onnxruntime/core/flatbuffers/schema/ort_training_checkpoint.fbs.h +++ b/onnxruntime/core/flatbuffers/schema/ort_training_checkpoint.fbs.h @@ -6,6 +6,13 @@ #include "core/common/flatbuffers.h" +// Ensure the included flatbuffers.h is the same version as when this file was +// generated, otherwise it may not be compatible. +static_assert(FLATBUFFERS_VERSION_MAJOR == 23 && + FLATBUFFERS_VERSION_MINOR == 5 && + FLATBUFFERS_VERSION_REVISION == 26, + "Non-compatible flatbuffers version included"); + #include "ort.fbs.h" namespace onnxruntime { @@ -35,23 +42,27 @@ struct PropertyBagBuilder; struct Checkpoint; struct CheckpointBuilder; -struct ModuleState FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct ModuleState FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef ModuleStateBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_REQUIRES_GRAD_PARAMS = 4, VT_FROZEN_PARAMS = 6, - VT_IS_NOMINAL_STATE = 8 + VT_IS_NOMINAL_STATE = 8, + VT_HAS_EXTERNAL_DATA = 10 }; - const flatbuffers::Vector> *requires_grad_params() const { - return GetPointer> *>(VT_REQUIRES_GRAD_PARAMS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *requires_grad_params() const { + return GetPointer> *>(VT_REQUIRES_GRAD_PARAMS); } - const flatbuffers::Vector> *frozen_params() const { - return GetPointer> *>(VT_FROZEN_PARAMS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *frozen_params() const { + return GetPointer> *>(VT_FROZEN_PARAMS); } bool is_nominal_state() const { return GetField(VT_IS_NOMINAL_STATE, 0) != 0; } - bool Verify(flatbuffers::Verifier &verifier) const { + bool has_external_data() const { + return GetField(VT_HAS_EXTERNAL_DATA, 0) != 0; + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_REQUIRES_GRAD_PARAMS) && verifier.VerifyVector(requires_grad_params()) && @@ -60,74 +71,81 @@ struct ModuleState FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { verifier.VerifyVector(frozen_params()) && verifier.VerifyVectorOfTables(frozen_params()) && VerifyField(verifier, VT_IS_NOMINAL_STATE, 1) && + VerifyField(verifier, VT_HAS_EXTERNAL_DATA, 1) && verifier.EndTable(); } }; struct ModuleStateBuilder { typedef ModuleState Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_requires_grad_params(flatbuffers::Offset>> requires_grad_params) { + 
::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_requires_grad_params(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> requires_grad_params) { fbb_.AddOffset(ModuleState::VT_REQUIRES_GRAD_PARAMS, requires_grad_params); } - void add_frozen_params(flatbuffers::Offset>> frozen_params) { + void add_frozen_params(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> frozen_params) { fbb_.AddOffset(ModuleState::VT_FROZEN_PARAMS, frozen_params); } void add_is_nominal_state(bool is_nominal_state) { fbb_.AddElement(ModuleState::VT_IS_NOMINAL_STATE, static_cast(is_nominal_state), 0); } - explicit ModuleStateBuilder(flatbuffers::FlatBufferBuilder &_fbb) + void add_has_external_data(bool has_external_data) { + fbb_.AddElement(ModuleState::VT_HAS_EXTERNAL_DATA, static_cast(has_external_data), 0); + } + explicit ModuleStateBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - ModuleStateBuilder &operator=(const ModuleStateBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateModuleState( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset>> requires_grad_params = 0, - flatbuffers::Offset>> frozen_params = 0, - bool is_nominal_state = false) { +inline ::flatbuffers::Offset CreateModuleState( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> requires_grad_params = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> frozen_params = 0, + bool is_nominal_state = false, + bool has_external_data = false) { ModuleStateBuilder builder_(_fbb); builder_.add_frozen_params(frozen_params); builder_.add_requires_grad_params(requires_grad_params); + builder_.add_has_external_data(has_external_data); builder_.add_is_nominal_state(is_nominal_state); return builder_.Finish(); } -inline flatbuffers::Offset CreateModuleStateDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const std::vector> *requires_grad_params = nullptr, - const std::vector> *frozen_params = nullptr, - bool is_nominal_state = false) { - auto requires_grad_params__ = requires_grad_params ? _fbb.CreateVector>(*requires_grad_params) : 0; - auto frozen_params__ = frozen_params ? _fbb.CreateVector>(*frozen_params) : 0; +inline ::flatbuffers::Offset CreateModuleStateDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector<::flatbuffers::Offset> *requires_grad_params = nullptr, + const std::vector<::flatbuffers::Offset> *frozen_params = nullptr, + bool is_nominal_state = false, + bool has_external_data = false) { + auto requires_grad_params__ = requires_grad_params ? _fbb.CreateVector<::flatbuffers::Offset>(*requires_grad_params) : 0; + auto frozen_params__ = frozen_params ? 
_fbb.CreateVector<::flatbuffers::Offset>(*frozen_params) : 0; return onnxruntime::fbs::CreateModuleState( _fbb, requires_grad_params__, frozen_params__, - is_nominal_state); + is_nominal_state, + has_external_data); } -struct ParameterOptimizerState FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct ParameterOptimizerState FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef ParameterOptimizerStateBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_PARAM_NAME = 4, VT_MOMENTUMS = 6 }; - const flatbuffers::String *param_name() const { - return GetPointer(VT_PARAM_NAME); + const ::flatbuffers::String *param_name() const { + return GetPointer(VT_PARAM_NAME); } - const flatbuffers::Vector> *momentums() const { - return GetPointer> *>(VT_MOMENTUMS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *momentums() const { + return GetPointer> *>(VT_MOMENTUMS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_PARAM_NAME) && verifier.VerifyString(param_name()) && @@ -140,49 +158,48 @@ struct ParameterOptimizerState FLATBUFFERS_FINAL_CLASS : private flatbuffers::Ta struct ParameterOptimizerStateBuilder { typedef ParameterOptimizerState Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_param_name(flatbuffers::Offset param_name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_param_name(::flatbuffers::Offset<::flatbuffers::String> param_name) { fbb_.AddOffset(ParameterOptimizerState::VT_PARAM_NAME, param_name); } - void add_momentums(flatbuffers::Offset>> momentums) { + void add_momentums(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> momentums) { fbb_.AddOffset(ParameterOptimizerState::VT_MOMENTUMS, momentums); } - explicit ParameterOptimizerStateBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit ParameterOptimizerStateBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - ParameterOptimizerStateBuilder &operator=(const ParameterOptimizerStateBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateParameterOptimizerState( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset param_name = 0, - flatbuffers::Offset>> momentums = 0) { +inline ::flatbuffers::Offset CreateParameterOptimizerState( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> param_name = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> momentums = 0) { ParameterOptimizerStateBuilder builder_(_fbb); builder_.add_momentums(momentums); builder_.add_param_name(param_name); return builder_.Finish(); } -inline flatbuffers::Offset CreateParameterOptimizerStateDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateParameterOptimizerStateDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *param_name = nullptr, - const std::vector> *momentums = nullptr) { + const std::vector<::flatbuffers::Offset> *momentums = nullptr) { auto param_name__ = param_name ? _fbb.CreateString(param_name) : 0; - auto momentums__ = momentums ? _fbb.CreateVector>(*momentums) : 0; + auto momentums__ = momentums ? 
_fbb.CreateVector<::flatbuffers::Offset>(*momentums) : 0; return onnxruntime::fbs::CreateParameterOptimizerState( _fbb, param_name__, momentums__); } -struct OptimizerGroup FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct OptimizerGroup FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef OptimizerGroupBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_GROUP_NAME = 4, @@ -190,8 +207,8 @@ struct OptimizerGroup FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { VT_INITIAL_LEARNING_RATE = 8, VT_OPTIMIZER_STATES = 10 }; - const flatbuffers::String *group_name() const { - return GetPointer(VT_GROUP_NAME); + const ::flatbuffers::String *group_name() const { + return GetPointer(VT_GROUP_NAME); } int64_t step() const { return GetField(VT_STEP, 0); @@ -199,10 +216,10 @@ struct OptimizerGroup FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { float initial_learning_rate() const { return GetField(VT_INITIAL_LEARNING_RATE, 0.0f); } - const flatbuffers::Vector> *optimizer_states() const { - return GetPointer> *>(VT_OPTIMIZER_STATES); + const ::flatbuffers::Vector<::flatbuffers::Offset> *optimizer_states() const { + return GetPointer> *>(VT_OPTIMIZER_STATES); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_GROUP_NAME) && verifier.VerifyString(group_name()) && @@ -217,9 +234,9 @@ struct OptimizerGroup FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct OptimizerGroupBuilder { typedef OptimizerGroup Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_group_name(flatbuffers::Offset group_name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_group_name(::flatbuffers::Offset<::flatbuffers::String> group_name) { fbb_.AddOffset(OptimizerGroup::VT_GROUP_NAME, group_name); } void add_step(int64_t step) { @@ -228,27 +245,26 @@ struct OptimizerGroupBuilder { void add_initial_learning_rate(float initial_learning_rate) { fbb_.AddElement(OptimizerGroup::VT_INITIAL_LEARNING_RATE, initial_learning_rate, 0.0f); } - void add_optimizer_states(flatbuffers::Offset>> optimizer_states) { + void add_optimizer_states(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> optimizer_states) { fbb_.AddOffset(OptimizerGroup::VT_OPTIMIZER_STATES, optimizer_states); } - explicit OptimizerGroupBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit OptimizerGroupBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - OptimizerGroupBuilder &operator=(const OptimizerGroupBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateOptimizerGroup( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset group_name = 0, +inline ::flatbuffers::Offset CreateOptimizerGroup( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> group_name = 0, int64_t step = 0, float initial_learning_rate = 0.0f, - flatbuffers::Offset>> optimizer_states = 0) { + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> optimizer_states = 0) { OptimizerGroupBuilder builder_(_fbb); builder_.add_step(step); builder_.add_optimizer_states(optimizer_states); @@ -257,14 +273,14 @@ inline 
flatbuffers::Offset CreateOptimizerGroup( return builder_.Finish(); } -inline flatbuffers::Offset CreateOptimizerGroupDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateOptimizerGroupDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *group_name = nullptr, int64_t step = 0, float initial_learning_rate = 0.0f, - const std::vector> *optimizer_states = nullptr) { + const std::vector<::flatbuffers::Offset> *optimizer_states = nullptr) { auto group_name__ = group_name ? _fbb.CreateString(group_name) : 0; - auto optimizer_states__ = optimizer_states ? _fbb.CreateVector>(*optimizer_states) : 0; + auto optimizer_states__ = optimizer_states ? _fbb.CreateVector<::flatbuffers::Offset>(*optimizer_states) : 0; return onnxruntime::fbs::CreateOptimizerGroup( _fbb, group_name__, @@ -273,19 +289,19 @@ inline flatbuffers::Offset CreateOptimizerGroupDirect( optimizer_states__); } -struct IntProperty FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct IntProperty FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef IntPropertyBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NAME = 4, VT_VALUE = 6 }; - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); } int64_t value() const { return GetField(VT_VALUE, 0); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NAME) && verifier.VerifyString(name()) && @@ -296,29 +312,28 @@ struct IntProperty FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct IntPropertyBuilder { typedef IntProperty Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_name(flatbuffers::Offset name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { fbb_.AddOffset(IntProperty::VT_NAME, name); } void add_value(int64_t value) { fbb_.AddElement(IntProperty::VT_VALUE, value, 0); } - explicit IntPropertyBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit IntPropertyBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - IntPropertyBuilder &operator=(const IntPropertyBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateIntProperty( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset name = 0, +inline ::flatbuffers::Offset CreateIntProperty( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, int64_t value = 0) { IntPropertyBuilder builder_(_fbb); builder_.add_value(value); @@ -326,8 +341,8 @@ inline flatbuffers::Offset CreateIntProperty( return builder_.Finish(); } -inline flatbuffers::Offset CreateIntPropertyDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateIntPropertyDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, int64_t value = 0) { auto name__ = name ? 
_fbb.CreateString(name) : 0; @@ -337,19 +352,19 @@ inline flatbuffers::Offset CreateIntPropertyDirect( value); } -struct FloatProperty FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct FloatProperty FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef FloatPropertyBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NAME = 4, VT_VALUE = 6 }; - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); } float value() const { return GetField(VT_VALUE, 0.0f); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NAME) && verifier.VerifyString(name()) && @@ -360,29 +375,28 @@ struct FloatProperty FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct FloatPropertyBuilder { typedef FloatProperty Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_name(flatbuffers::Offset name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { fbb_.AddOffset(FloatProperty::VT_NAME, name); } void add_value(float value) { fbb_.AddElement(FloatProperty::VT_VALUE, value, 0.0f); } - explicit FloatPropertyBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit FloatPropertyBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - FloatPropertyBuilder &operator=(const FloatPropertyBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateFloatProperty( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset name = 0, +inline ::flatbuffers::Offset CreateFloatProperty( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, float value = 0.0f) { FloatPropertyBuilder builder_(_fbb); builder_.add_value(value); @@ -390,8 +404,8 @@ inline flatbuffers::Offset CreateFloatProperty( return builder_.Finish(); } -inline flatbuffers::Offset CreateFloatPropertyDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateFloatPropertyDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, float value = 0.0f) { auto name__ = name ? 
_fbb.CreateString(name) : 0; @@ -401,19 +415,19 @@ inline flatbuffers::Offset CreateFloatPropertyDirect( value); } -struct StringProperty FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct StringProperty FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef StringPropertyBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_NAME = 4, VT_VALUE = 6 }; - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); } - const flatbuffers::String *value() const { - return GetPointer(VT_VALUE); + const ::flatbuffers::String *value() const { + return GetPointer(VT_VALUE); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NAME) && verifier.VerifyString(name()) && @@ -425,38 +439,37 @@ struct StringProperty FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct StringPropertyBuilder { typedef StringProperty Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_name(flatbuffers::Offset name) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { fbb_.AddOffset(StringProperty::VT_NAME, name); } - void add_value(flatbuffers::Offset value) { + void add_value(::flatbuffers::Offset<::flatbuffers::String> value) { fbb_.AddOffset(StringProperty::VT_VALUE, value); } - explicit StringPropertyBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit StringPropertyBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - StringPropertyBuilder &operator=(const StringPropertyBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateStringProperty( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset name = 0, - flatbuffers::Offset value = 0) { +inline ::flatbuffers::Offset CreateStringProperty( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, + ::flatbuffers::Offset<::flatbuffers::String> value = 0) { StringPropertyBuilder builder_(_fbb); builder_.add_value(value); builder_.add_name(name); return builder_.Finish(); } -inline flatbuffers::Offset CreateStringPropertyDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateStringPropertyDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, const char *value = nullptr) { auto name__ = name ? 
_fbb.CreateString(name) : 0; @@ -467,23 +480,23 @@ inline flatbuffers::Offset CreateStringPropertyDirect( value__); } -struct PropertyBag FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct PropertyBag FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef PropertyBagBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_INTS = 4, VT_FLOATS = 6, VT_STRINGS = 8 }; - const flatbuffers::Vector> *ints() const { - return GetPointer> *>(VT_INTS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *ints() const { + return GetPointer> *>(VT_INTS); } - const flatbuffers::Vector> *floats() const { - return GetPointer> *>(VT_FLOATS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *floats() const { + return GetPointer> *>(VT_FLOATS); } - const flatbuffers::Vector> *strings() const { - return GetPointer> *>(VT_STRINGS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *strings() const { + return GetPointer> *>(VT_STRINGS); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_INTS) && verifier.VerifyVector(ints()) && @@ -500,34 +513,33 @@ struct PropertyBag FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct PropertyBagBuilder { typedef PropertyBag Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_ints(flatbuffers::Offset>> ints) { + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_ints(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> ints) { fbb_.AddOffset(PropertyBag::VT_INTS, ints); } - void add_floats(flatbuffers::Offset>> floats) { + void add_floats(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> floats) { fbb_.AddOffset(PropertyBag::VT_FLOATS, floats); } - void add_strings(flatbuffers::Offset>> strings) { + void add_strings(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> strings) { fbb_.AddOffset(PropertyBag::VT_STRINGS, strings); } - explicit PropertyBagBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit PropertyBagBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - PropertyBagBuilder &operator=(const PropertyBagBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreatePropertyBag( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset>> ints = 0, - flatbuffers::Offset>> floats = 0, - flatbuffers::Offset>> strings = 0) { +inline ::flatbuffers::Offset CreatePropertyBag( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> ints = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> floats = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> strings = 0) { PropertyBagBuilder builder_(_fbb); builder_.add_strings(strings); builder_.add_floats(floats); @@ -535,14 +547,14 @@ inline flatbuffers::Offset CreatePropertyBag( return builder_.Finish(); } -inline flatbuffers::Offset CreatePropertyBagDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const std::vector> *ints = nullptr, - const std::vector> *floats = nullptr, - const std::vector> *strings = nullptr) { - auto ints__ = ints ? _fbb.CreateVector>(*ints) : 0; - auto floats__ = floats ? 
_fbb.CreateVector>(*floats) : 0; - auto strings__ = strings ? _fbb.CreateVector>(*strings) : 0; +inline ::flatbuffers::Offset CreatePropertyBagDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector<::flatbuffers::Offset> *ints = nullptr, + const std::vector<::flatbuffers::Offset> *floats = nullptr, + const std::vector<::flatbuffers::Offset> *strings = nullptr) { + auto ints__ = ints ? _fbb.CreateVector<::flatbuffers::Offset>(*ints) : 0; + auto floats__ = floats ? _fbb.CreateVector<::flatbuffers::Offset>(*floats) : 0; + auto strings__ = strings ? _fbb.CreateVector<::flatbuffers::Offset>(*strings) : 0; return onnxruntime::fbs::CreatePropertyBag( _fbb, ints__, @@ -550,7 +562,7 @@ inline flatbuffers::Offset CreatePropertyBagDirect( strings__); } -struct Checkpoint FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { +struct Checkpoint FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { typedef CheckpointBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_VERSION = 4, @@ -564,13 +576,13 @@ struct Checkpoint FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const onnxruntime::fbs::ModuleState *module_state() const { return GetPointer(VT_MODULE_STATE); } - const flatbuffers::Vector> *optimizer_groups() const { - return GetPointer> *>(VT_OPTIMIZER_GROUPS); + const ::flatbuffers::Vector<::flatbuffers::Offset> *optimizer_groups() const { + return GetPointer> *>(VT_OPTIMIZER_GROUPS); } const onnxruntime::fbs::PropertyBag *property_bag() const { return GetPointer(VT_PROPERTY_BAG); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_VERSION, 4) && VerifyOffset(verifier, VT_MODULE_STATE) && @@ -586,38 +598,37 @@ struct Checkpoint FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { struct CheckpointBuilder { typedef Checkpoint Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_version(int32_t version) { fbb_.AddElement(Checkpoint::VT_VERSION, version, 0); } - void add_module_state(flatbuffers::Offset module_state) { + void add_module_state(::flatbuffers::Offset module_state) { fbb_.AddOffset(Checkpoint::VT_MODULE_STATE, module_state); } - void add_optimizer_groups(flatbuffers::Offset>> optimizer_groups) { + void add_optimizer_groups(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> optimizer_groups) { fbb_.AddOffset(Checkpoint::VT_OPTIMIZER_GROUPS, optimizer_groups); } - void add_property_bag(flatbuffers::Offset property_bag) { + void add_property_bag(::flatbuffers::Offset property_bag) { fbb_.AddOffset(Checkpoint::VT_PROPERTY_BAG, property_bag); } - explicit CheckpointBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit CheckpointBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - CheckpointBuilder &operator=(const CheckpointBuilder &); - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateCheckpoint( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateCheckpoint( + ::flatbuffers::FlatBufferBuilder &_fbb, int32_t version = 0, - flatbuffers::Offset module_state = 0, - flatbuffers::Offset>> optimizer_groups = 0, - flatbuffers::Offset 
property_bag = 0) { + ::flatbuffers::Offset module_state = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> optimizer_groups = 0, + ::flatbuffers::Offset property_bag = 0) { CheckpointBuilder builder_(_fbb); builder_.add_property_bag(property_bag); builder_.add_optimizer_groups(optimizer_groups); @@ -626,13 +637,13 @@ inline flatbuffers::Offset CreateCheckpoint( return builder_.Finish(); } -inline flatbuffers::Offset CreateCheckpointDirect( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateCheckpointDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, int32_t version = 0, - flatbuffers::Offset module_state = 0, - const std::vector> *optimizer_groups = nullptr, - flatbuffers::Offset property_bag = 0) { - auto optimizer_groups__ = optimizer_groups ? _fbb.CreateVector>(*optimizer_groups) : 0; + ::flatbuffers::Offset module_state = 0, + const std::vector<::flatbuffers::Offset> *optimizer_groups = nullptr, + ::flatbuffers::Offset property_bag = 0) { + auto optimizer_groups__ = optimizer_groups ? _fbb.CreateVector<::flatbuffers::Offset>(*optimizer_groups) : 0; return onnxruntime::fbs::CreateCheckpoint( _fbb, version, @@ -642,11 +653,11 @@ inline flatbuffers::Offset CreateCheckpointDirect( } inline const onnxruntime::fbs::Checkpoint *GetCheckpoint(const void *buf) { - return flatbuffers::GetRoot(buf); + return ::flatbuffers::GetRoot(buf); } inline const onnxruntime::fbs::Checkpoint *GetSizePrefixedCheckpoint(const void *buf) { - return flatbuffers::GetSizePrefixedRoot(buf); + return ::flatbuffers::GetSizePrefixedRoot(buf); } inline const char *CheckpointIdentifier() { @@ -654,29 +665,34 @@ inline const char *CheckpointIdentifier() { } inline bool CheckpointBufferHasIdentifier(const void *buf) { - return flatbuffers::BufferHasIdentifier( + return ::flatbuffers::BufferHasIdentifier( buf, CheckpointIdentifier()); } +inline bool SizePrefixedCheckpointBufferHasIdentifier(const void *buf) { + return ::flatbuffers::BufferHasIdentifier( + buf, CheckpointIdentifier(), true); +} + inline bool VerifyCheckpointBuffer( - flatbuffers::Verifier &verifier) { + ::flatbuffers::Verifier &verifier) { return verifier.VerifyBuffer(CheckpointIdentifier()); } inline bool VerifySizePrefixedCheckpointBuffer( - flatbuffers::Verifier &verifier) { + ::flatbuffers::Verifier &verifier) { return verifier.VerifySizePrefixedBuffer(CheckpointIdentifier()); } inline void FinishCheckpointBuffer( - flatbuffers::FlatBufferBuilder &fbb, - flatbuffers::Offset root) { + ::flatbuffers::FlatBufferBuilder &fbb, + ::flatbuffers::Offset root) { fbb.Finish(root, CheckpointIdentifier()); } inline void FinishSizePrefixedCheckpointBuffer( - flatbuffers::FlatBufferBuilder &fbb, - flatbuffers::Offset root) { + ::flatbuffers::FlatBufferBuilder &fbb, + ::flatbuffers::Offset root) { fbb.FinishSizePrefixed(root, CheckpointIdentifier()); } diff --git a/onnxruntime/core/graph/graph_flatbuffers_utils.cc b/onnxruntime/core/graph/graph_flatbuffers_utils.cc index 2314a5228f83c..7dfdba687517f 100644 --- a/onnxruntime/core/graph/graph_flatbuffers_utils.cc +++ b/onnxruntime/core/graph/graph_flatbuffers_utils.cc @@ -29,25 +29,37 @@ SaveDims(flatbuffers::FlatBufferBuilder& builder, const DimsFieldType& dims) { Status SaveInitializerOrtFormat(flatbuffers::FlatBufferBuilder& builder, const TensorProto& initializer, const Path& model_path, - flatbuffers::Offset& fbs_tensor) { + flatbuffers::Offset& fbs_tensor, + const ExternalDataWriter& external_writer) { auto name = SaveStringToOrtFormat(builder, 
initializer.has_name(), initializer.name()); auto doc_string = SaveStringToOrtFormat(builder, initializer.has_doc_string(), initializer.doc_string()); auto dims = SaveDims(builder, initializer.dims()); + // we have to populate string_data or raw_data prior to creating the TensorBuilder instance to avoid vtable offset + // issues. flatbuffers::Offset>> string_data; flatbuffers::Offset> raw_data; + int64_t external_data_offset = -1; auto src_type = initializer.data_type(); const bool has_string_data = src_type == ONNX_NAMESPACE::TensorProto_DataType_STRING; + if (has_string_data) { std::vector string_data_vec(initializer.string_data().size()); std::copy(initializer.string_data().cbegin(), initializer.string_data().cend(), string_data_vec.begin()); string_data = builder.CreateVectorOfStrings(string_data_vec); } else { std::vector unpacked_tensor; - ORT_RETURN_IF_ERROR( - onnxruntime::utils::UnpackInitializerData(initializer, model_path, unpacked_tensor)); - raw_data = builder.CreateVector(unpacked_tensor.data(), unpacked_tensor.size()); + ORT_RETURN_IF_ERROR(onnxruntime::utils::UnpackInitializerData(initializer, model_path, unpacked_tensor)); + + if (external_writer && unpacked_tensor.size() >= kMinimumSizeForExternalData) { + // write bytes to external buffer/file and record offset for the start of the data + uint64_t offset = 0; + ORT_RETURN_IF_ERROR(external_writer(src_type, unpacked_tensor, offset)); + external_data_offset = onnxruntime::narrow(offset); // offset in fb is int64_t so -1 can mark not in use + } else { + raw_data = builder.CreateVector(unpacked_tensor.data(), unpacked_tensor.size()); + } } fbs::TensorBuilder tb(builder); @@ -55,11 +67,18 @@ Status SaveInitializerOrtFormat(flatbuffers::FlatBufferBuilder& builder, tb.add_doc_string(doc_string); tb.add_dims(dims); tb.add_data_type(static_cast(src_type)); - if (has_string_data) + + if (has_string_data) { tb.add_string_data(string_data); - else - tb.add_raw_data(raw_data); + } else { + if (external_data_offset >= 0) { + tb.add_external_data_offset(external_data_offset); + } else { + tb.add_raw_data(raw_data); + } + } fbs_tensor = tb.Finish(); + return Status::OK(); } @@ -176,8 +195,88 @@ Status SaveAttributeOrtFormat(flatbuffers::FlatBufferBuilder& builder, #endif +/** + * @brief Calculates how much memory will be required for putting contents of the given tensor into a plain array. + * + * complex64/complex128 tensors are not supported. The size is calculated from the dimensions and the data type, + * to accommodate fbs::Tensors with external data. + * + * @param tensor flatbuffer representation of a tensor. + * @return size_t size in bytes of the tensor's data. 
+ */ +size_t GetSizeInBytesFromFbsTensor(const fbs::Tensor& tensor) { + auto fbs_dims = tensor.dims(); + + auto num_elements = std::accumulate(fbs_dims->cbegin(), fbs_dims->cend(), SafeInt(1), + std::multiplies<>()); + + size_t byte_size_of_one_element; + + switch (tensor.data_type()) { + case fbs::TensorDataType::FLOAT: + byte_size_of_one_element = sizeof(float); + break; + case fbs::TensorDataType::UINT8: + byte_size_of_one_element = sizeof(uint8_t); + break; + case fbs::TensorDataType::INT8: + byte_size_of_one_element = sizeof(int8_t); + break; + case fbs::TensorDataType::UINT16: + byte_size_of_one_element = sizeof(uint16_t); + break; + case fbs::TensorDataType::INT16: + byte_size_of_one_element = sizeof(int16_t); + break; + case fbs::TensorDataType::INT32: + byte_size_of_one_element = sizeof(int32_t); + break; + case fbs::TensorDataType::INT64: + byte_size_of_one_element = sizeof(int64_t); + break; + case fbs::TensorDataType::BOOL: + byte_size_of_one_element = sizeof(bool); + break; + case fbs::TensorDataType::FLOAT16: + byte_size_of_one_element = sizeof(MLFloat16); + break; + case fbs::TensorDataType::DOUBLE: + byte_size_of_one_element = sizeof(double); + break; + case fbs::TensorDataType::UINT32: + byte_size_of_one_element = sizeof(uint32_t); + break; + case fbs::TensorDataType::UINT64: + byte_size_of_one_element = sizeof(uint64_t); + break; + case fbs::TensorDataType::BFLOAT16: + byte_size_of_one_element = sizeof(BFloat16); + break; +#if !defined(DISABLE_FLOAT8_TYPES) + case fbs::TensorDataType::FLOAT8E4M3FN: + byte_size_of_one_element = sizeof(uint8_t); + break; + case fbs::TensorDataType::FLOAT8E4M3FNUZ: + byte_size_of_one_element = sizeof(uint8_t); + break; + case fbs::TensorDataType::FLOAT8E5M2: + byte_size_of_one_element = sizeof(uint8_t); + break; + case fbs::TensorDataType::FLOAT8E5M2FNUZ: + byte_size_of_one_element = sizeof(uint8_t); + break; +#endif + case fbs::TensorDataType::STRING: + ORT_THROW("String data type is not supported for on-device training", tensor.name()); + default: + ORT_THROW("Unsupported tensor data type for tensor ", tensor.name()); + } + return num_elements * byte_size_of_one_element; +} + Status LoadInitializerOrtFormat(const fbs::Tensor& fbs_tensor, TensorProto& initializer, - const OrtFormatLoadOptions& load_options) { + const OrtFormatLoadOptions& load_options, + const ExternalDataReader& external_data_reader) { initializer.Clear(); LOAD_STR_FROM_ORT_FORMAT(initializer, name, fbs_tensor.name()); @@ -186,9 +285,9 @@ Status LoadInitializerOrtFormat(const fbs::Tensor& fbs_tensor, TensorProto& init auto fbs_dims = fbs_tensor.dims(); ORT_RETURN_IF(nullptr == fbs_dims, "Missing dimensions for initializer. Invalid ORT format model."); initializer.mutable_dims()->Add(fbs_dims->cbegin(), fbs_dims->cend()); - auto fbs_data_type = fbs_tensor.data_type(); initializer.set_data_type(static_cast(fbs_data_type)); + if (fbs_data_type == fbs::TensorDataType::STRING) { auto fbs_str_data = fbs_tensor.string_data(); ORT_RETURN_IF(nullptr == fbs_str_data, "Missing string data for initializer. Invalid ORT format model."); @@ -199,30 +298,49 @@ Status LoadInitializerOrtFormat(const fbs::Tensor& fbs_tensor, TensorProto& init } } else { const auto* fbs_raw_data = fbs_tensor.raw_data(); - ORT_RETURN_IF(nullptr == fbs_raw_data, "Missing raw data for initializer. 
Invalid ORT format model."); - - if (load_options.can_use_flatbuffer_for_initializers && fbs_raw_data->size() > 127) { - initializer.set_data_location(ONNX_NAMESPACE::TensorProto_DataLocation_EXTERNAL); - - static_assert(sizeof(void*) <= sizeof(ExternalDataInfo::OFFSET_TYPE)); - const void* data_offset = fbs_raw_data->Data(); - // we reinterpret_cast this back to void* in tensorprotoutils.cc:GetExtDataFromTensorProto. - // use intptr_t as OFFSET_TYPE is signed. in theory you could get a weird looking value if the address uses the - // high bit, but that should be unlikely in a scenario where we care about memory usage enough to use this path. - auto offset = narrow(reinterpret_cast(data_offset)); - - ONNX_NAMESPACE::StringStringEntryProto* entry = initializer.mutable_external_data()->Add(); - entry->set_key("location"); - entry->set_value(ToUTF8String(onnxruntime::utils::kTensorProtoMemoryAddressTag)); - entry = initializer.mutable_external_data()->Add(); - entry->set_key("offset"); - entry->set_value(std::to_string(offset)); - entry = initializer.mutable_external_data()->Add(); - entry->set_key("length"); - entry->set_value(std::to_string(fbs_raw_data->size())); + if (fbs_raw_data) { + if (load_options.can_use_flatbuffer_for_initializers && fbs_raw_data->size() > 127) { + initializer.set_data_location(ONNX_NAMESPACE::TensorProto_DataLocation_EXTERNAL); + + static_assert(sizeof(void*) <= sizeof(ExternalDataInfo::OFFSET_TYPE)); + const void* data_offset = fbs_raw_data->Data(); + // we reinterpret_cast this back to void* in tensorprotoutils.cc:GetExtDataFromTensorProto. + // use intptr_t as OFFSET_TYPE is signed. in theory you could get a weird looking value if the address uses the + // high bit, but that should be unlikely in a scenario where we care about memory usage enough to use this path. + auto offset = narrow(reinterpret_cast(data_offset)); + + ONNX_NAMESPACE::StringStringEntryProto* entry = initializer.mutable_external_data()->Add(); + entry->set_key("location"); + entry->set_value(ToUTF8String(onnxruntime::utils::kTensorProtoMemoryAddressTag)); + entry = initializer.mutable_external_data()->Add(); + entry->set_key("offset"); + entry->set_value(std::to_string(offset)); + entry = initializer.mutable_external_data()->Add(); + entry->set_key("length"); + entry->set_value(std::to_string(fbs_raw_data->size())); + } else { + // fbs_raw_data is uint8_t vector, so the size is byte size + initializer.set_raw_data(fbs_raw_data->Data(), fbs_raw_data->size()); + } } else { - // fbs_raw_data is uint8_t vector, so the size is byte size - initializer.set_raw_data(fbs_raw_data->Data(), fbs_raw_data->size()); + auto external_data_offset = fbs_tensor.external_data_offset(); + + // no external data. should have had raw data. + ORT_RETURN_IF(external_data_offset < 0, "Missing raw data for initializer. Invalid ORT format model."); + + // external data but no reader + ORT_RETURN_IF(!external_data_reader, "Tensor has external data but a data reader was not provided."); + + // FUTURE: This could be setup similarly to can_use_flatbuffer_for_initializers above if the external data file + // is memory mapped and guaranteed to remain valid. This would avoid the copy. 
+ auto num_bytes = GetSizeInBytesFromFbsTensor(fbs_tensor); + + // pre-allocate so we can write directly to the string buffer + std::string& raw_data = *initializer.mutable_raw_data(); + raw_data.resize(num_bytes); + auto output_buffer = gsl::make_span(reinterpret_cast(raw_data.data()), num_bytes); + + ORT_RETURN_IF_ERROR(external_data_reader(external_data_offset, output_buffer)); } } @@ -344,22 +462,35 @@ Status LoadAttributeOrtFormat(const fbs::Attribute& fbs_attr, Status SaveOrtTensorOrtFormat( const std::string& tensor_name, const onnxruntime::Tensor& ort_tensor, flatbuffers::FlatBufferBuilder& builder, - flatbuffers::Offset& fbs_tensor) { + flatbuffers::Offset& fbs_tensor, + ExternalDataWriter external_data_writer) { ORT_RETURN_IF(ort_tensor.IsDataTypeString(), "TensorProto_DataType_STRING is not supported while saving a tensor to ORT format."); const auto fbs_tensor_name = builder.CreateString(tensor_name); const auto fbs_tensor_dims = SaveDims(builder, ort_tensor.Shape().GetDims()); - flatbuffers::Offset> raw_data = builder.CreateVector( - static_cast(ort_tensor.DataRaw()), - ort_tensor.SizeInBytes()); + // To avoid issues with vtable offsets, raw_data fbs::vector must be constructed before the TensorBuilder begins + // building the tensor. See flatbuffer_builder.h's NotNested() function for more details. + flatbuffers::Offset> raw_data; + if (!external_data_writer) { + raw_data = builder.CreateVector(static_cast(ort_tensor.DataRaw()), + ort_tensor.SizeInBytes()); + } fbs::TensorBuilder tb(builder); tb.add_name(fbs_tensor_name); tb.add_doc_string(0); tb.add_dims(fbs_tensor_dims); tb.add_data_type(static_cast(ort_tensor.GetElementType())); - tb.add_raw_data(raw_data); + if (external_data_writer) { + uint64_t offset = 0; + gsl::span ort_tensor_data_span(static_cast(ort_tensor.DataRaw()), ort_tensor.SizeInBytes()); + ORT_RETURN_IF_ERROR(external_data_writer(ort_tensor.GetElementType(), ort_tensor_data_span, offset)); + int64_t external_data_offset = onnxruntime::narrow(offset); + tb.add_external_data_offset(external_data_offset); + } else { + tb.add_raw_data(raw_data); + } fbs_tensor = tb.Finish(); return Status::OK(); } @@ -367,17 +498,42 @@ Status SaveOrtTensorOrtFormat( template struct UnpackTensorWithType { Status operator()(const ONNX_NAMESPACE::TensorProto& tensor_proto, const fbs::Tensor& fbs_tensor, - onnxruntime::Tensor& ort_tensor) const { - return onnxruntime::utils::UnpackTensor( - tensor_proto, fbs_tensor.raw_data()->Data(), - fbs_tensor.raw_data()->size(), - ort_tensor.MutableData(), - static_cast(ort_tensor.Shape().Size())); + onnxruntime::Tensor& ort_tensor, const ExternalDataReader& external_data_reader) const { + if (fbs_tensor.external_data_offset() >= 0) { + auto fbs_tensor_external_data_offset = fbs_tensor.external_data_offset(); + ORT_RETURN_IF_NOT(external_data_reader, "Tensor has external data but a data reader was not provided."); + + // no external data. should have had raw data. + ORT_RETURN_IF(fbs_tensor_external_data_offset < 0, "Missing raw data for initializer. 
Invalid ORT format model."); + + const size_t raw_data_len = fbs::utils::GetSizeInBytesFromFbsTensor(fbs_tensor); + + auto raw_buf = std::make_unique(raw_data_len); + gsl::span raw_buf_span(raw_buf.get(), raw_data_len); + + ORT_RETURN_IF_ERROR(external_data_reader(fbs_tensor_external_data_offset, raw_buf_span)); + return onnxruntime::utils::UnpackTensor( + tensor_proto, raw_buf_span.data(), + raw_buf_span.size(), + ort_tensor.MutableData(), + static_cast(ort_tensor.Shape().Size())); + } else if (fbs_tensor.raw_data()) { + return onnxruntime::utils::UnpackTensor( + tensor_proto, fbs_tensor.raw_data()->Data(), + fbs_tensor.raw_data()->size(), + ort_tensor.MutableData(), + static_cast(ort_tensor.Shape().Size())); + } else { + return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Invalid tensor. Expected: raw data or external data offset. Actual: ", + fbs_tensor.string_data() ? "string data" : "nullptr", " for tensor named: ", + fbs_tensor.name()->str()); + } } }; Status LoadOrtTensorOrtFormat(const fbs::Tensor& fbs_tensor, const AllocatorPtr allocator, - std::string& tensor_name, onnxruntime::Tensor& ort_tensor) { + std::string& tensor_name, onnxruntime::Tensor& ort_tensor, + const ExternalDataReader& external_data_reader) { auto* fbs_tensor_name = fbs_tensor.name(); ORT_RETURN_IF_NOT(fbs_tensor_name, "Flatbuffer tensor is invalid. Expected: A valid tensor name. Actual: nullptr."); tensor_name = fbs_tensor_name->str(); @@ -392,7 +548,7 @@ Status LoadOrtTensorOrtFormat(const fbs::Tensor& fbs_tensor, const AllocatorPtr ort_tensor = onnxruntime::Tensor( tensor_dtype, TensorShape(tensor_dims->data(), tensor_dims->size()), allocator); - if (fbs_tensor.raw_data()->size() == 0U) { + if (fbs_tensor.raw_data() && fbs_tensor.raw_data()->size() == 0U) { // Empty tensor. Nothing to unpack. // This check is necessary because an empty ort tensor will return a size of 1. // As a result, the following call to UnpackTensor will fail since the src and @@ -408,7 +564,7 @@ Status LoadOrtTensorOrtFormat(const fbs::Tensor& fbs_tensor, const AllocatorPtr onnxruntime::utils::MLTypeCallDispatcher dispatcher(tensor_data_type); - return dispatcher.InvokeRet(unused_tensor_proto, fbs_tensor, ort_tensor); + return dispatcher.InvokeRet(unused_tensor_proto, fbs_tensor, ort_tensor, external_data_reader); } #endif // ENABLE_TRAINING_APIS diff --git a/onnxruntime/core/graph/graph_flatbuffers_utils.h b/onnxruntime/core/graph/graph_flatbuffers_utils.h index 9c55dad3c41ef..33eba34fbaff0 100644 --- a/onnxruntime/core/graph/graph_flatbuffers_utils.h +++ b/onnxruntime/core/graph/graph_flatbuffers_utils.h @@ -40,9 +40,39 @@ struct SparseTensor; namespace utils { +/// +/// Delegate to write initializer data to an external file/buffer. +/// Data should be aligned to an appropriate boundary for the data type and or any potential mmap'd usage of the file. +/// `data_type` is value returned by TensorProto::data_type() and is a value from onnx::TensorTypeProto_DataType. +/// The function is not called for onnx::TensorTypeProto_DataType_STRING. +/// The function should set `offset` to the start of the data in the external file/buffer. +/// +using ExternalDataWriter = std::function bytes, uint64_t& offset)>; + +// inverse to ExternalDataWriter. +// The reader should write bytes to the output_buffer which is pre-allocated with the correct size. +using ExternalDataReader = std::function output_buffer)>; + +/// +/// Minimum number of bytes for data to be written as external data. +/// +/// arbitrary choice to keep small values local. adjust as needed. 
consider if it needs to be configurable. +/// +constexpr uint32_t kMinimumSizeForExternalData = 64; + +/// +/// Save an initializer to an ORT format flatbuffer. +/// +/// Builder to write initializer with. +/// Initializer to serialize +/// Model path. Used if TensorProto has external data. +/// Tensor in flatbuffer. +/// Optional delegate to write the initializer data to an external file +/// if the initializer contains kMinimumSizeForExternalData bytes or more, and not string data. Status SaveInitializerOrtFormat( flatbuffers::FlatBufferBuilder& builder, const ONNX_NAMESPACE::TensorProto& initializer, - const Path& model_path, flatbuffers::Offset& fbs_tensor); + const Path& model_path, flatbuffers::Offset& fbs_tensor, + const ExternalDataWriter& external_writer = nullptr); #if !defined(DISABLE_SPARSE_TENSORS) Status SaveSparseInitializerOrtFormat( @@ -65,10 +95,12 @@ Status SaveAttributeOrtFormat( /// Flatbuffer Tensor /// TensorProto to load data into /// ORT format load options +/// Optional delegate to read from external data file. /// Status Status LoadInitializerOrtFormat(const fbs::Tensor& fbs_tensor, ONNX_NAMESPACE::TensorProto& initializer, - const OrtFormatLoadOptions& load_options); + const OrtFormatLoadOptions& load_options, + const ExternalDataReader& external_data_reader = nullptr); #if !defined(DISABLE_SPARSE_TENSORS) Status LoadSparseInitializerOrtFormat(const fbs::SparseTensor& fbs_sparse_tensor, @@ -93,20 +125,24 @@ Status LoadAttributeOrtFormat(const fbs::Attribute& fbs_attr, /// @param[in] ort_tensor ORT tensor to serialize to a flatbuffer tensor /// @param[in] builder flatbuffer builder to use for creating the flatbuffer tensor /// @param[out] fbs_tensor flatbuffer tensor to serialize the ORT tensor to +/// @param[out] external_data_writer Optional delegate to write the tensor data to an external file /// @return Status indicating success or providing error information Status SaveOrtTensorOrtFormat( const std::string& tensor_name, const onnxruntime::Tensor& ort_tensor, flatbuffers::FlatBufferBuilder& builder, - flatbuffers::Offset& fbs_tensor); + flatbuffers::Offset& fbs_tensor, + ExternalDataWriter external_data_writer = nullptr); /// @brief Load an ORT tensor from a flatbuffer tensor /// @param[in] fbs_tensor flatbuffer tensor to load the ORT tensor from /// @param[in] allocator Allocator to use for creating the ORT tensor /// @param[out] tensor_name Name of the tensor /// @param[out] ort_tensor ORT tensor to load the flatbuffer tensor into +/// @param[in] external_data_reader Optional delegate to read from an external data file /// @return Status indicating success or providing error information Status LoadOrtTensorOrtFormat(const fbs::Tensor& fbs_tensor, const AllocatorPtr allocator, - std::string& tensor_name, onnxruntime::Tensor& ort_tensor); + std::string& tensor_name, onnxruntime::Tensor& ort_tensor, + const ExternalDataReader& external_data_reader = nullptr); #endif diff --git a/onnxruntime/core/graph/model.h b/onnxruntime/core/graph/model.h index a774d5fe34461..6f4b7f4f9f00b 100644 --- a/onnxruntime/core/graph/model.h +++ b/onnxruntime/core/graph/model.h @@ -199,19 +199,10 @@ class Model { static common::Status Save(Model& model, int fd); // Save the model to file using an external file for initializers larger than the given threshold (in bytes). - // Notice that when on Windows the external_file_name is a plain string. - // This is because the string is saved inside the output protobuf as a plain string, where wchar is not supported. 
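// Illustration (hypothetical call site, not part of this change): with the single PathString overload
// declared below, callers no longer need platform-specific branches:
//   ORT_RETURN_IF_ERROR(Model::SaveWithExternalInitializers(model, ORT_TSTR("model.onnx"),
//                                                            "model.onnx.data",
//                                                            /*initializer_size_threshold*/ 128));
// ORT_TSTR yields a wide literal on Windows and a narrow one elsewhere, matching PathString.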
-#ifdef _WIN32 static common::Status SaveWithExternalInitializers(Model& model, - const std::wstring& file_path, - const std::string& external_file_name, - size_t initializer_size_threshold); -#else - static common::Status SaveWithExternalInitializers(Model& model, - const std::string& file_path, + const PathString& file_path, const std::string& external_file_name, size_t initializer_size_threshold); -#endif static common::Status SaveWithExternalInitializers(Model& model, int fd, diff --git a/onnxruntime/test/flatbuffers/flatbuffer_utils_test.cc b/onnxruntime/test/flatbuffers/flatbuffer_utils_test.cc new file mode 100644 index 0000000000000..f36dbaf3d1aca --- /dev/null +++ b/onnxruntime/test/flatbuffers/flatbuffer_utils_test.cc @@ -0,0 +1,385 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#if !defined(ORT_MINIMAL_BUILD) + +#include +#include + +#include "gtest/gtest.h" + +#include "core/common/common.h" +#include "core/common/path.h" +#include "core/graph/graph_flatbuffers_utils.h" +#include "core/framework/tensorprotoutils.h" +#include "core/providers/cpu/cpu_execution_provider.h" + +#include "test/flatbuffers/flatbuffers_utils_test.fbs.h" + +#include "test/util/include/asserts.h" + +namespace onnxruntime { +using namespace fbs::utils; +namespace test { + +namespace { +void CreateWriter(const std::string& filename, std::ofstream& external_data_stream, ExternalDataWriter& writer) { + external_data_stream = std::ofstream(filename, std::ios::binary); + + ASSERT_FALSE(external_data_stream.fail()); + + // setup the data writer to write aligned data to external_data_stream + writer = [&external_data_stream](int32_t data_type, gsl::span bytes, uint64_t& offset) { + // align everything to 4 or 8 bytes. we can optimize this later if needed. + // aligning to data type or more is required on ARM platforms for the data to be used. + int32_t alignment = 4; + + if (data_type == ONNX_NAMESPACE::TensorProto_DataType_INT64 || + data_type == ONNX_NAMESPACE::TensorProto_DataType_DOUBLE) { + alignment = 8; + } + + int64_t pos = external_data_stream.tellp(); + + if (pos % alignment != 0) { + // 8 bytes of 0's so we can pad to alignment 8 in a single `write` + constexpr static const uint64_t zeros = 0; + int64_t padding = alignment - (pos % alignment); + external_data_stream.write(reinterpret_cast(&zeros), padding); + ORT_ENFORCE(!external_data_stream.fail(), "Failed adding padding to external checkpoint data."); + pos += padding; + } + + external_data_stream.write(reinterpret_cast(bytes.data()), bytes.size()); + ORT_ENFORCE(!external_data_stream.fail(), "Failed writing external checkpoint data."); + ORT_ENFORCE(pos + int64_t(bytes.size()) == external_data_stream.tellp()); // sanity check + + offset = pos; + return Status::OK(); + }; +} + +void CreateReader(const std::string& filename, std::ifstream& external_data_stream, ExternalDataReader& reader) { + external_data_stream = std::ifstream(filename, std::ios::binary); + + ASSERT_FALSE(external_data_stream.fail()) << "Failed to open data file."; + + reader = [&external_data_stream](uint64_t offset, gsl::span output_buffer) { + external_data_stream.seekg(offset); + external_data_stream.read(reinterpret_cast(output_buffer.data()), output_buffer.size()); + + ORT_ENFORCE(!external_data_stream.fail(), + "Failed to read external checkpoint data. 
Offset:", offset, " Bytes:", output_buffer.size()); + + return Status::OK(); + }; +} + +template +ONNX_NAMESPACE::TensorProto CreateInitializer(const std::string& name, + ONNX_NAMESPACE::TensorProto_DataType data_type, + const std::vector& dims) { + ONNX_NAMESPACE::TensorProto tp; + tp.set_name(name); + tp.set_data_type(data_type); + + int64_t num_elements = 1; + for (auto dim : dims) { + tp.add_dims(dim); + num_elements *= dim; + } + + std::vector data(num_elements); + std::iota(data.begin(), data.end(), T(1)); // fill with 1..num_elements + + switch (data_type) { + case ONNX_NAMESPACE::TensorProto_DataType_INT64: { + for (auto val : data) { + tp.add_int64_data(val); + } + break; + } + case ONNX_NAMESPACE::TensorProto_DataType_FLOAT: { + for (auto val : data) { + tp.add_float_data(val); + } + break; + } + case ONNX_NAMESPACE::TensorProto_DataType_INT16: + case ONNX_NAMESPACE::TensorProto_DataType_UINT8: + case ONNX_NAMESPACE::TensorProto_DataType_INT32: { + for (auto val : data) { + tp.add_int32_data(static_cast(val)); + } + break; + } + default: + ORT_THROW("Unsupported data type: ", data_type); + } + + return tp; +} + +template <> +ONNX_NAMESPACE::TensorProto CreateInitializer(const std::string& name, + ONNX_NAMESPACE::TensorProto_DataType data_type, + const std::vector& dims) { + ONNX_NAMESPACE::TensorProto tp; + tp.set_name(name); + tp.set_data_type(data_type); + + int64_t num_elements = 1; + for (auto dim : dims) { + tp.add_dims(dim); + num_elements *= dim; + } + + for (int i = 0; i < num_elements; ++i) { + tp.add_string_data("string_" + std::to_string(i)); + } + + return tp; +} + +std::vector CreateInitializers() { + std::vector initializers; + // create data of various sizes. order is chosen to require padding between most but not all. + // our writer aligns to 4 bytes unless it's 64-bit data (which is aligned to 8 bytes). + // buffer: <16-bit data><32-bit data><8-bit data><64-bit data> + // 128 bytes is required to write to external data. each data type has enough elements to satisfy this. + + // 16-bit. 81 elements so we're 2 bytes past 4 byte alignment + initializers.emplace_back( + CreateInitializer("tensor_16", ONNX_NAMESPACE::TensorProto_DataType_INT16, {9, 9})); + + // string (should not use external) + initializers.emplace_back( + CreateInitializer("tensor_string", ONNX_NAMESPACE::TensorProto_DataType_STRING, {2, 2})); + + // 32-bit, 64 elements + initializers.emplace_back( + CreateInitializer("tensor_f32", ONNX_NAMESPACE::TensorProto_DataType_FLOAT, {8, 8})); + + // 8-bit. 129 elements so we're 1 byte past 4 or 8 byte alignment + initializers.emplace_back( + CreateInitializer("tensor_8", ONNX_NAMESPACE::TensorProto_DataType_UINT8, {3, 43})); + + // small (should not use external) + initializers.emplace_back( + CreateInitializer("tensor_32_small", ONNX_NAMESPACE::TensorProto_DataType_INT32, {2, 2})); + + // 64-bit, 36 elements + initializers.emplace_back( + CreateInitializer("tensor_64", ONNX_NAMESPACE::TensorProto_DataType_INT64, {6, 6})); + + return initializers; +} + +#ifdef ENABLE_TRAINING_APIS +std::vector CreateInitializersNoString() { + std::vector initializers; + // create data of various sizes. order is chosen to require padding between most but not all + // assuming our writer aligns to 4 bytes unless it's 64-bit data (which is aligned to 8 bytes) + // buffer: <16-bit><32-bit><8-bit><64-bit> + // need 128 bytes to write to external data + + // 16-bit. 
81 elements so we're 2 bytes past 4 byte alignment + initializers.emplace_back( + CreateInitializer("tensor_16", ONNX_NAMESPACE::TensorProto_DataType_INT16, {9, 9})); + + // 32-bit, 64 elements + initializers.emplace_back( + CreateInitializer("tensor_f32", ONNX_NAMESPACE::TensorProto_DataType_FLOAT, {8, 8})); + + // 8-bit. 129 elements so we're 1 byte past 4 or 8 byte alignment + initializers.emplace_back( + CreateInitializer("tensor_8", ONNX_NAMESPACE::TensorProto_DataType_UINT8, {3, 43})); + + // 64-bit, 36 elements + initializers.emplace_back( + CreateInitializer("tensor_64", ONNX_NAMESPACE::TensorProto_DataType_INT64, {6, 6})); + + // small (should not use external) + initializers.emplace_back( + CreateInitializer("tensor_32_small", ONNX_NAMESPACE::TensorProto_DataType_INT32, {2, 2})); + + return initializers; +} +#endif // ENABLE_TRAINING_APIS + +#define ASSERT_EQ_TENSORPROTO_VECTORFIELD(EXPECTED, ACTUAL, FIELD) \ + ASSERT_EQ((EXPECTED).FIELD.size(), (ACTUAL).FIELD.size()); \ + for (int j = 0; j < (EXPECTED).FIELD.size(); ++j) { \ + ASSERT_EQ((EXPECTED).FIELD[j], (ACTUAL).FIELD[j]); \ + } + +} // namespace + +// tests method that loads to tensorproto protobuf (used when loading a checkpoint into an inference model) +TEST(FlatbufferUtilsTest, ExternalWriteReadWithLoadInitializers) { + // create data + auto initializers = CreateInitializers(); + + flatbuffers::FlatBufferBuilder builder(1024); + + // write + std::ofstream output_stream; + ExternalDataWriter writer; + CreateWriter("ExternalWriteReadBasicTest.bin", output_stream, writer); + + std::vector> fbs_tensors; + for (const auto& initializer : initializers) { + flatbuffers::Offset fbs_tensor; + ASSERT_STATUS_OK(SaveInitializerOrtFormat(builder, initializer, Path(), fbs_tensor, writer)); + fbs_tensors.push_back(fbs_tensor); + } + + ASSERT_EQ(output_stream.tellp(), 840) << "Data written to the external file is incorrect."; + output_stream.close(); + ASSERT_TRUE(output_stream.good()) << "Failed to close data file."; + + auto fbs_tensors_offset = builder.CreateVector(fbs_tensors); + fbs::test::TestDataBuilder tdb(builder); + tdb.add_initializers(fbs_tensors_offset); + + builder.Finish(tdb.Finish()); + auto fb_data = builder.GetBufferSpan(); + + const auto* test_data = fbs::test::GetTestData(fb_data.data()); + const auto* fbs_tensors2 = test_data->initializers(); + + // read + std::ifstream input_stream; + ExternalDataReader reader; + CreateReader("ExternalWriteReadBasicTest.bin", input_stream, reader); + + std::vector loaded_initializers; + OrtFormatLoadOptions options; + + for (const auto* fbs_tensor : *fbs_tensors2) { + ONNX_NAMESPACE::TensorProto initializer; + ASSERT_STATUS_OK(LoadInitializerOrtFormat(*fbs_tensor, initializer, options, reader)); + loaded_initializers.emplace_back(std::move(initializer)); + // also check that the loaded flatbuffer tensors have accurately written to the external_data_offset field + if (fbs_tensor->data_type() != fbs::TensorDataType::STRING && fbs_tensor->name()->str() != "tensor_32_small") { + ASSERT_GE(fbs_tensor->external_data_offset(), 0) + << "external_data_offset is not set when we expect it to be set for tensor " << fbs_tensor->name()->str(); + } else { + ASSERT_EQ(fbs_tensor->external_data_offset(), -1) + << "external_data_offset is set for string data when we expect it to not be set for tensor " + << fbs_tensor->name()->str(); + ASSERT_TRUE(fbs_tensor->raw_data() || fbs_tensor->string_data()) + << "tensor has no data attached to it" << fbs_tensor->name()->str(); + } + } + + // initializers = 
expected, in the form of tensorproto + // loaded_initializers = actual, in the form of tensorproto + ASSERT_EQ(initializers.size(), loaded_initializers.size()); + + for (int i = 0; i < narrow(initializers.size()); i++) { + const auto& expected_initializer = initializers[i]; + const auto& loaded_initializer = loaded_initializers[i]; + // validate the loaded initializer + ASSERT_EQ(expected_initializer.name(), loaded_initializer.name()); + ASSERT_EQ(expected_initializer.data_type(), loaded_initializer.data_type()); + ASSERT_EQ_TENSORPROTO_VECTORFIELD(expected_initializer, loaded_initializer, dims()); + + if (loaded_initializer.data_type() != ONNX_NAMESPACE::TensorProto_DataType_STRING) { + std::vector expected_data, loaded_data; + ASSERT_STATUS_OK(onnxruntime::utils::UnpackInitializerData(expected_initializer, expected_data)); + ASSERT_STATUS_OK(onnxruntime::utils::UnpackInitializerData(loaded_initializer, loaded_data)); + ASSERT_EQ(expected_data, loaded_data) << loaded_initializer.name(); + } else { + // string type tensor + ASSERT_EQ_TENSORPROTO_VECTORFIELD(expected_initializer, loaded_initializer, string_data()); + } + } +} + +#ifdef ENABLE_TRAINING_APIS +// tests method that loads to OrtTensor (used when loading a checkpoint into a checkpoint state) +TEST(FlatbufferUtilsTest, ExternalWriteReadWithLoadOrtTensor) { + // create data + auto initializers = CreateInitializersNoString(); + + flatbuffers::FlatBufferBuilder builder(1024); + + // write + std::ofstream output_stream; + ExternalDataWriter writer; + CreateWriter("ExternalWriteReadBasicTest.bin", output_stream, writer); + + std::vector> fbs_tensors; + for (const auto& initializer : initializers) { + flatbuffers::Offset fbs_tensor; + ASSERT_STATUS_OK(SaveInitializerOrtFormat(builder, initializer, Path(), fbs_tensor, writer)); + fbs_tensors.push_back(fbs_tensor); + } + + ASSERT_EQ(output_stream.tellp(), 840) << "Data written to the external file is incorrect."; + output_stream.close(); + ASSERT_TRUE(output_stream.good()) << "Failed to close data file."; + + auto fbs_tensors_offset = builder.CreateVector(fbs_tensors); + fbs::test::TestDataBuilder tdb(builder); + tdb.add_initializers(fbs_tensors_offset); + builder.Finish(tdb.Finish()); + auto fb_data = builder.GetBufferSpan(); + + auto test_data = fbs::test::GetTestData(fb_data.data()); + auto fbs_tensors2 = test_data->initializers(); + + // read + std::ifstream input_stream; + ExternalDataReader reader; + CreateReader("ExternalWriteReadBasicTest.bin", input_stream, reader); + static onnxruntime::CPUExecutionProviderInfo info; + static onnxruntime::CPUExecutionProvider cpu_provider(info); + AllocatorPtr cpu_allocator = cpu_provider.CreatePreferredAllocators()[0]; + + std::vector loaded_tensors; + + for (const auto* fbs_tensor : *fbs_tensors2) { + Tensor ort_tensor; + std::string fbs_tensor_name = fbs_tensor->name()->str(); + ASSERT_STATUS_OK(LoadOrtTensorOrtFormat(*fbs_tensor, cpu_allocator, fbs_tensor_name, ort_tensor, reader)); + loaded_tensors.push_back(std::move(ort_tensor)); + } + + ASSERT_EQ(initializers.size(), loaded_tensors.size()); + + // convert expected initializers (TensorProtos) to Tensors for easier comparison + std::vector expected_tensors; + const Env& env = Env::Default(); + const auto* placeholder_model_path = ORT_TSTR("placeholder_model_path"); + + for (int i = 0; i < narrow(initializers.size()); i++) { + auto expected_proto = initializers[i]; + TensorShape tensor_shape = utils::GetTensorShapeFromTensorProto(expected_proto); + const auto* type = 
DataTypeImpl::TensorTypeFromONNXEnum(expected_proto.data_type())->GetElementType(); + Tensor expected_tensor(type, tensor_shape, cpu_allocator); + ASSERT_STATUS_OK(utils::TensorProtoToTensor(env, placeholder_model_path, initializers[i], expected_tensor)); + expected_tensors.push_back(std::move(expected_tensor)); + } + + // validate data + for (int i = 0; i < narrow(expected_tensors.size()); i++) { + auto& expected_tensor = expected_tensors[i]; + auto& loaded_tensor = loaded_tensors[i]; + ASSERT_EQ(expected_tensor.DataType(), loaded_tensor.DataType()); + ASSERT_EQ(expected_tensor.Shape(), loaded_tensor.Shape()); + ASSERT_EQ(expected_tensor.SizeInBytes(), loaded_tensor.SizeInBytes()); + gsl::span expected_data(static_cast(expected_tensor.DataRaw()), + expected_tensor.SizeInBytes()); + gsl::span loaded_data(static_cast(loaded_tensor.DataRaw()), + loaded_tensor.SizeInBytes()); + + ASSERT_EQ(expected_data, loaded_data); + } +} +#endif // ENABLE_TRAINING_APIS +} // namespace test +} // namespace onnxruntime + +#endif // ORT_MINIMAL_BUILD diff --git a/onnxruntime/test/flatbuffers/flatbuffers_utils_test.fbs b/onnxruntime/test/flatbuffers/flatbuffers_utils_test.fbs new file mode 100644 index 0000000000000..a10040294640d --- /dev/null +++ b/onnxruntime/test/flatbuffers/flatbuffers_utils_test.fbs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// Update the generated schema using /onnxruntime/core/flatbuffers/schema/compile_schema.py. +// See /onnxruntime/core/flatbuffers/schema/README.md for usage instructions. +// Diff the old and new generated header. Copy #include info from old as it has some customizations. +include "../../core/flatbuffers/schema/ort.fbs"; + +namespace onnxruntime.fbs.test; + +table TestData { + initializers:[Tensor]; +} + +root_type TestData; diff --git a/onnxruntime/test/flatbuffers/flatbuffers_utils_test.fbs.h b/onnxruntime/test/flatbuffers/flatbuffers_utils_test.fbs.h new file mode 100644 index 0000000000000..0c8e89a80ed23 --- /dev/null +++ b/onnxruntime/test/flatbuffers/flatbuffers_utils_test.fbs.h @@ -0,0 +1,113 @@ +// automatically generated by the FlatBuffers compiler, do not modify + + +#ifndef FLATBUFFERS_GENERATED_FLATBUFFERSUTILSTEST_ONNXRUNTIME_FBS_TEST_H_ +#define FLATBUFFERS_GENERATED_FLATBUFFERSUTILSTEST_ONNXRUNTIME_FBS_TEST_H_ + +// manual edit to use wrapper in core/common +#include "core/common/flatbuffers.h" + +// Ensure the included flatbuffers.h is the same version as when this file was +// generated, otherwise it may not be compatible. 
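+// If this assert fires, regenerate this header from flatbuffers_utils_test.fbs with
+// onnxruntime/core/flatbuffers/schema/compile_schema.py (see the comments in that schema file for usage),
+// then re-apply the manual include edits noted above.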
+static_assert(FLATBUFFERS_VERSION_MAJOR == 23 && + FLATBUFFERS_VERSION_MINOR == 5 && + FLATBUFFERS_VERSION_REVISION == 26, + "Non-compatible flatbuffers version included"); + +// manual edit to set include path for this +#include "core/flatbuffers/schema/ort.fbs.h" + +namespace onnxruntime { +namespace fbs { +namespace test { + +struct TestData; +struct TestDataBuilder; + +struct TestData FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef TestDataBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_INITIALIZERS = 4 + }; + const ::flatbuffers::Vector<::flatbuffers::Offset> *initializers() const { + return GetPointer> *>(VT_INITIALIZERS); + } + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_INITIALIZERS) && + verifier.VerifyVector(initializers()) && + verifier.VerifyVectorOfTables(initializers()) && + verifier.EndTable(); + } +}; + +struct TestDataBuilder { + typedef TestData Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_initializers(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> initializers) { + fbb_.AddOffset(TestData::VT_INITIALIZERS, initializers); + } + explicit TestDataBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; + } +}; + +inline ::flatbuffers::Offset CreateTestData( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> initializers = 0) { + TestDataBuilder builder_(_fbb); + builder_.add_initializers(initializers); + return builder_.Finish(); +} + +inline ::flatbuffers::Offset CreateTestDataDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector<::flatbuffers::Offset> *initializers = nullptr) { + auto initializers__ = initializers ? 
_fbb.CreateVector<::flatbuffers::Offset>(*initializers) : 0; + return onnxruntime::fbs::test::CreateTestData( + _fbb, + initializers__); +} + +inline const onnxruntime::fbs::test::TestData *GetTestData(const void *buf) { + return ::flatbuffers::GetRoot(buf); +} + +inline const onnxruntime::fbs::test::TestData *GetSizePrefixedTestData(const void *buf) { + return ::flatbuffers::GetSizePrefixedRoot(buf); +} + +inline bool VerifyTestDataBuffer( + ::flatbuffers::Verifier &verifier) { + return verifier.VerifyBuffer(nullptr); +} + +inline bool VerifySizePrefixedTestDataBuffer( + ::flatbuffers::Verifier &verifier) { + return verifier.VerifySizePrefixedBuffer(nullptr); +} + +inline void FinishTestDataBuffer( + ::flatbuffers::FlatBufferBuilder &fbb, + ::flatbuffers::Offset root) { + fbb.Finish(root); +} + +inline void FinishSizePrefixedTestDataBuffer( + ::flatbuffers::FlatBufferBuilder &fbb, + ::flatbuffers::Offset root) { + fbb.FinishSizePrefixed(root); +} + +} // namespace test +} // namespace fbs +} // namespace onnxruntime + +#endif // FLATBUFFERS_GENERATED_FLATBUFFERSUTILSTEST_ONNXRUNTIME_FBS_TEST_H_ diff --git a/orttraining/orttraining/test/training_api/core/checkpoint_test.cc b/orttraining/orttraining/test/training_api/core/checkpoint_test.cc index 5c53addb853e4..af11921ff2678 100644 --- a/orttraining/orttraining/test/training_api/core/checkpoint_test.cc +++ b/orttraining/orttraining/test/training_api/core/checkpoint_test.cc @@ -470,4 +470,115 @@ TEST(CheckpointApiTest, LoadAndSaveNominalCheckpoint) { ASSERT_EQ(param->Data().Get().Shape().Size(), 1); } } + +/** + * Load ONNX model from file path, save into ORT checkpoint files, + * Then load it into ORT, compare with the initial parameter values. + */ +TEST(CheckpointApiTest, SaveOnnxModelAsCheckpoint_ThenLoad_WithExternalData) { + /// Phase 1 - Test Preparation + /// Prepare the data and dest folder for saving checkpoint. + /// Also cooked the data for test result comparison. + + // Model path and trainable parameter name definitions. + auto model_uri = MODEL_FOLDER "transform/computation_reduction/gathernd/e2e.onnx"; + std::vector expected_trainable_param_names{ + "bert.encoder.layer.2.output.LayerNorm.weight", + "bert.encoder.layer.2.output.LayerNorm.bias", + "add1_initializerr", + "cls.predictions.transform.LayerNorm.weight", + "cls.predictions.transform.LayerNorm.bias", + "bert.embeddings.word_embeddings.weight_transposed", + "cls.predictions.bias", + }; + + // Extract a weight value baseline to compare. + // expected_trainable_param_name_to_ort_value is used to compare with the values after restoring from checkpoint. 
+ auto logger_ptr = std::make_unique(logging::LoggingManager::DefaultLogger()); + std::shared_ptr p_model; + ASSERT_STATUS_OK(Model::Load(model_uri, p_model, nullptr, *logger_ptr)); + Graph& graph = p_model->MainGraph(); + + std::vector trainable_param_values; + trainable_param_values.reserve(expected_trainable_param_names.size()); + std::vector non_trainable_param_values; + const auto& initializer_tensors = graph.GetAllInitializedTensors(); + for (const auto& [initializer_name, tensor_proto] : initializer_tensors) { + if (std::find(expected_trainable_param_names.begin(), expected_trainable_param_names.end(), initializer_name) != + expected_trainable_param_names.end()) { + trainable_param_values.emplace_back(static_cast(*tensor_proto)); + } else { + non_trainable_param_values.emplace_back(static_cast(*tensor_proto)); + } + } + + std::unordered_map expected_trainable_param_name_to_ort_value; + ASSERT_STATUS_OK( + CreateOrtValuesFromTensorProtos(trainable_param_values, expected_trainable_param_name_to_ort_value)); + + // Remove the temporary directory if it already exists. + auto ckpt_test_root_dir = ORT_TSTR("checkpointing_api_test_dir"); + TemporaryDirectory tmp_dir{ckpt_test_root_dir}; + + /// Phase 2 - Run save checkpoint APIs. + /// And check the result checkpoint files. + + // Call Save APIs. + PathString checkpoint_path{ + ConcatPathComponent(tmp_dir.Path(), ORT_TSTR("e2e_ckpt_save_cpu"))}; + ASSERT_STATUS_OK(SaveCheckpoint(trainable_param_values, non_trainable_param_values, checkpoint_path, + false /* nominal checkpoint */, 0 /* external_data_threshold */)); + + std::basic_string checkpoint_path_copy(checkpoint_path); + PathString external_data_for_checkpoint_path{ + checkpoint_path_copy.append(ORT_TSTR(".data"))}; + + ASSERT_TRUE(std::filesystem::exists(checkpoint_path)); + ASSERT_TRUE(std::filesystem::exists(external_data_for_checkpoint_path)); + + // minor difference across platforms. check it's in the right ballpark + ASSERT_LT(std::filesystem::file_size(checkpoint_path), 1000); + ASSERT_GT(std::filesystem::file_size(external_data_for_checkpoint_path), 200000); + + /// Phase 3 - Run load checkpoint APIs. + /// And check the result comparable with initial parameter values. + + // Call Load APIs + CheckpointState checkpoint_state_to_load; + ASSERT_STATUS_OK(LoadCheckpoint(checkpoint_path, checkpoint_state_to_load)); + ModuleCheckpointState module_state = checkpoint_state_to_load.module_checkpoint_state; + const auto& param_states = module_state.named_parameters; + std::unordered_map restored_param_name_to_ort_values; + std::vector restored_trainable_param_names; + for (auto it = param_states.begin(); it != param_states.end(); ++it) { + restored_param_name_to_ort_values.insert({it->first, it->second->Data()}); + if (it->second->RequiresGrad()) { + restored_trainable_param_names.emplace_back(it->first); + } + } + + // Check loaded parameter's values are same with original ones. 
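+  // 7 == the trainable parameter names listed above; the restored module state also contains the
+  // remaining non-trainable parameters, hence 9 named parameters in total.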
+ ASSERT_EQ(expected_trainable_param_name_to_ort_value.size(), restored_trainable_param_names.size()); + ASSERT_EQ(expected_trainable_param_name_to_ort_value.size(), 7); + ASSERT_EQ(restored_param_name_to_ort_values.size(), 9); + + std::sort(expected_trainable_param_names.begin(), expected_trainable_param_names.end()); + std::sort(restored_trainable_param_names.begin(), restored_trainable_param_names.end()); + ASSERT_EQ(expected_trainable_param_names, restored_trainable_param_names); + + for (const auto& name : restored_trainable_param_names) { + const auto& restored_ort_value = restored_param_name_to_ort_values[name]; + const auto& expected_ort_value = expected_trainable_param_name_to_ort_value.at(name); + + ASSERT_TRUE(restored_ort_value.IsTensor() && expected_ort_value.IsTensor()); + const Tensor& restored_tensor = restored_ort_value.Get(); + const Tensor& expected_tensor = expected_ort_value.Get(); + ASSERT_EQ(expected_tensor.DataType(), restored_tensor.DataType()); + ASSERT_EQ(expected_tensor.SizeInBytes(), restored_tensor.SizeInBytes()); + ASSERT_EQ(expected_tensor.DataType(), restored_tensor.DataType()); + + ASSERT_EQ(std::memcmp(expected_tensor.DataRaw(), restored_tensor.DataRaw(), expected_tensor.SizeInBytes()), 0); + } +} + } // namespace onnxruntime::training::test diff --git a/orttraining/orttraining/test/training_api/core/training_api_tests.cc b/orttraining/orttraining/test/training_api/core/training_api_tests.cc index e2232687d0b07..3cbb05cced0a1 100644 --- a/orttraining/orttraining/test/training_api/core/training_api_tests.cc +++ b/orttraining/orttraining/test/training_api/core/training_api_tests.cc @@ -66,6 +66,24 @@ Status CreateFakeOptimizerCheckpointStateOnCPU( return Status::OK(); } +void RunInferenceSession(const Environment& env, const PathString& inference_model_path) { + auto inference_session = std::make_unique(onnxruntime::SessionOptions(), env); + ASSERT_STATUS_OK(inference_session->Load(inference_model_path)); + ASSERT_STATUS_OK(inference_session->Initialize()); + + std::vector input_names({"input-0"}); + OrtValue graph_input; + GenerateRandomInput(std::array{2, 784}, graph_input); + + std::vector feeds; + feeds.emplace_back(graph_input); + std::vector output_names({"output-0"}); + std::vector outputs; + + ASSERT_STATUS_OK(inference_session->Run(RunOptions(), input_names, feeds, output_names, &outputs)); + ASSERT_EQ(outputs.size(), 1U); +} + void TestModuleExport(const std::vector>& providers) { auto training_model_uri = MODEL_FOLDER "training_model.onnx"; auto eval_model_uri = MODEL_FOLDER "eval_model.onnx"; @@ -117,21 +135,56 @@ void TestModuleExport(const std::vector>& pr ASSERT_EQ(softmaxceloss_node_found(eval_model), true); ASSERT_EQ(softmaxceloss_node_found(inference_model), false); - // Try running an inference session - auto inference_session = std::make_unique(onnxruntime::SessionOptions(), *env); - ASSERT_STATUS_OK(inference_session->Load(inference_model_path)); - ASSERT_STATUS_OK(inference_session->Initialize()); - std::vector input_names({"input-0"}); - OrtValue graph_input; - GenerateRandomInput(std::array{2, 784}, graph_input); - std::vector feeds; - feeds.emplace_back(graph_input); - std::vector output_names({"output-0"}); - std::vector outputs; - ASSERT_STATUS_OK(inference_session->Run(RunOptions(), input_names, feeds, output_names, &outputs)); - ASSERT_EQ(outputs.size(), 1U); + RunInferenceSession(*env, inference_model_path); } +void TestModuleExportWithExternalData(const std::vector>& providers) { + auto training_model_uri = MODEL_FOLDER 
"training_model.onnx"; + auto eval_model_uri = MODEL_FOLDER "eval_model.onnx"; + + onnxruntime::training::api::CheckpointState state; + auto checkpoint_to_load_path = MODEL_FOLDER "checkpoint.ckpt"; + ASSERT_STATUS_OK(onnxruntime::training::api::LoadCheckpoint(checkpoint_to_load_path, state)); + + // whilst the checkpoint doesn't _actually_ have external data, setting this to true is enough to test that + // ExportModelForInferencing will use external data when writing out the ONNX model. + state.has_external_data = true; + + std::unique_ptr env; + ASSERT_STATUS_OK(Environment::Create(nullptr, env)); + auto model_identifier = ModelIdentifiers(onnxruntime::ToUTF8String(training_model_uri), + std::optional(onnxruntime::ToUTF8String(eval_model_uri)), + std::nullopt); + auto model = std::make_unique( + model_identifier, &state, onnxruntime::SessionOptions(), *env, providers); + + std::string inference_model_path = "inference_model.onnx"; + std::string external_data_for_inference_model = inference_model_path + ".data"; + + // if exported inference model and corresponding data already exists, then delete it + if (std::filesystem::exists(inference_model_path)) { + std::filesystem::remove(inference_model_path); + } + + if (std::filesystem::exists(external_data_for_inference_model)) { + std::filesystem::remove(external_data_for_inference_model); + } + + std::vector graph_output_names({"output-0"}); + + ASSERT_STATUS_OK(model->ExportModelForInferencing(inference_model_path, graph_output_names)); + + // check that exported files exist + ASSERT_TRUE(std::filesystem::exists(inference_model_path)); + ASSERT_TRUE(std::filesystem::exists(external_data_for_inference_model)); + + // ensure there's roughly the expected amount of data in each. sizes may differ a little by platform. + ASSERT_GT(std::filesystem::file_size(inference_model_path), 1000); + ASSERT_LT(std::filesystem::file_size(inference_model_path), 2000); + ASSERT_GT(std::filesystem::file_size(external_data_for_inference_model), 1500000); + + RunInferenceSession(*env, ToPathString(inference_model_path)); +} } // namespace TEST(TrainingApiTest, ModuleParametersSize) { @@ -442,6 +495,11 @@ TEST(TrainingApiTest, ModuleExportModelForInferencingCPU) { TestModuleExport(providers); } +TEST(TrainingApiTest, ModuleExportModelForInferencingCPU_WithExternalData) { + std::vector> providers{onnxruntime::test::DefaultCpuExecutionProvider()}; + TestModuleExportWithExternalData(providers); +} + #if defined(USE_CUDA) TEST(TrainingApiTest, ModuleExportModelForInferencingCUDA) { std::vector> providers{onnxruntime::test::DefaultCudaExecutionProvider()}; diff --git a/orttraining/orttraining/training_api/checkpoint.cc b/orttraining/orttraining/training_api/checkpoint.cc index 720bdd7e68dd3..879191acb6605 100644 --- a/orttraining/orttraining/training_api/checkpoint.cc +++ b/orttraining/orttraining/training_api/checkpoint.cc @@ -7,11 +7,74 @@ #include "core/flatbuffers/schema/ort_training_checkpoint.fbs.h" #include "core/framework/framework_common.h" #include "core/graph/graph_flatbuffers_utils.h" +#include "core/framework/tensor_external_data_info.h" namespace onnxruntime::training::api { +PathString ExternalCheckpointDataPath(const PathString& checkpoint_path) { + return checkpoint_path + ORT_TSTR(".data"); +} + namespace { +/** + * @brief Helper method to read data from an external file. + * @param external_data_stream Stream that data will be read from. + * @param offset Offset in the external data file to begin reading from. 
+ * @param output_buffer Buffer to store the read data. + * @return Status of the operation. + */ +Status ReadFromExternalFileHelper(std::ifstream& external_data_stream, + uint64_t offset, gsl::span output_buffer) { + external_data_stream.seekg(offset); + external_data_stream.read(reinterpret_cast(output_buffer.data()), output_buffer.size()); + + const auto [err, msg] = GetErrnoInfo(); + ORT_RETURN_IF(external_data_stream.fail(), "Failed reading external checkpoint data. ", msg, " errno:", errno); + + return Status::OK(); +} + +/** + * @brief Helper method to write data to an external file. + * @param external_data_stream Stream where data will be written to. + * @param data_type data type to write -- used to determine alignment. + * @param bytes information to write in bytes. + * @param offset Modified to be the offset in the external data file where the data was written. + * @return Status of the operation. + */ +Status WriteToExternalFileHelper(std::ofstream& external_data_stream, + int32_t data_type, gsl::span bytes, uint64_t& offset) { + // for now align everything to 4 or 8 bytes. we can optimize this later if needed. + int32_t alignment = 4; + + // Add more special-cased types if needed. Currently we don't expect to see any other types that are >= 8-bytes here. + if (data_type == ONNX_NAMESPACE::TensorProto_DataType_INT64 || + data_type == ONNX_NAMESPACE::TensorProto_DataType_DOUBLE) { + alignment = 8; + } + + int64_t pos = external_data_stream.tellp(); + + if (pos % alignment != 0) { + // 8 bytes of 0's so we can pad to alignment 8 in a single `write` + constexpr static const uint64_t zeros = 0; + std::streamsize padding = alignment - (pos % alignment); + // skipping validation of this write. doesn't matter if this or the 'real' write below fails. if this does the + // other will as well as nothing will clear the failure bit in the ofstream in between the calls. + external_data_stream.write(reinterpret_cast(&zeros), padding); + pos += padding; + } + + external_data_stream.write(reinterpret_cast(bytes.data()), bytes.size()); + const auto [err, msg] = GetErrnoInfo(); + ORT_RETURN_IF(external_data_stream.fail(), "Failed writing external checkpoint data. ", msg, " errno:", errno); + + offset = pos; + + return Status::OK(); +} + /** * @brief Sort keys of a hash map. * @param hash_map Hash map to sort. @@ -37,13 +100,15 @@ InlinedVector SortedKeys(const T& hash_map) { * @param copy_tensor Function to copy the tensor to a cpu buffer. * @param builder Builder to create flatbuffer tensors. * @param flatbuffer_tensor Flatbuffer tensor to be populated. + * @param external_data_writer Delegate to write tensor data to an external file. May be a nullptr. * @return Status of the operation. */ Status FlatbufferTensorFromOrtValue( const std::string& tensor_name, const OrtValue& ort_value, const std::function copy_tensor, flatbuffers::FlatBufferBuilder& builder, - flatbuffers::Offset& fbs_tensor) { + flatbuffers::Offset& fbs_tensor, + fbs::utils::ExternalDataWriter external_data_writer) { // Check if the OrtValue is a tensor. 
ORT_RETURN_IF_NOT(ort_value.IsTensor(), "Only tensor OrtValues can be saved to a checkpoint."); const onnxruntime::Tensor& src_tensor = ort_value.Get(); @@ -56,9 +121,9 @@ Status FlatbufferTensorFromOrtValue( const OrtMemoryInfo cpu_alloc_info{onnxruntime::CPU, OrtDeviceAllocator}; onnxruntime::Tensor dst_tensor{src_tensor.DataType(), src_tensor.Shape(), tensor_data_buffer.data(), cpu_alloc_info}; ORT_RETURN_IF_ERROR(copy_tensor(src_tensor, dst_tensor)); - ORT_RETURN_IF_ERROR(fbs::utils::SaveOrtTensorOrtFormat(tensor_name, dst_tensor, builder, fbs_tensor)); + ORT_RETURN_IF_ERROR(fbs::utils::SaveOrtTensorOrtFormat(tensor_name, dst_tensor, builder, fbs_tensor, external_data_writer)); } else { - ORT_RETURN_IF_ERROR(fbs::utils::SaveOrtTensorOrtFormat(tensor_name, src_tensor, builder, fbs_tensor)); + ORT_RETURN_IF_ERROR(fbs::utils::SaveOrtTensorOrtFormat(tensor_name, src_tensor, builder, fbs_tensor, external_data_writer)); } return Status::OK(); @@ -70,10 +135,12 @@ Status FlatbufferTensorFromOrtValue( * @param fbs_tensor Flatbuffer tensor. * @param tensor_name Name of the tensor. * @param ort_value OrtValue object to be populated. + * @param external_data_reader delegate to read initializer data from an external file or buffer * @return Status of the operation. */ Status OrtValueFromFlatbufferTensor(const fbs::Tensor& fbs_tensor, - std::string& tensor_name, OrtValue& ort_value) { + std::string& tensor_name, OrtValue& ort_value, + const fbs::utils::ExternalDataReader& external_data_reader) { // The assumption is that the flatbuffer buffer will be destructed once the checkpoint has been loaded. // And so, we must allocate a buffer where the tensor data can be copied using the cpu allocator. // This buffer is owned by the OrtValue. @@ -82,7 +149,7 @@ Status OrtValueFromFlatbufferTensor(const fbs::Tensor& fbs_tensor, AllocatorPtr cpu_allocator = cpu_provider.CreatePreferredAllocators()[0]; std::unique_ptr ort_tensor = std::make_unique(); - ORT_RETURN_IF_ERROR(fbs::utils::LoadOrtTensorOrtFormat(fbs_tensor, cpu_allocator, tensor_name, *ort_tensor)); + ORT_RETURN_IF_ERROR(fbs::utils::LoadOrtTensorOrtFormat(fbs_tensor, cpu_allocator, tensor_name, *ort_tensor, external_data_reader)); ort_value.Init(ort_tensor.release(), DataTypeImpl::GetType(), DataTypeImpl::GetType()->GetDeleteFunc()); @@ -97,13 +164,15 @@ Status OrtValueFromFlatbufferTensor(const fbs::Tensor& fbs_tensor, * @param data_transfer_manager Data transfer manager to copy the OrtValue tensor to a cpu buffer. * @param builder Builder to create flatbuffer tensors. * @param flatbuffer_tensors Flatbuffer tensors to be populated. + * @param external_data_writer Optional delegate to write the tensor data to an external file. * @return Status of the operation. 
*/ Status FlatbufferTensorsFromOrtValues( const InlinedHashMap& name_to_ort_value, const DataTransferManager* data_transfer_manager, flatbuffers::FlatBufferBuilder& builder, - std::vector>& flatbuffer_tensors) { + std::vector>& flatbuffer_tensors, + fbs::utils::ExternalDataWriter external_data_writer = nullptr) { for (const auto& name : SortedKeys(name_to_ort_value)) { const OrtValue& ort_value = name_to_ort_value.at(name); flatbuffers::Offset fbs_tensor; @@ -114,7 +183,7 @@ Status FlatbufferTensorsFromOrtValues( "Actual: nullptr."); return data_transfer_manager->CopyTensor(src_tensor, dst_tensor); }, - builder, fbs_tensor)); + builder, fbs_tensor, external_data_writer)); flatbuffer_tensors.push_back(fbs_tensor); } @@ -126,17 +195,18 @@ Status FlatbufferTensorsFromOrtValues( * * @param flatbuffer_tensors Flatbuffer tensors. * @param name_to_ort_value Name to OrtValue map to be populated. + * @param external_data_reader delegate to read initializer data from an external file or buffer * @return Status of the operation. */ Status OrtValuesFromFlatbufferTensors( const flatbuffers::Vector>& flatbuffer_tensors, - InlinedHashMap& name_to_ort_value) { + InlinedHashMap& name_to_ort_value, const fbs::utils::ExternalDataReader& external_data_reader) { for (const auto* fbs_tensor : flatbuffer_tensors) { ORT_RETURN_IF_NOT(fbs_tensor, "Encountered a nullptr flatbuffer tensor. Checkpoint file is invalid."); std::string tensor_name; OrtValue ort_value; - ORT_RETURN_IF_ERROR(OrtValueFromFlatbufferTensor(*fbs_tensor, tensor_name, ort_value)); + ORT_RETURN_IF_ERROR(OrtValueFromFlatbufferTensor(*fbs_tensor, tensor_name, ort_value, external_data_reader)); name_to_ort_value.emplace(std::move(tensor_name), std::move(ort_value)); } @@ -157,7 +227,9 @@ Status ToFile(const PathString& checkpoint_path, flatbuffers::FlatBufferBuilder& const uint8_t* buf = builder.GetBufferPointer(); int size = builder.GetSize(); file.write(reinterpret_cast(buf), size); - ORT_RETURN_IF_NOT(file, "Failed to save checkpoint to file: ", ToUTF8String(checkpoint_path)); + const auto [err, msg] = GetErrnoInfo(); + ORT_RETURN_IF_NOT(file, "Failed to save checkpoint to file: ", ToUTF8String(checkpoint_path), ". error:", msg, + " errno:", errno); return Status::OK(); } @@ -169,12 +241,15 @@ Status ToFile(const PathString& checkpoint_path, flatbuffers::FlatBufferBuilder& * @param trainable_tensor_protos trainable parameters in TensorProto format. * @param non_trainable_tensor_protos non-trainable parameters in TensorProto format. * @param checkpoint_path file where checkpoint is saved. + * @param nominal_checkpoint if true, create a nominal checkpoint. + * @param external_data_threshold threshold in bytes for external data. If the size of the data of the + * TensorProtos exceeds this threshold, then we save the data in an external data file. 
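+ * Note: the implementation compares the estimated flatbuffer size against external_data_threshold * 1MB,
+ * so in practice the threshold is expressed in megabytes.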
* @return Status */ -Status FromTensorProtos( - gsl::span trainable_tensor_protos, - gsl::span non_trainable_tensor_protos, - const PathString& checkpoint_path, const bool nominal_checkpoint) { +Status FromTensorProtos(gsl::span trainable_tensor_protos, + gsl::span non_trainable_tensor_protos, + const PathString& checkpoint_path, const bool nominal_checkpoint, + const size_t external_data_threshold) { const auto check_unique = [](gsl::span tensor_protos, InlinedHashSet& unique_names) { for (const auto& tensor_proto : tensor_protos) { @@ -194,29 +269,73 @@ Status FromTensorProtos( constexpr size_t m_bytes = 1024 * 1024; size_t fbs_buffer_size = 0U; + size_t fbs_potential_external_buffer_size = 0; + + const auto update_fbs_buffer_size = [&](const ONNX_NAMESPACE::TensorProto& tensor_proto) { + auto bytes = tensor_proto.ByteSizeLong(); + fbs_buffer_size += bytes; + + if (tensor_proto.data_location() == ONNX_NAMESPACE::TensorProto_DataLocation_EXTERNAL) { + std::unique_ptr external_data_info; + ORT_RETURN_IF_ERROR(ExternalDataInfo::Create(tensor_proto.external_data(), external_data_info)); + fbs_buffer_size += external_data_info->GetLength(); + } + + if (bytes > onnxruntime::fbs::utils::kMinimumSizeForExternalData && + tensor_proto.data_type() != ONNX_NAMESPACE::TensorProto_DataType_STRING) { + fbs_potential_external_buffer_size += bytes; + } + + return Status::OK(); + }; + for (const auto& tensor_proto : trainable_tensor_protos) { - fbs_buffer_size += tensor_proto.ByteSizeLong(); + ORT_RETURN_IF_ERROR(update_fbs_buffer_size(tensor_proto)); } + for (const auto& tensor_proto : non_trainable_tensor_protos) { - fbs_buffer_size += tensor_proto.ByteSizeLong(); + ORT_RETURN_IF_ERROR(update_fbs_buffer_size(tensor_proto)); } // Align buffer size to 1MB. fbs_buffer_size = std::max(fbs_buffer_size, m_bytes); fbs_buffer_size = ((fbs_buffer_size + m_bytes - 1) / m_bytes) * m_bytes; + + const bool use_external_data = fbs_buffer_size >= external_data_threshold * m_bytes; + + fbs::utils::ExternalDataWriter external_data_writer = nullptr; + std::optional external_data_stream; + + if (use_external_data) { + fbs_buffer_size -= fbs_potential_external_buffer_size; // reduce fbs buffer size to account for external data + + auto data_path = ExternalCheckpointDataPath(checkpoint_path); + external_data_stream = std::ofstream(data_path, std::ios::binary); + + ORT_RETURN_IF(external_data_stream->fail(), "Failed to create checkpoint's external data file: ", + ToUTF8String(data_path)); + + // setup the data writer to write aligned data to external_data_stream + external_data_writer = [&external_data_stream](int32_t data_type, gsl::span bytes, + uint64_t& offset) { + return WriteToExternalFileHelper(external_data_stream.value(), data_type, bytes, offset); + }; + } + flatbuffers::FlatBufferBuilder builder(fbs_buffer_size); - const auto tensor_protos_to_fbs_tensors = [&builder](const auto& tensor_protos, auto& fbs_tensors) { - fbs_tensors.reserve(tensor_protos.size()); - for (const auto& tensor_proto : tensor_protos) { - flatbuffers::Offset fbs_tensor; - ORT_RETURN_IF_ERROR( - fbs::utils::SaveInitializerOrtFormat(builder, tensor_proto, Path(), fbs_tensor)); - fbs_tensors.push_back(fbs_tensor); - } + const auto tensor_protos_to_fbs_tensors = + [&builder, &external_data_writer](const auto& tensor_protos, auto& fbs_tensors) { + fbs_tensors.reserve(tensor_protos.size()); + for (const auto& tensor_proto : tensor_protos) { + flatbuffers::Offset fbs_tensor; + ORT_RETURN_IF_ERROR( + fbs::utils::SaveInitializerOrtFormat(builder, 
tensor_proto, Path(), fbs_tensor, external_data_writer)); + fbs_tensors.push_back(fbs_tensor); + } - return Status::OK(); - }; + return Status::OK(); + }; std::vector> trainable_tensors; ORT_RETURN_IF_ERROR(tensor_protos_to_fbs_tensors(trainable_tensor_protos, trainable_tensors)); @@ -224,6 +343,11 @@ Status FromTensorProtos( std::vector> non_trainable_tensors; ORT_RETURN_IF_ERROR(tensor_protos_to_fbs_tensors(non_trainable_tensor_protos, non_trainable_tensors)); + if (external_data_stream) { + ORT_RETURN_IF(external_data_stream->fail(), "Failed writing external checkpoint data."); + external_data_stream->close(); + } + const auto fbs_trainable_tensors = builder.CreateVector(trainable_tensors); const auto fbs_non_trainable_tensors = builder.CreateVector(non_trainable_tensors); @@ -231,6 +355,7 @@ Status FromTensorProtos( module_state_builder.add_requires_grad_params(fbs_trainable_tensors); module_state_builder.add_frozen_params(fbs_non_trainable_tensors); module_state_builder.add_is_nominal_state(nominal_checkpoint); + module_state_builder.add_has_external_data(use_external_data); flatbuffers::Offset fbs_module_state = module_state_builder.Finish(); fbs::CheckpointBuilder checkpoint_builder(builder); @@ -249,11 +374,13 @@ Status FromTensorProtos( * @param module_state module state containing the model's trainable and non-trainable parameters. * @param builder Flatbuffer builder. * @param fbs_module_state Flatbuffer module state to be populated. + * @param external_data_writer Optional delegate to write tensor data to an external file. * @return Status of the operation. */ Status FromModuleState(const ModuleCheckpointState& module_state, flatbuffers::FlatBufferBuilder& builder, - flatbuffers::Offset& fbs_module_state) { + flatbuffers::Offset& fbs_module_state, + fbs::utils::ExternalDataWriter external_data_writer = nullptr) { if (module_state.named_parameters.empty()) { return Status::OK(); } @@ -280,14 +407,14 @@ Status FromModuleState(const ModuleCheckpointState& module_state, ORT_RETURN_IF_ERROR(FlatbufferTensorsFromOrtValues( requires_grad_params, module_state.train_session_data_transfer_mgr, - builder, trainable_tensors)); + builder, trainable_tensors, external_data_writer)); std::vector> non_trainable_tensors; non_trainable_tensors.reserve(frozen_params.size()); ORT_RETURN_IF_ERROR(FlatbufferTensorsFromOrtValues( frozen_params, module_state.train_session_data_transfer_mgr, - builder, non_trainable_tensors)); + builder, non_trainable_tensors, external_data_writer)); const auto fbs_trainable_tensors = builder.CreateVector(trainable_tensors); const auto fbs_non_trainable_tensors = builder.CreateVector(non_trainable_tensors); @@ -296,6 +423,9 @@ Status FromModuleState(const ModuleCheckpointState& module_state, module_state_builder.add_requires_grad_params(fbs_trainable_tensors); module_state_builder.add_frozen_params(fbs_non_trainable_tensors); module_state_builder.add_is_nominal_state(module_state.is_nominal_state); + if (external_data_writer) { + module_state_builder.add_has_external_data(true); + } fbs_module_state = module_state_builder.Finish(); return Status::OK(); @@ -428,9 +558,25 @@ Status FromCheckpointState( const CheckpointState& state, const PathString& checkpoint_path, const bool include_optimizer_state) { flatbuffers::FlatBufferBuilder builder(1024); + fbs::utils::ExternalDataWriter external_data_writer = nullptr; + std::optional external_data_stream; + if (state.has_external_data) { + auto data_path = ExternalCheckpointDataPath(checkpoint_path); + external_data_stream = 
@@ -428,9 +558,25 @@ Status FromCheckpointState(
     const CheckpointState& state, const PathString& checkpoint_path, const bool include_optimizer_state) {
   flatbuffers::FlatBufferBuilder builder(1024);
 
+  fbs::utils::ExternalDataWriter external_data_writer = nullptr;
+  std::optional external_data_stream;
+  if (state.has_external_data) {
+    auto data_path = ExternalCheckpointDataPath(checkpoint_path);
+    external_data_stream = std::ofstream(data_path, std::ios::binary);
+
+    ORT_RETURN_IF(external_data_stream->fail(), "Failed to create checkpoint's external data file: ",
+                  ToUTF8String(data_path));
+
+    // setup the data writer to write aligned data to external_data_stream
+    external_data_writer = [&external_data_stream](int32_t data_type, gsl::span bytes,
                                                    uint64_t& offset) {
+      return WriteToExternalFileHelper(external_data_stream.value(), data_type, bytes, offset);
+    };
+  }
+
   // Write weight tensors files.
   flatbuffers::Offset module_state;
-  ORT_RETURN_IF_ERROR(FromModuleState(state.module_checkpoint_state, builder, module_state));
+  ORT_RETURN_IF_ERROR(FromModuleState(state.module_checkpoint_state, builder, module_state, external_data_writer));
 
   // Write optimizer state tensors files.
   std::vector> optimizer_groups;
@@ -485,17 +631,19 @@ Status FromFile(const PathString& checkpoint_path, InlinedVector& check
  *
  * @param fbs_module_state Flatbuffer module state.
  * @param module_state Module state to be populated.
+ * @param external_data_reader delegate to read initializer data from an external file or buffer
  * @return Status of the operation.
  */
 Status ToModuleState(
-    const onnxruntime::fbs::ModuleState& fbs_module_state, ModuleCheckpointState& module_state) {
+    const onnxruntime::fbs::ModuleState& fbs_module_state, ModuleCheckpointState& module_state,
+    const fbs::utils::ExternalDataReader& external_data_reader) {
   const auto* requires_grad_params = fbs_module_state.requires_grad_params();
   ORT_RETURN_IF_NOT(requires_grad_params, "Expected: Valid trainable tensors flatbuffer.",
                     " Actual: Encountered a nullptr. Checkpoint file is invalid");
   flatbuffers::uoffset_t trainable_params_size = requires_grad_params->size();
   InlinedHashMap trainable_params;
   trainable_params.reserve(trainable_params_size);
-  ORT_RETURN_IF_ERROR(OrtValuesFromFlatbufferTensors(*requires_grad_params, trainable_params));
+  ORT_RETURN_IF_ERROR(OrtValuesFromFlatbufferTensors(*requires_grad_params, trainable_params, external_data_reader));
 
   for (auto& [name, value] : trainable_params) {
     auto param = std::make_shared(name, value, true);
@@ -508,7 +656,7 @@ Status ToModuleState(
   flatbuffers::uoffset_t non_trainable_params_size = frozen_params->size();
   InlinedHashMap non_trainable_params;
   non_trainable_params.reserve(non_trainable_params_size);
-  ORT_RETURN_IF_ERROR(OrtValuesFromFlatbufferTensors(*frozen_params, non_trainable_params));
+  ORT_RETURN_IF_ERROR(OrtValuesFromFlatbufferTensors(*frozen_params, non_trainable_params, external_data_reader));
 
   for (auto& [name, value] : non_trainable_params) {
     auto param = std::make_shared(name, value, false);
@@ -525,11 +673,12 @@ Status ToModuleState(
  *
  * @param optimizer_groups Flatbuffer optimizer groups.
  * @param optimizer_state Optimizer state to be populated.
+ * @param external_data_reader delegate to read initializer data from an external file or buffer
  * @return Status of the operation.
  */
 Status ToOptimizerState(
     const flatbuffers::Vector>& optimizer_groups,
-    OptimizerCheckpointState& optimizer_state) {
+    OptimizerCheckpointState& optimizer_state, const fbs::utils::ExternalDataReader& external_data_reader) {
   for (const auto* optimizer_group : optimizer_groups) {
     ORT_RETURN_IF_NOT(optimizer_group, "Expected: Valid optimizer groups flatbuffer.",
                       " Actual: Encountered a nullptr. Checkpoint file is invalid");
@@ -557,7 +706,7 @@ Status ToOptimizerState(
       ORT_RETURN_IF_NOT(momentums, "Expected: Valid optimizer momentum tensors flatbuffer.",
Checkpoint file is invalid"); ORT_RETURN_IF_ERROR(OrtValuesFromFlatbufferTensors( - *momentums, optimizer_state_it->second->param_named_optimizer_states[param_name])); + *momentums, optimizer_state_it->second->param_named_optimizer_states[param_name], external_data_reader)); } } @@ -623,10 +772,12 @@ Status ToPropertyBag(const onnxruntime::fbs::PropertyBag& fbs_property_bag, * * @param checkpoint_path Path to the checkpoint file. * @param model_proto Model proto to be populated. + * @param checkpoint_path Path to the checkpoint file. * @return Status of the operation. */ Status ToModelProto(gsl::span checkpoint_bytes, - ONNX_NAMESPACE::ModelProto& model_proto) { + ONNX_NAMESPACE::ModelProto& model_proto, + const PathString& checkpoint_path) { flatbuffers::Verifier verifier(checkpoint_bytes.data(), checkpoint_bytes.size()); ORT_RETURN_IF_NOT(fbs::VerifyCheckpointBuffer(verifier), "Checkpoint verification failed."); @@ -654,21 +805,37 @@ Status ToModelProto(gsl::span checkpoint_bytes, "Cannot load a nominal checkpoint to a model proto. " "Expected: Complete checkpoint. Actual: Nominal checkpoint."); + fbs::utils::ExternalDataReader external_data_reader = nullptr; + std::optional external_data_stream; + + if (module_state->has_external_data()) { + auto data_path = ExternalCheckpointDataPath(checkpoint_path); + external_data_stream = std::ifstream(data_path, std::ios::binary); + ORT_RETURN_IF(external_data_stream->fail(), + "Failed to open checkpoint's external data file: ", ToUTF8String(data_path)); + + external_data_reader = [&external_data_stream](uint64_t offset, gsl::span output_buffer) { + return ReadFromExternalFileHelper(external_data_stream.value(), offset, output_buffer); + }; + } + InlinedHashMap param_tensor_protos; param_tensor_protos.reserve( static_cast(requires_grad_params->size()) + static_cast(frozen_params->size())); - const auto flatbuffer_tensors_to_tensor_protos = [¶m_tensor_protos](const auto& flatbuffer_tensors) { - OrtFormatLoadOptions load_options{false, false}; - for (const auto* fbs_tensor : flatbuffer_tensors) { - ORT_RETURN_IF_NOT(fbs_tensor, "Checkpoint is invalid. Expected: Valid flatbuffer tensor. Actual: nullptr."); - ONNX_NAMESPACE::TensorProto tensor_proto; - ORT_RETURN_IF_ERROR(fbs::utils::LoadInitializerOrtFormat(*fbs_tensor, tensor_proto, load_options)); - param_tensor_protos.insert({fbs_tensor->name()->str(), tensor_proto}); - } + const auto flatbuffer_tensors_to_tensor_protos = + [¶m_tensor_protos, &external_data_reader](const auto& flatbuffer_tensors) { + OrtFormatLoadOptions load_options{false, false}; + for (const auto* fbs_tensor : flatbuffer_tensors) { + ORT_RETURN_IF_NOT(fbs_tensor, "Checkpoint is invalid. Expected: Valid flatbuffer tensor. Actual: nullptr."); + ONNX_NAMESPACE::TensorProto tensor_proto; + ORT_RETURN_IF_ERROR(fbs::utils::LoadInitializerOrtFormat(*fbs_tensor, tensor_proto, load_options, + external_data_reader)); + param_tensor_protos.insert({fbs_tensor->name()->str(), tensor_proto}); + } - return Status::OK(); - }; + return Status::OK(); + }; ORT_RETURN_IF_ERROR(flatbuffer_tensors_to_tensor_protos(*requires_grad_params)); ORT_RETURN_IF_ERROR(flatbuffer_tensors_to_tensor_protos(*frozen_params)); @@ -690,11 +857,12 @@ Status ToModelProto(gsl::span checkpoint_bytes, /** * @brief Load checkpoint from a checkpoint file to a checkpoint state. * - * @param checkpoint_path Path to the checkpoint file. + * @param checkpoint_bytes Buffer with checkpoint. * @param state Checkpoint state to be populated. 
@@ -690,11 +857,12 @@ Status ToModelProto(gsl::span checkpoint_bytes,
 /**
  * @brief Load checkpoint from a checkpoint file to a checkpoint state.
  *
- * @param checkpoint_path Path to the checkpoint file.
+ * @param checkpoint_bytes Buffer containing the checkpoint.
  * @param state Checkpoint state to be populated.
+ * @param checkpoint_path Path to the checkpoint file. Optional, to support loading from a buffer.
  * @return Status of the operation.
  */
-Status ToCheckpointState(gsl::span checkpoint_bytes, CheckpointState& state) {
+Status ToCheckpointState(gsl::span checkpoint_bytes, CheckpointState& state, std::optional checkpoint_path) {
   flatbuffers::Verifier verifier(checkpoint_bytes.data(), checkpoint_bytes.size());
   ORT_RETURN_IF_NOT(fbs::VerifyCheckpointBuffer(verifier), "Checkpoint verification failed.");
@@ -706,13 +874,37 @@ Status ToCheckpointState(gsl::span checkpoint_bytes, CheckpointSt
                     "Checkpoint is invalid. Checkpoint version ", checkpoint_version, " is not supported.");
 
   const auto* fbs_module_state = fbs_checkpoint->module_state();
+
+  fbs::utils::ExternalDataReader external_data_reader = nullptr;
+  std::optional external_data_stream;
+
+  state.has_external_data = false;
+  if (nullptr != fbs_module_state && fbs_module_state->has_external_data()) {
+    state.has_external_data = true;
+    ORT_RETURN_IF_NOT(checkpoint_path.has_value(),
                      "The checkpoint references external data but no checkpoint path was provided. Loading a checkpoint with external data from a buffer is not supported yet.");
+    auto data_path = ExternalCheckpointDataPath(*checkpoint_path);
+    external_data_stream = std::ifstream(data_path, std::ios::binary);
+
+    if (external_data_stream.value().fail()) {
+      const auto [err, errmsg] = GetErrnoInfo();
+      return ORT_MAKE_STATUS(ONNXRUNTIME, INVALID_ARGUMENT,
                             "Failed to open checkpoint's external data file: ", ToUTF8String(data_path),
                             " error:", errmsg, " errno:", err);
+    }
+
+    external_data_reader = [&external_data_stream](uint64_t offset, gsl::span output_buffer) {
+      return ReadFromExternalFileHelper(external_data_stream.value(), offset, output_buffer);
+    };
+  }
+
   if (nullptr != fbs_module_state) {
-    ORT_RETURN_IF_ERROR(ToModuleState(*fbs_module_state, state.module_checkpoint_state));
+    ORT_RETURN_IF_ERROR(ToModuleState(*fbs_module_state, state.module_checkpoint_state, external_data_reader));
   }
 
   const auto* fbs_optimizer_groups = fbs_checkpoint->optimizer_groups();
   if (nullptr != fbs_optimizer_groups) {
-    ORT_RETURN_IF_ERROR(ToOptimizerState(*fbs_optimizer_groups, state.optimizer_checkpoint_state));
+    ORT_RETURN_IF_ERROR(ToOptimizerState(*fbs_optimizer_groups, state.optimizer_checkpoint_state, external_data_reader));
   }
 
   const auto* fbs_property_bag = fbs_checkpoint->property_bag();
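Editor's note: the load path above reports a failed open of the external data file with errno detail via `GetErrnoInfo` and `ORT_MAKE_STATUS`. Those are ORT facilities not shown in this section; the standalone sketch below illustrates the same reporting pattern using plain `errno`, noting that whether `errno` is set after a failed `std::ifstream::open` is implementation-defined (in practice it reflects the underlying `open(2)` failure).

```cpp
// Sketch of reporting a failed external-data open with errno detail (not ORT code).
#include <cerrno>
#include <cstring>
#include <fstream>
#include <iostream>
#include <string>

bool OpenExternalData(const std::string& data_path, std::ifstream& stream, std::string& error) {
  stream.open(data_path, std::ios::binary);
  if (stream.fail()) {
    // Approximation of the ORT message format seen above.
    error = "Failed to open checkpoint's external data file: " + data_path +
            " error:" + std::strerror(errno) + " errno:" + std::to_string(errno);
    return false;
  }
  return true;
}

int main() {
  std::ifstream stream;
  std::string error;
  if (!OpenExternalData("missing.data", stream, error)) {
    std::cerr << error << "\n";
  }
}
```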
@@ -745,13 +937,14 @@ InlinedVector Nominalize(gsl::span
 Status SaveCheckpoint(gsl::span trainable_tensor_protos,
                       gsl::span non_trainable_tensor_protos,
-                      const PathString& checkpoint_path, const bool nominal_checkpoint) {
+                      const PathString& checkpoint_path, const bool nominal_checkpoint,
+                      const size_t external_data_threshold) {
   ORT_RETURN_IF_NOT(FLATBUFFERS_LITTLEENDIAN, "ORT training checkpoint format only supports little-endian machines");
 
   return nominal_checkpoint ? save::FromTensorProtos(Nominalize(trainable_tensor_protos), Nominalize(non_trainable_tensor_protos),
-                                                     checkpoint_path, nominal_checkpoint)
+                                                     checkpoint_path, nominal_checkpoint, external_data_threshold)
                             : save::FromTensorProtos(trainable_tensor_protos, non_trainable_tensor_protos, checkpoint_path,
-                                                     nominal_checkpoint);
+                                                     nominal_checkpoint, external_data_threshold);
 }
 #endif
 
@@ -766,13 +959,13 @@ Status LoadCheckpoint(const PathString& checkpoint_path, CheckpointState& checkp
   InlinedVector checkpoint_bytes;
   ORT_RETURN_IF_ERROR(load::FromFile(checkpoint_path, checkpoint_bytes));
 
-  return load::ToCheckpointState(checkpoint_bytes, checkpoint_states);
+  return load::ToCheckpointState(checkpoint_bytes, checkpoint_states, checkpoint_path);
 }
 
 Status LoadCheckpointFromBuffer(gsl::span checkpoint_bytes, CheckpointState& checkpoint_state) {
   ORT_RETURN_IF_NOT(FLATBUFFERS_LITTLEENDIAN, "ORT training checkpoint format only supports little-endian machines");
 
-  return load::ToCheckpointState(checkpoint_bytes, checkpoint_state);
+  return load::ToCheckpointState(checkpoint_bytes, checkpoint_state, std::nullopt);
 }
 
 #if !defined(ORT_MINIMAL_BUILD)
@@ -782,7 +975,7 @@ Status LoadCheckpointToModel(const PathString& checkpoint_path,
   InlinedVector checkpoint_bytes;
   ORT_RETURN_IF_ERROR(load::FromFile(checkpoint_path, checkpoint_bytes));
 
-  return load::ToModelProto(checkpoint_bytes, model_proto);
+  return load::ToModelProto(checkpoint_bytes, model_proto, checkpoint_path);
 }
 
 #endif
diff --git a/orttraining/orttraining/training_api/checkpoint.h b/orttraining/orttraining/training_api/checkpoint.h
index 95d3820a33a70..0aab9087db8ed 100644
--- a/orttraining/orttraining/training_api/checkpoint.h
+++ b/orttraining/orttraining/training_api/checkpoint.h
@@ -30,8 +30,16 @@ struct CheckpointState {
   ModuleCheckpointState module_checkpoint_state;
   OptimizerCheckpointState optimizer_checkpoint_state;
   PropertyBag property_bag;
+  bool has_external_data = false;
 };
 
+/**
+ * @brief Get the external data path for a given checkpoint path.
+ *
+ * @param checkpoint_path file where checkpoint is stored.
+ */
+PathString ExternalCheckpointDataPath(const PathString& checkpoint_path);
+
 /**
  * @brief Save training states as ORT checkpoint.
  *
@@ -50,11 +58,18 @@ Status SaveCheckpoint(const CheckpointState& state, const PathString& checkpoint
  * @param non_trainable_tensor_protos non-trainable parameters in TensorProto format.
  * @param checkpoint_path file where checkpoint is saved.
  * @param nominal_checkpoint flag indicating whether to save the complete checkpoint or the nominal checkpoint.
+ * @param external_data_threshold optional threshold in bytes for external data. If the size of the data of the
+ *                                TensorProtos exceeds this threshold, the data is saved in an external data file.
+ * @remarks The calculation of the total size required to save a model is inexact, as we only look at the size of
+ *          the initializer data and ignore things like initializer names and shapes. The threshold should
+ *          therefore include some headroom, which is why we default to 1.8GB: it keeps the checkpoint file
+ *          below 2GB (larger files would fail due to the use of 32-bit offsets).
 * @return Status
 */
 Status SaveCheckpoint(gsl::span trainable_tensor_protos,
                       gsl::span non_trainable_tensor_protos,
-                      const PathString& checkpoint_path, const bool nominal_checkpoint);
+                      const PathString& checkpoint_path, const bool nominal_checkpoint,
+                      const size_t external_data_threshold = 1800 * 1024 * 1024);  // 1.8GB default
 
 #endif
 
 /**
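Editor's note: a quick sanity check on the numbers in the remark above. Flatbuffers use 32-bit offsets, so a single checkpoint buffer must stay below 2GB; the 1800 * 1024 * 1024 byte default leaves roughly 248 MiB of slack for the metadata the size estimate ignores. The figures below are exact; the "headroom" framing is this note's reading of the remark, not text from the patch.

```cpp
// Arithmetic behind the 1.8GB default versus the 2GB flatbuffer limit.
#include <cstdint>
#include <iostream>

int main() {
  constexpr uint64_t kDefaultThreshold = 1800ULL * 1024 * 1024;  // 1,887,436,800 bytes
  constexpr uint64_t kFlatbufferLimit = 2147483647ULL;           // 2^31 - 1, max value of a 32-bit offset
  std::cout << "default threshold: " << kDefaultThreshold << " bytes\n"
            << "flatbuffer limit:  " << kFlatbufferLimit << " bytes\n"
            << "headroom:          " << (kFlatbufferLimit - kDefaultThreshold) << " bytes\n";  // ~248 MiB
}
```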
diff --git a/orttraining/orttraining/training_api/module.cc b/orttraining/orttraining/training_api/module.cc
index 41ed79d285533..562b5da1b2b2b 100644
--- a/orttraining/orttraining/training_api/module.cc
+++ b/orttraining/orttraining/training_api/module.cc
@@ -674,11 +674,20 @@ Status Module::ExportModelForInferencing(const std::string& inference_model_path
 
   // The cloned model's inputs are transformed such that the model has only user defined inputs. All parameters
   // are moved to be constant initializers for the model.
-  ORT_RETURN_IF_ERROR(TransformModelInputsForInference(inference_model->MainGraph(), state_->module_checkpoint_state.named_parameters,
+  ORT_RETURN_IF_ERROR(TransformModelInputsForInference(inference_model->MainGraph(),
+                                                       state_->module_checkpoint_state.named_parameters,
                                                         eval_sess_->GetDataTransferManager()));
 
   // Save the model at the desired location.
+  if (state_->has_external_data) {
+    std::string external_data_name =
+        ORT_TSTR_CONVERT_TO_PRINTABLE_STRING(ExternalCheckpointDataPath(ToPathString(inference_model_path)));
+    PathString inference_model_pathstring = ToPathString(inference_model_path);
+    ORT_THROW_IF_ERROR(
+        Model::SaveWithExternalInitializers(*inference_model, inference_model_pathstring, external_data_name, 64));
+  } else {
+    ORT_THROW_IF_ERROR(Model::Save(*inference_model, inference_model_path));
+  }
-  ORT_THROW_IF_ERROR(Model::Save(*inference_model, inference_model_path));
   return Status::OK();
 }
 #endif
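Editor's note: the `SaveWithExternalInitializers` call above passes a size threshold of 64, so only initializers with at least roughly that much data are written to the external file while small ones stay embedded in the exported model. The snippet below only illustrates that split; the exact cut-off comparison ORT applies (>= versus >) is an assumption here.

```cpp
// Illustration of an initializer size threshold deciding inline vs. external storage.
#include <cstddef>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

int main() {
  constexpr size_t kSizeThreshold = 64;  // matches the value passed in the patch above
  const std::vector<std::pair<std::string, size_t>> initializers = {
      {"fc1.weight", 4 * 1024 * 1024},  // 4MB of float32 data -> external
      {"fc1.bias", 256},                // -> external
      {"scale", 4},                     // tiny scalar -> stays inline
  };
  for (const auto& [name, size_bytes] : initializers) {
    const bool external = size_bytes >= kSizeThreshold;  // assumed >= cut-off for this sketch
    std::cout << name << ": " << (external ? "external" : "inline") << "\n";
  }
}
```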