Fix Objective-C static analysis build #18606

Merged: 3 commits on Nov 29, 2023
Changes from all commits
17 changes: 17 additions & 0 deletions cmake/patches/abseil/absl_gh_issue_1435_workaround.patch
@@ -0,0 +1,17 @@
--- absl/container/internal/layout.h 2023-11-28 09:35:48
+++ absl/container/internal/layout.updated.h 2023-11-28 10:13:14
@@ -181,9 +181,11 @@
#include <sanitizer/asan_interface.h>
#endif

-#if defined(__GXX_RTTI)
-#define ABSL_INTERNAL_HAS_CXA_DEMANGLE
-#endif
+// Comment out ABSL_INTERNAL_HAS_CXA_DEMANGLE definition to work around this issue:
+// https://github.com/abseil/abseil-cpp/issues/1435
+// #if defined(__GXX_RTTI)
+// #define ABSL_INTERNAL_HAS_CXA_DEMANGLE
+// #endif

#ifdef ABSL_INTERNAL_HAS_CXA_DEMANGLE
#include <cxxabi.h>
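
For context on the workaround: as the patched file suggests, ABSL_INTERNAL_HAS_CXA_DEMANGLE gates the demangled-type-name path used for layout.h's debug output, so leaving it undefined falls back to mangled names rather than changing behavior. A rough sketch of that pattern, with illustrative names rather than abseil's actual source:

#include <cstdlib>
#include <string>
#include <typeinfo>
#ifdef ABSL_INTERNAL_HAS_CXA_DEMANGLE
#include <cxxabi.h>
#endif

// Sketch only: with the macro undefined (as after the patch), the abi::__cxa_demangle()
// call is compiled out and the mangled typeid() name is used as-is, which is still
// adequate for debug diagnostics.
template <class T>
std::string DebugTypeName() {
  std::string name = typeid(T).name();  // mangled name, e.g. "i" for int
#ifdef ABSL_INTERNAL_HAS_CXA_DEMANGLE
  int status = 0;
  if (char* demangled = abi::__cxa_demangle(name.c_str(), nullptr, nullptr, &status)) {
    name = demangled;  // human-readable name, e.g. "int"
    std::free(demangled);
  }
#endif
  return name;
}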
2 changes: 1 addition & 1 deletion include/onnxruntime/core/graph/graph.h
@@ -668,7 +668,7 @@ class Node {
The Graph representation containing the graph inputs and outputs, the Node instances,
and the edges connecting the nodes.
*/
class Graph {
class Graph { // NOLINT(clang-analyzer-optin.performance.Padding): preserve existing data member order for readability
public:
/** Gets the Graph name. */
const std::string& Name() const noexcept;
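
The added NOLINT suppresses clang-analyzer-optin.performance.Padding, a check that reports classes whose data members could be reordered to reduce padding; the comment opts Graph out so its members can stay in their existing, readable order. An illustrative example (not from onnxruntime) of the kind of layout that check flags:

// On a typical 64-bit ABI this struct occupies 24 bytes, 10 of them padding; reordering
// the members by decreasing alignment (d, i, c1, c2) shrinks it to 16 bytes, which is
// exactly the kind of finding optin.performance.Padding reports.
struct PoorlyPacked {
  char c1;   // 1 byte, then 7 bytes of padding so d can be 8-byte aligned
  double d;  // 8 bytes
  char c2;   // 1 byte, then 3 bytes of padding so i can be 4-byte aligned
  int i;     // 4 bytes
};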
45 changes: 28 additions & 17 deletions onnxruntime/core/providers/coreml/model/model.mm
@@ -32,6 +32,13 @@
using namespace onnxruntime::coreml;

namespace {
// Converts a UTF8 const char* to an NSString. Throws on failure.
NSString* _Nonnull Utf8StringToNSString(const char* utf8_str) {
NSString* result = [NSString stringWithUTF8String:utf8_str];
ORT_ENFORCE(result != nil, "NSString conversion failed.");
return result;
}

/**
* Computes the static output shape used to allocate the output tensor.
* `inferred_shape` is the inferred shape known at model compile time. It may contain dynamic dimensions (-1).
@@ -152,19 +159,20 @@ Status CreateInputFeatureProvider(const std::unordered_map<std::string, OnnxTens
deallocator:^(void* /* bytes */) {
}
error:&error];
ORT_RETURN_IF(error != nil,
ORT_RETURN_IF(multi_array == nil,
"Failed to create MLMultiArray for feature: ", name,
", error: ", [[error localizedDescription] UTF8String]);
(error != nil) ? MakeString(", error: ", [[error localizedDescription] UTF8String]) : "");

MLFeatureValue* feature_value = [MLFeatureValue featureValueWithMultiArray:multi_array];
NSString* feature_name = [NSString stringWithUTF8String:name.c_str()];
NSString* feature_name = Utf8StringToNSString(name.c_str());
feature_dictionary[feature_name] = feature_value;
}

auto* feature_provider = [[MLDictionaryFeatureProvider alloc] initWithDictionary:feature_dictionary
error:&error];
ORT_RETURN_IF(error != nil,
"Failed to create MLDictionaryFeatureProvider, error: ", [[error localizedDescription] UTF8String]);
ORT_RETURN_IF(feature_provider == nil,
"Failed to create MLDictionaryFeatureProvider",
(error != nil) ? MakeString(", error: ", [[error localizedDescription] UTF8String]) : "");

*feature_provider_out = feature_provider;
conversion_buffers_out = std::move(conversion_buffers);
@@ -251,7 +259,7 @@ - (Status)predict:(const std::unordered_map<std::string, OnnxTensorData>&)inputs
get_output_tensor_mutable_raw_data_fn
API_AVAILABLE_OS_VERSIONS;

@property MLModel* model API_AVAILABLE_OS_VERSIONS;
@property(nullable) MLModel* model API_AVAILABLE_OS_VERSIONS;

@end

@@ -297,12 +305,15 @@ - (void)dealloc {
- (Status)loadModel {
NSError* error = nil;
NSURL* modelUrl = [NSURL URLWithString:coreml_model_path_];
NSAssert(modelUrl != nil, @"modelUrl must not be nil");
if (modelUrl == nil) {
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Failed to create model URL from path");
}

NSURL* compileUrl = [MLModel compileModelAtURL:modelUrl error:&error];

if (error != nil) {
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Error compiling model ",
[[error localizedDescription] cStringUsingEncoding:NSUTF8StringEncoding]);
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Error compiling model: ",
[[error localizedDescription] UTF8String]);
}

compiled_model_path_ = [compileUrl path];
@@ -313,9 +324,9 @@ - (Status)loadModel {
: MLComputeUnitsAll;
_model = [MLModel modelWithContentsOfURL:compileUrl configuration:config error:&error];

if (error != NULL) {
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Error Creating MLModel ",
[[error localizedDescription] cStringUsingEncoding:NSUTF8StringEncoding]);
if (_model == nil) {
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Failed to create MLModel",
(error != nil) ? MakeString(", error: ", [[error localizedDescription] UTF8String]) : "");
}

return Status::OK();
@@ -327,7 +338,7 @@ - (Status)predict:(const std::unordered_map<std::string, OnnxTensorData>&)inputs
Status status = Status::OK();
ORT_TRY {
if (_model == nil) {
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "model is not loaded");
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Model is not loaded");
}

id<MLFeatureProvider> input_features;
@@ -342,12 +353,12 @@ - (Status)predict:(const std::unordered_map<std::string, OnnxTensorData>&)inputs

if (error != nil) {
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Error executing model: ",
[[error localizedDescription] cStringUsingEncoding:NSUTF8StringEncoding]);
[[error localizedDescription] UTF8String]);
}

for (const auto& [output_name, output_tensor_info] : outputs) {
MLFeatureValue* output_value =
[output_features featureValueForName:[NSString stringWithUTF8String:output_name.c_str()]];
[output_features featureValueForName:Utf8StringToNSString(output_name.c_str())];

if (output_value == nil) {
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "output_features has no value for ", output_name);
@@ -452,7 +463,7 @@ Status Predict(const std::unordered_map<std::string, OnnxTensorData>& inputs,
return status;
}

return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Execution::LoadModel requires macos 10.15+ or ios 13+ ");
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Execution::LoadModel requires macos 10.15+ or ios 13+");
}

Status Execution::Predict(const std::unordered_map<std::string, OnnxTensorData>& inputs,
@@ -468,7 +479,7 @@ Status Predict(const std::unordered_map<std::string, OnnxTensorData>& inputs,
}
}

return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Execution::LoadModel requires macos 10.15+ or ios 13+ ");
return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Execution::Predict requires macos 10.15+ or ios 13+");
}

Model::Model(const std::string& path, const logging::Logger& logger, uint32_t coreml_flags)
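
Stepping back, the recurring pattern in the model.mm changes is to treat a nil result as the failure signal and read the NSError out-parameter only in that case, since Cocoa conventions make the error meaningful only when the call fails; that is why the old error != nil and error != NULL checks were replaced. A minimal sketch of that convention, using a hypothetical helper and NSLog rather than the actual onnxruntime status handling:

#import <CoreML/CoreML.h>

// Sketch only: LoadCompiledModel is a made-up helper, not part of this PR.
API_AVAILABLE(macos(10.15), ios(13.0))
static MLModel* LoadCompiledModel(NSURL* compiledModelUrl) {
  MLModelConfiguration* config = [[MLModelConfiguration alloc] init];
  NSError* error = nil;
  MLModel* model = [MLModel modelWithContentsOfURL:compiledModelUrl
                                     configuration:config
                                             error:&error];
  if (model == nil) {
    // Failure is signaled by the nil result; `error` may still be nil, so guard the read.
    NSString* detail = (error != nil) ? [error localizedDescription] : @"(no error details)";
    NSLog(@"Failed to create MLModel: %@", detail);
    return nil;
  }
  // On success, `error` carries no meaningful information and is deliberately not read.
  return model;
}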
@@ -29,6 +29,11 @@ jobs:
--build --parallel --target onnx_proto
displayName: Generate compile_commands.json and ONNX protobuf files

- script: |
patch < "$(Build.SourcesDirectory)/cmake/patches/abseil/absl_gh_issue_1435_workaround.patch"
workingDirectory: "$(Build.BinariesDirectory)/Debug/_deps/abseil_cpp-src"
displayName: Apply absl_gh_issue_1435_workaround.patch

- script: |
set -e
