Skip to content

Commit

Permalink
Fix pylint issues
Browse files Browse the repository at this point in the history
  • Loading branch information
preetha-intel committed Jul 23, 2024
1 parent 7fe61d6 commit bd433e2
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 10 deletions.
16 changes: 8 additions & 8 deletions onnxruntime/core/providers/openvino/backends/basic_backend.cc
Original file line number Diff line number Diff line change
Expand Up @@ -95,14 +95,14 @@ BasicBackend::BasicBackend(const ONNX_NAMESPACE::ModelProto& model_proto,
} else if ((!subgraph_context_.has_dynamic_input_shape) &&
((hw_target.find("AUTO") == std::string::npos) ||
(global_context_.OpenVINO_Version.at(0) >= 2024 && global_context_.OpenVINO_Version.at(1) > 2))) {
// Optimized OV compile_model API is supported with AUTO from version 2024.3 and above
// Inputs with static dimensions
const std::string model = model_proto.SerializeAsString();
exe_network_ = global_context_.ie_core.CompileModel(model,
hw_target,
device_config,
subgraph_context_.subgraph_name);
ie_cnn_network_ = exe_network_.Get().get_runtime_model();
// Optimized OV compile_model API is supported with AUTO from version 2024.3 and above
// Inputs with static dimensions
const std::string model = model_proto.SerializeAsString();
exe_network_ = global_context_.ie_core.CompileModel(model,
hw_target,
device_config,
subgraph_context_.subgraph_name);
ie_cnn_network_ = exe_network_.Get().get_runtime_model();
} else { // For all other types use ov::Model Type
ie_cnn_network_ = CreateOVModel(model_proto, global_context_, const_outputs_map_);
exe_network_ = global_context_.ie_core.CompileModel(
Expand Down
4 changes: 2 additions & 2 deletions onnxruntime/core/providers/openvino/ov_interface.cc
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ OVExeNetwork OVCore::CompileModel(const std::string& onnx_model,
const std::string& name) {
ov::CompiledModel obj;
try {
obj = oe.compile_model(onnx_model, ov::Tensor(), hw_target, device_config);
obj = oe.compile_model(onnx_model, ov::Tensor(), hw_target, device_config);
#ifndef NDEBUG
printDebugInfo(obj);
#endif
Expand Down Expand Up @@ -138,7 +138,7 @@ OVExeNetwork OVCore::ImportModel(std::shared_ptr<std::istringstream> model_strea
}

void OVCore::SetCache(const std::string& cache_dir_path) {
oe.set_property(ov::cache_dir(cache_dir_path));
oe.set_property(ov::cache_dir(cache_dir_path));
}

#ifdef IO_BUFFER_ENABLED
Expand Down

0 comments on commit bd433e2

Please sign in to comment.