Fix OpenVINO CI issues
sspintel committed Oct 31, 2023
1 parent e223464 commit 2efda9a
Showing 7 changed files with 43 additions and 44 deletions.
2 changes: 1 addition & 1 deletion onnxruntime/core/providers/openvino/backend_manager.cc
@@ -105,7 +105,7 @@ BackendManager::BackendManager(const onnxruntime::Node& fused_node,
bool BackendManager::ModelHasBatchedInputs(const ONNX_NAMESPACE::ModelProto& model_proto) const {
bool has_batched_inputs = true;

-for (int i = 0; i < static_cast<int>(subgraph_context_.input_indexes).size(); i++) {
+for (int i = 0; i < static_cast<int>(subgraph_context_.input_indexes.size()); i++) {
auto& input = model_proto.graph().input(subgraph_context_.input_indexes[i]);

// Batch-process only raw image inputs (NCHW or NHWC layouts)
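Why the deleted line failed to compile: static_cast<int>(subgraph_context_.input_indexes) tries to convert the vector itself to int, and .size() is then called on that int, so neither step is valid C++. Below is a minimal sketch of the misplaced parenthesis, using an illustrative vector in place of the repository's types:

#include <cstdio>
#include <vector>

int main() {
  std::vector<int> input_indexes = {3, 1, 4};

  // Broken form: casts the vector itself to int, then calls .size() on the
  // resulting int -- neither the conversion nor the call compiles.
  // int n = static_cast<int>(input_indexes).size();

  // Fixed form: call .size() first (yields size_t), then narrow that to int.
  for (int i = 0; i < static_cast<int>(input_indexes.size()); i++) {
    std::printf("input_indexes[%d] = %d\n", i, input_indexes[i]);
  }
  return 0;
}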
2 changes: 1 addition & 1 deletion onnxruntime/core/providers/openvino/backend_utils.h
@@ -27,7 +27,7 @@
namespace onnxruntime {
namespace openvino_ep {
namespace backend_utils {
-const char log_tag[] = "[OpenVINO-EP] ";
+const std::string log_tag = "[OpenVINO-EP] ";

#ifndef NDEBUG
bool IsDebugEnabled();
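The log_tag change presumably restores std::string so that message building with operator+ keeps compiling elsewhere in the EP: with a char array, log_tag + "..." is pointer arithmetic rather than concatenation. Note that cpplint ordinarily prefers const char[] for global constants, so this trades a lint preference for working concatenation. A minimal sketch of the difference, with an illustrative message:

#include <iostream>
#include <string>

const std::string log_tag = "[OpenVINO-EP] ";
const char log_tag_array[] = "[OpenVINO-EP] ";

int main() {
  std::string detail = "model compiled";

  // A std::string on the left makes operator+ perform real concatenation.
  std::cout << log_tag + "INFO: " + detail << '\n';

  // With the char array, the same expression is invalid pointer addition:
  // std::cout << log_tag_array + "INFO: ";  // error: cannot add two pointers

  (void)log_tag_array;  // silences the unused-variable warning in this sketch
  return 0;
}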
2 changes: 1 addition & 1 deletion onnxruntime/core/providers/openvino/backends/basic_backend.cc
@@ -217,7 +217,7 @@ void BasicBackend::StartAsyncInference(Ort::KernelContext& context, OVInferReque
// avoid input copies on the CPU device
if (global_context_.device_type.find("CPU") != std::string::npos) {
tensor_ptr = std::make_shared<ov::Tensor>(input->get_element_type(), input_tensor_shape,
-reinterpret_cast<void*>(tensor_data));
+(void*)tensor_data);

Check warning on line 220 in onnxruntime/core/providers/openvino/backends/basic_backend.cc (GitHub Actions / cpplint):
onnxruntime/core/providers/openvino/backends/basic_backend.cc:220:  Using C-style cast.  Use reinterpret_cast<void*>(...) instead  [readability/casting] [4]
} else {
tensor_ptr = std::make_shared<ov::Tensor>(input->get_element_type(), input_tensor_shape);
FillInputBlob(tensor_ptr, batch_slice_idx, input_name, context, subgraph_context_);
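Reverting to a C-style cast despite the cpplint warning above is most plausibly a constness issue: if tensor_data is a pointer-to-const (for example, the result of ORT's GetTensorRawData()), reinterpret_cast<void*> cannot strip the const and the build fails, while a C-style cast (or const_cast) can. A minimal sketch under that assumption:

#include <cstdio>

int main() {
  const float buffer[4] = {1.0f, 2.0f, 3.0f, 4.0f};
  const void* tensor_data = buffer;  // stands in for a const tensor pointer

  // reinterpret_cast may not remove const -- this line does not compile:
  // void* p = reinterpret_cast<void*>(tensor_data);

  void* p1 = (void*)tensor_data;              // compiles, but cpplint warns
  void* p2 = const_cast<void*>(tensor_data);  // the named-cast alternative

  std::printf("%p %p\n", p1, p2);
  return 0;
}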
75 changes: 37 additions & 38 deletions onnxruntime/core/providers/openvino/contexts.h
@@ -6,47 +6,46 @@
#include <vector>
#include <unordered_map>
#include <string>
-s
#include "ov_interface.h"

namespace onnxruntime {
namespace openvino_ep {

// Holds context applicable to the entire EP instance.
struct GlobalContext {
  OVCore ie_core;
  bool is_wholly_supported_graph = false;
  bool enable_npu_fast_compile = false;
  bool enable_opencl_throttling = false;
  bool enable_dynamic_shapes = false;
  size_t num_of_threads;
  std::string device_type;
  std::string precision_str;
  std::string device_id;
  std::string cache_dir;
  int num_streams;
  std::vector<bool> deviceAvailableList = {true, true, true, true, true, true, true, true};
  std::vector<std::string> deviceTags = {"0", "1", "2", "3", "4", "5", "6", "7"};
  std::string onnx_model_name;
  std::string onnx_model_path_name;
  int onnx_opset_version;
  void* context = 0;
  bool use_api_2;
};

// Holds context specific to subgraph.
struct SubGraphContext {
  bool has_dynamic_input_shape = false;
  bool enable_batching = false;
  bool set_npu_config = false;
  bool is_constant = false;
  void* context = 0;
  std::string subgraph_name;
  std::vector<int> input_indexes;
  std::unordered_map<std::string, int> input_names;
  std::unordered_map<std::string, int> output_names;
  std::string precision;
};

}  // namespace openvino_ep
}  // namespace onnxruntime
2 changes: 1 addition & 1 deletion onnxruntime/core/providers/openvino/openvino_provider_factory.cc
@@ -107,7 +107,7 @@ struct OpenVINO_Provider : Provider {

if (provider_options_map.find("context") != provider_options_map.end()) {
std::string str = provider_options_map.at("context");
-unsigned int64_t number = std::strtoull(str.c_str(), nullptr, 16);
+uint64_t number = std::strtoull(str.c_str(), nullptr, 16);
context = reinterpret_cast<void*>(number);
}

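unsigned int64_t is not a valid type: unsigned may only qualify the built-in integer keywords, not a typedef like int64_t, so the deleted line is a hard compile error. The fixed-width unsigned type is uint64_t from <cstdint>. A minimal sketch of the fixed pattern, using a hypothetical handle string:

#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <string>

int main() {
  std::string str = "0x7f2a10004000";  // hypothetical hex-encoded handle

  // unsigned int64_t number = ...;  // error: 'unsigned' cannot be applied
  //                                 // to the typedef name int64_t
  uint64_t number = std::strtoull(str.c_str(), nullptr, 16);  // base 16 accepts 0x
  void* context = reinterpret_cast<void*>(number);

  std::printf("parsed handle: %p\n", context);
  return 0;
}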
2 changes: 1 addition & 1 deletion onnxruntime/core/providers/openvino/ov_interface.cc
@@ -17,7 +17,7 @@ using WaitMode = InferenceEngine::IInferRequest::WaitMode;
namespace onnxruntime {
namespace openvino_ep {

-const char log_tag[] = "[OpenVINO-EP] ";
+const std::string log_tag = "[OpenVINO-EP] ";
std::shared_ptr<OVNetwork> OVCore::ReadModel(const std::string& model) const {
try {
OVTensor weights;
2 changes: 1 addition & 1 deletion onnxruntime/core/providers/openvino/ov_versions/data_ops.cc
@@ -1190,7 +1190,7 @@ bool DataOps::node_is_supported(const std::map<std::string, std::set<std::string
if (opset->second.find(optype) == opset->second.end() && op_fun == ops_supported_as_function.end()) {
#ifndef NDEBUG
if (openvino_ep::backend_utils::IsDebugEnabled()) {
std::cout << "The operator is not available in OpenVINO ngraph operators list
std::cout << "The operator is not available in OpenVINO ngraph operators list"
<< "nor the operator is a special ONNX function"
<< std::endl;
}
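The deleted line never closes its string literal, so the literal swallows the rest of the source line and compilation fails with a missing-terminator error. One side effect worth noting in the fixed line: streaming two adjacent literals inserts no separator, so the message still prints as "...operators listnor the operator...". A minimal sketch of both points:

#include <iostream>

int main() {
  // Unterminated literal: the compiler reports a missing terminating '"'.
  // std::cout << "The operator is not available in OpenVINO ngraph operators list
  //           << "nor the operator is a special ONNX function" << std::endl;

  std::cout << "The operator is not available in OpenVINO ngraph operators list"
            << "nor the operator is a special ONNX function"  // joins as "listnor"
            << std::endl;
  return 0;
}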
