Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[DO NOT REVIEW][DO NOT MERGE] Removal of the ONNX dependency #27757

Open
wants to merge 11 commits into
base: master
Choose a base branch
from
3 changes: 0 additions & 3 deletions .gitmodules
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,6 @@
path = thirdparty/ocl/clhpp_headers
url = https://github.com/KhronosGroup/OpenCL-CLHPP.git
ignore = dirty
[submodule "thirdparty/onnx"]
path = thirdparty/onnx/onnx
url = https://github.com/onnx/onnx.git
[submodule "thirdparty/protobuf"]
path = thirdparty/protobuf/protobuf
url = https://github.com/protocolbuffers/protobuf.git
Expand Down
9 changes: 6 additions & 3 deletions src/frontends/onnx/frontend/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,11 @@ ov_add_frontend(NAME onnx
FILEDESCRIPTION "FrontEnd to load and convert ONNX file format"
LINK_LIBRARIES openvino_onnx_common openvino::core::dev)

# Enforce build order: when ONNX is not found on the system, onnx_common generates the onnx headers at build time, so it must be built before the frontend
if(NOT ONNX_FOUND)
add_dependencies(openvino_onnx_frontend openvino_onnx_common)
endif()

set(ONNX_OPSET_VERSION 21 CACHE INTERNAL "Supported version of ONNX operator set")
target_compile_definitions(${TARGET_NAME} PRIVATE ONNX_OPSET_VERSION=${ONNX_OPSET_VERSION})

Expand All @@ -85,6 +90,4 @@ if(BUILD_SHARED_LIBS)
endif()

ov_ncc_naming_style(FOR_TARGET ${TARGET_NAME}
SOURCE_DIRECTORIES "${${TARGET_NAME}_INCLUDE_DIR}"
DEFINITIONS
$<TARGET_PROPERTY:onnx,INTERFACE_COMPILE_DEFINITIONS>)
SOURCE_DIRECTORIES "${${TARGET_NAME}_INCLUDE_DIR}")
2 changes: 0 additions & 2 deletions src/frontends/onnx/frontend/src/core/graph.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -129,8 +129,6 @@ Graph::Graph(const std::string& model_dir,
m_ops_bridge{detail::init_ops_bridge(m_extensions.conversions)} {
m_model = common::make_unique<Model>(model_proto, detail::build_model_opset(*model_proto, m_ops_bridge));

transform::expand_onnx_functions(*model_proto);

std::map<std::string, Tensor> initializers;

// Process all initializers in the graph
Expand Down
119 changes: 0 additions & 119 deletions src/frontends/onnx/frontend/src/core/transform.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -9,132 +9,13 @@
# pragma warning(disable : 4244)
#endif

#include <onnx/defs/function.h>
#include <onnx/defs/schema.h>
#include <onnx/shape_inference/implementation.h>

#include <algorithm>

#include "core/model.hpp"
#include "core/transform.hpp"
#include "openvino/util/log.hpp"
#include "ops_bridge.hpp"

using namespace ::ONNX_NAMESPACE;

namespace ov {
namespace frontend {
namespace onnx {
namespace transform {
namespace {
// Looks up the ONNX type of tensor `name` inside `graph`.
// Checks, in order: graph inputs, initializers, and value_info entries;
// returns a default-constructed (empty) TypeProto when the name is unknown.
TypeProto get_input_type(std::string const& name, GraphProto& graph) {
    // Graph inputs carry an explicit type already.
    for (const auto& graph_input : graph.input()) {
        if (graph_input.name() == name) {
            return graph_input.type();
        }
    }
    // Initializers only store a data type and dims, so synthesize a TypeProto.
    for (const auto& init : graph.initializer()) {
        if (init.name() != name) {
            continue;
        }
        TypeProto synthesized;
        auto* tensor_type = synthesized.mutable_tensor_type();
        tensor_type->set_elem_type(init.data_type());

        auto* tensor_shape = tensor_type->mutable_shape();
        tensor_shape->clear_dim();
        for (auto&& dim_value : init.dims()) {
            tensor_shape->add_dim()->set_dim_value(dim_value);
        }
        return synthesized;
    }
    // Intermediate tensors may have been typed by shape inference.
    for (const auto& vi : graph.value_info()) {
        if (vi.name() == name) {
            return vi.type();
        }
    }
    return TypeProto();
}

// Expands `node` into the primitive operations described by `func_proto`,
// removes the original node from `graph`, and rotates the freshly appended
// expansion nodes into the position the removed node occupied.
void function_expand_and_remove_original_node(const NodeProto& node,
                                              const FunctionProto& func_proto,
                                              GraphProto* graph,
                                              int current_node_idx) {
    const auto size_before = graph->node().size();
    FunctionExpandHelper(node, func_proto, *graph);
    // The helper appends the expanded nodes at the end of the node list.
    const auto expanded_count = graph->node().size() - size_before;

    auto* nodes = graph->mutable_node();
    // Drop the original function-carrying node.
    nodes->erase(nodes->begin() + current_node_idx);

    // Slide the appended expansion nodes into the vacated slot.
    std::rotate(nodes->begin() + current_node_idx, nodes->end() - expanded_count, nodes->end());
}

} // namespace
} // namespace transform
} // namespace onnx
} // namespace frontend
} // namespace ov

// Replaces every node whose op_type is listed in onnx_functions_to_expand with
// the subgraph defined by that operator's ONNX function body (either a static
// function or a context-dependent one), modifying model_proto in place.
void ov::frontend::onnx::transform::expand_onnx_functions(ModelProto& model_proto) {
auto graph_proto = model_proto.mutable_graph();

for (int i = 0; i < graph_proto->node().size(); ++i) {
NodeProto node = graph_proto->node().Get(i);

// Check if node operation is one of the functions we want to expand
if (std::find(onnx_functions_to_expand.begin(), onnx_functions_to_expand.end(), node.op_type()) ==
onnx_functions_to_expand.end()) {
continue;
}

// Retrieve the operation schema from ONNX library
int opset_version = static_cast<int>(get_opset_version(model_proto, node.domain()));
const auto* schema_registry = OpSchemaRegistry::Instance();
const auto node_op_schema = schema_registry->GetSchema(node.op_type(), opset_version, node.domain());

// Check if operation schema found
if (!node_op_schema) {
continue;
}

// Check if operation schema contains a function body and expand function
if (node_op_schema->HasFunction()) {
const auto* func_proto = node_op_schema->GetFunction();
// Step the index back so that the nodes spliced in by the expansion are
// re-examined on the next iteration (an expanded body may itself contain
// operators that are functions).
function_expand_and_remove_original_node(node, *func_proto, graph_proto, i--);
}

else if (node_op_schema->HasContextDependentFunction()) {
// In order to expand a context-dependent function, we need to infer types
// NOTE: the catch clause below is selected by the preprocessor — with
// ENABLE_OPENVINO_DEBUG an inference failure is logged, otherwise it is
// silently ignored (shape inference is best-effort here).
try {
shape_inference::InferShapes(model_proto);
#ifdef ENABLE_OPENVINO_DEBUG
} catch (const std::exception& e) {
OPENVINO_WARN("ONNX ov::Shape inference failed: ", e.what());
}
#else
} catch (const std::exception&) {
}
#endif
// Collect the (possibly empty) types of all node inputs; the function
// body of a context-dependent operator is built from these types.
std::vector<TypeProto> input_types;
for (const auto& input : node.input()) {
input_types.push_back(get_input_type(input, *graph_proto));
}

FunctionBodyBuildContextImpl ctx(node, input_types);
FunctionProto func_proto;
node_op_schema->BuildContextDependentFunction(ctx, func_proto);
// Step the index back so the expanded nodes are re-examined as well.
function_expand_and_remove_original_node(node, func_proto, graph_proto, i--);
}
}
}

void ov::frontend::onnx::transform::fixup_legacy_operators(ModelProto& model_proto) {
auto graph_proto = model_proto.mutable_graph();
for (auto& node : *graph_proto->mutable_node()) {
Expand Down
16 changes: 0 additions & 16 deletions src/frontends/onnx/frontend/src/core/transform.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -13,22 +13,6 @@ namespace transform {

using ::ONNX_NAMESPACE::ModelProto;

static const std::vector<std::string> onnx_functions_to_expand = {"AffineGrid",
"Bernoulli",
"CenterCropPad",
"NegativeLogLikelihoodLoss",
"SoftmaxCrossEntropyLoss"};

/// \brief Replace nodes with expanded body of ONNX functions
///
/// Some ONNX operators are specified as functions, which can be expanded to
/// a subgraph or more primitive operations. This functions modifies the ONNX
/// model by replacing operations of types listed in onnx_functions_to_expand
/// with their expanded subgraphs.
///
/// \param model_proto Protobuf message with ONNX model to transform.
void expand_onnx_functions(ModelProto& model_proto);

static const std::vector<std::string> legacy_ops_to_fixup = {"DeformableConv2D",
"DetectionOutput",
"ExperimentalDetectronDetectionOutput",
Expand Down
41 changes: 1 addition & 40 deletions src/frontends/onnx/frontend/src/editor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
#include "editor.hpp"

#include <onnx/onnx_pb.h>
#include <onnx/shape_inference/implementation.h>

#include <fstream>

Expand Down Expand Up @@ -250,40 +249,6 @@ void graph_topological_sort(GraphProto* graph) {
graph->mutable_node()->Swap(result.mutable_node());
}
}

// Scope guard around ONNX shape inference: runs InferShapes() on demand and
// clears the value_info entries it produced when the guard leaves scope
// (or immediately, if inference fails part-way through).
class InferShapesAutoRelease {
public:
    InferShapesAutoRelease(std::shared_ptr<ModelProto> model_proto)
        : m_model{model_proto},
          m_needs_cleanup{false} {}

    ~InferShapesAutoRelease() {
        if (m_needs_cleanup) {
            release();
        }
    }

    // Returns true when shape inference completed successfully; on any
    // failure the partially inferred shapes are released right away.
    bool infer_shapes() {
        try {
            // the external onnx library may throw unexpected exceptions
            shape_inference::InferShapes(*m_model);
            m_needs_cleanup = true;
        } catch (...) {
            release();
        }
        return m_needs_cleanup;
    }

    // Drops inferred value_info from the graph; swallows all errors.
    void release() {
        try {
            m_model->mutable_graph()->clear_value_info();
        } catch (...) {
        }
    }

private:
    std::shared_ptr<ModelProto> m_model;
    bool m_needs_cleanup;
};
} // namespace

/// \brief A helper class used to hold the ModelProto object as its field
Expand Down Expand Up @@ -411,7 +376,6 @@ PartialShape ONNXModelEditor::get_tensor_shape(const std::string& tensor_name) c
const ValueInfoProto* value_info = nullptr;
const TensorProto* tensor = nullptr;
const auto onnx_graph = m_pimpl->m_model_proto->mutable_graph();
InferShapesAutoRelease onnx_shapes(m_pimpl->m_model_proto);
if (const auto input = find_graph_input(*onnx_graph, tensor_name)) {
value_info = input;
} else if (const auto output = find_graph_output(*onnx_graph, tensor_name)) {
Expand All @@ -421,7 +385,7 @@ PartialShape ONNXModelEditor::get_tensor_shape(const std::string& tensor_name) c
} else if (const auto initializer = find_graph_initializer(*onnx_graph, tensor_name)) {
tensor = initializer;
} else {
auto shape_infer_applied = onnx_shapes.infer_shapes();
auto shape_infer_applied = false;
if (!shape_infer_applied) {
OPENVINO_WARN("Cannot replace existing shapes during get_tensor_shape");
return PartialShape::dynamic();
Expand Down Expand Up @@ -460,9 +424,6 @@ void ONNXModelEditor::extract_subgraph(const std::vector<InputEdge>& inputs,
m_pimpl->m_model_proto->mutable_graph()->mutable_output()->Clear();
}

InferShapesAutoRelease onnx_shapes(m_pimpl->m_model_proto);
onnx_shapes.infer_shapes();

SubgraphExtractor editor{*(m_pimpl->m_model_proto->mutable_graph())};
editor.add_new_inputs(inputs, merge_inputs);
editor.add_new_outputs(outputs);
Expand Down
61 changes: 58 additions & 3 deletions src/frontends/onnx/onnx_common/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,43 @@
#

set(TARGET_NAME "openvino_onnx_common")
set(OV_ONNX_NAMESPACE openvino_onnx)


file(GLOB_RECURSE LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/*.cpp)
file(GLOB_RECURSE PUBLIC_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/include/*.hpp)

if(NOT ONNX_FOUND)
    # No system / submodule ONNX available: generate the protobuf sources at
    # build time for every '.proto' file under src/proto
    message(STATUS "Generating ONNX protobuf sources from ${CMAKE_CURRENT_SOURCE_DIR}/src/proto")
    set(protofiles_root_dir "${CMAKE_CURRENT_SOURCE_DIR}/src/proto")
    file(GLOB_RECURSE proto_files "${protofiles_root_dir}/*.proto")

    foreach(proto_file IN LISTS proto_files)
        # Path used only for the build-log COMMENT below
        file(RELATIVE_PATH proto_file_relative "${CMAKE_SOURCE_DIR}" "${proto_file}")
        get_filename_component(FILE_WE "${proto_file}" NAME_WE)
        # Mirror the source sub-directory layout inside the binary dir
        file(RELATIVE_PATH relative_path "${protofiles_root_dir}" "${proto_file}")
        get_filename_component(relative_path "${relative_path}" DIRECTORY)
        set(OUTPUT_PB_SRC "${CMAKE_CURRENT_BINARY_DIR}/${relative_path}/${FILE_WE}.pb.cc")
        set(OUTPUT_PB_HEADER "${CMAKE_CURRENT_BINARY_DIR}/${relative_path}/${FILE_WE}.pb.h")
        add_custom_command(
            OUTPUT "${OUTPUT_PB_SRC}" "${OUTPUT_PB_HEADER}"
            COMMAND ${PROTOC_EXECUTABLE} --cpp_out "${CMAKE_CURRENT_BINARY_DIR}" -I "${protofiles_root_dir}" "${proto_file}"
            DEPENDS ${PROTOC_DEPENDENCY} "${proto_file}"
            COMMENT "Running C++ protocol buffer compiler (${PROTOC_EXECUTABLE}) on ${proto_file_relative}"
            VERBATIM)
        list(APPEND LIBRARY_SRC "${OUTPUT_PB_SRC}")
        list(APPEND PUBLIC_HEADERS "${OUTPUT_PB_HEADER}")
    endforeach()
endif()

# Create named folders for the sources within the .vcproj
# Empty name lists them directly under the .vcproj

source_group("src" FILES ${LIBRARY_SRC})
# Fix: ${PUBLIC_HEADERS} was passed twice to the "include" group
source_group("include" FILES ${PUBLIC_HEADERS})

# Create static library

add_library(${TARGET_NAME} STATIC ${LIBRARY_SRC} ${PUBLIC_HEADERS})

set_target_properties(${TARGET_NAME} PROPERTIES
Expand All @@ -26,12 +51,42 @@ set(ONNX_COMMON_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src)
target_include_directories(${TARGET_NAME}
PUBLIC $<BUILD_INTERFACE:${ONNX_COMMON_INCLUDE_DIR}>
$<INSTALL_INTERFACE:${FRONTEND_INSTALL_INCLUDE}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}>
PRIVATE ${ONNX_COMMON_SRC_DIR})

target_link_libraries(${TARGET_NAME} PRIVATE openvino::runtime openvino::util)

ov_link_system_libraries(${TARGET_NAME} PUBLIC onnx_proto onnx)
if(ONNX_FOUND)
    # A system/submodule ONNX is available — link its targets directly
    ov_link_system_libraries(${TARGET_NAME} PUBLIC onnx_proto onnx)
else()
    # Compile our generated protobuf code into a private namespace so it
    # cannot clash with a system-provided ONNX in the same process
    target_compile_definitions(${TARGET_NAME} PUBLIC ONNX_NAMESPACE=${OV_ONNX_NAMESPACE})
    if(ENABLE_SYSTEM_PROTOBUF)
        # Fix: previously expanded the still-undefined protobuf_target_name,
        # producing the broken target name "protobuf::"
        set(protobuf_target_name protobuf::libprotobuf)
    else()
        set(protobuf_target_name libprotobuf)
    endif()

    message(STATUS "Linking protobuf target to ${TARGET_NAME}: ${protobuf_target_name}")
    ov_link_system_libraries(${TARGET_NAME} PUBLIC ${protobuf_target_name})
endif()

ov_add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})

# Files produced by protobuf may contain missing declarations
if(CMAKE_COMPILER_IS_GNUCXX OR OV_COMPILER_IS_CLANG OR (OV_COMPILER_IS_INTEL_LLVM AND UNIX))
target_compile_options(${TARGET_NAME} PRIVATE $<$<COMPILE_LANGUAGE:CXX>:-Wno-missing-declarations>)
endif()
# Protobuf generates files which cause the following warnings
if(SUGGEST_OVERRIDE_SUPPORTED)
target_compile_options(${TARGET_NAME} PRIVATE $<$<COMPILE_LANGUAGE:CXX>:-Wno-suggest-override>)
endif()
if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# 4244 conversion from 'XXX' to 'YYY', possible loss of data
ov_add_compiler_flags(/wd4244)
# '<': signed/unsigned mismatch
ov_add_compiler_flags(/wd4018)
endif()

ov_install_static_lib(${TARGET_NAME} ${OV_CPACK_COMP_CORE})
5 changes: 5 additions & 0 deletions src/frontends/onnx/onnx_common/include/onnx/onnx_pb.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
// Copyright (C) 2018-2024 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

// Compatibility shim: keeps `#include <onnx/onnx_pb.h>` working after the
// removal of the ONNX submodule by forwarding to the protobuf-generated header.
#include "onnx/onnx.pb.h"
Loading
Loading