diff --git a/src/bindings/python/src/pyopenvino/frontend/extension.cpp b/src/bindings/python/src/pyopenvino/frontend/extension.cpp
index 01c36e22e0b..1856f0ad111 100644
--- a/src/bindings/python/src/pyopenvino/frontend/extension.cpp
+++ b/src/bindings/python/src/pyopenvino/frontend/extension.cpp
@@ -8,14 +8,13 @@
 #include
 
 #include "extension/json_config.hpp"
-#include "manager.hpp"
 #include "openvino/frontend/exception.hpp"
 #include "openvino/frontend/extension/conversion.hpp"
 #include "openvino/frontend/extension/decoder_transformation.hpp"
 #include "openvino/frontend/extension/op.hpp"
 #include "openvino/frontend/extension/progress_reporter.hpp"
 #include "openvino/frontend/extension/telemetry.hpp"
-#include "pyopenvino/graph/model.hpp"
+#include "pyopenvino/utils/utils.hpp"
 
 namespace py = pybind11;
@@ -130,7 +129,7 @@ void regclass_frontend_OpExtension(py::module m) {
                         const std::map& attr_values_map) {
                         std::map any_map;
                         for (const auto& it : attr_values_map) {
-                            any_map[it.first] = it.second;
+                            any_map[it.first] = py_object_to_any(it.second);
                         }
                         return std::make_shared>(fw_type_name, attr_names_map, any_map);
                     }),
@@ -144,8 +143,9 @@ void regclass_frontend_OpExtension(py::module m) {
                         const std::map& attr_values_map) {
                         std::map any_map;
                         for (const auto& it : attr_values_map) {
-                            any_map[it.first] = it.second;
+                            any_map[it.first] = py_object_to_any(it.second);
                         }
+
                         return std::make_shared>(ov_type_name, fw_type_name, attr_names_map, any_map);
                     }),
         py::arg("ov_type_name"),
diff --git a/src/bindings/python/src/pyopenvino/frontend/frontend_module.cmake b/src/bindings/python/src/pyopenvino/frontend/frontend_module.cmake
index 8dbf311d8f2..77196d1fddd 100644
--- a/src/bindings/python/src/pyopenvino/frontend/frontend_module.cmake
+++ b/src/bindings/python/src/pyopenvino/frontend/frontend_module.cmake
@@ -1,4 +1,4 @@
-# Copyright (C) 2021 Intel Corporation
+# Copyright (C) 2018-2022 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 #
 
@@ -19,7 +19,8 @@ function(frontend_module TARGET FRAMEWORK INSTALL_COMPONENT)
     add_dependencies(${TARGET_NAME} pyopenvino)
 
-    target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}")
+    target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}"
+                                                      "${PYTHON_SOURCE_DIR}/pyopenvino/utils/")
 
     target_link_libraries(${TARGET_NAME} PRIVATE openvino::runtime openvino::frontend::${FRAMEWORK})
 
     # Compatibility with python 2.7 which has deprecated "register" specifier
diff --git a/src/bindings/python/src/pyopenvino/frontend/node_context.cpp b/src/bindings/python/src/pyopenvino/frontend/node_context.cpp
index 074ba394138..0a1451c0bd1 100644
--- a/src/bindings/python/src/pyopenvino/frontend/node_context.cpp
+++ b/src/bindings/python/src/pyopenvino/frontend/node_context.cpp
@@ -72,7 +72,7 @@ void regclass_frontend_NodeContext(py::module m) {
     CAST_TO_PY(any, dtype, int64_t);
     CAST_TO_PY(any, dtype, bool);
     CAST_TO_PY(any, dtype, std::string);
-    CAST_TO_PY(any, dtype, float);
+    CAST_TO_PY(any, dtype, double);
     CAST_TO_PY(any, dtype, ov::element::Type);
     CAST_TO_PY(any, dtype, ov::PartialShape);
 
@@ -83,7 +83,7 @@ void regclass_frontend_NodeContext(py::module m) {
     CAST_VEC_TO_PY(any, dtype, std::vector);
 #endif
     CAST_VEC_TO_PY(any, dtype, std::vector);
-    CAST_VEC_TO_PY(any, dtype, std::vector<float>);
+    CAST_VEC_TO_PY(any, dtype, std::vector<double>);
    CAST_VEC_TO_PY(any, dtype, std::vector);
     CAST_VEC_TO_PY(any, dtype, std::vector);
diff --git a/src/bindings/python/src/pyopenvino/frontend/onnx/extension.cpp b/src/bindings/python/src/pyopenvino/frontend/onnx/extension.cpp
index d1b1e0ad60e..736085c220a 100644
--- a/src/bindings/python/src/pyopenvino/frontend/onnx/extension.cpp
+++ b/src/bindings/python/src/pyopenvino/frontend/onnx/extension.cpp
@@ -3,6 +3,7 @@
 //
 
 #include "extension.hpp"
+#include "utils.hpp"
 
 #include
 #include
@@ -52,9 +53,10 @@ void regclass_frontend_onnx_OpExtension(py::module m) {
     ext.def(py::init([](const std::string& fw_type_name,
                         const std::map& attr_names_map,
                         const std::map& attr_values_map) {
+                        std::map any_map;
                         for (const auto& it : attr_values_map) {
-                            any_map[it.first] = it.second;
+                            any_map[it.first] = py_object_to_any(it.second);
                         }
                         return std::make_shared>(fw_type_name, attr_names_map, any_map);
                     }),
             py::arg("fw_type_name"),
@@ -65,9 +67,10 @@ void regclass_frontend_onnx_OpExtension(py::module m) {
                         const std::string& fw_type_name,
                         const std::map& attr_names_map,
                         const std::map& attr_values_map) {
+                        std::map any_map;
                         for (const auto& it : attr_values_map) {
-                            any_map[it.first] = it.second;
+                            any_map[it.first] = py_object_to_any(it.second);
                         }
                         return std::make_shared>(ov_type_name, fw_type_name, attr_names_map, any_map);
                     }),
diff --git a/src/bindings/python/src/pyopenvino/frontend/tensorflow/extension.cpp b/src/bindings/python/src/pyopenvino/frontend/tensorflow/extension.cpp
index 0ffb7ffbbc6..8ebae2e4330 100644
--- a/src/bindings/python/src/pyopenvino/frontend/tensorflow/extension.cpp
+++ b/src/bindings/python/src/pyopenvino/frontend/tensorflow/extension.cpp
@@ -3,6 +3,7 @@
 //
 
 #include "extension.hpp"
+#include "utils.hpp"
 
 #include
 #include
@@ -52,7 +53,7 @@ void regclass_frontend_tensorflow_OpExtension(py::module m) {
                         const std::map& attr_values_map) {
                         std::map any_map;
                         for (const auto& it : attr_values_map) {
-                            any_map[it.first] = it.second;
+                            any_map[it.first] = py_object_to_any(it.second);
                         }
                         return std::make_shared>(fw_type_name, attr_names_map, any_map);
                     }),
             py::arg("fw_type_name"),
@@ -65,7 +66,7 @@ void regclass_frontend_tensorflow_OpExtension(py::module m) {
                         const std::map& attr_values_map) {
                         std::map any_map;
                         for (const auto& it : attr_values_map) {
-                            any_map[it.first] = it.second;
+                            any_map[it.first] = py_object_to_any(it.second);
                         }
                         return std::make_shared>(ov_type_name, fw_type_name, attr_names_map, any_map);
                     }),
diff --git a/src/bindings/python/src/pyopenvino/utils/utils.hpp b/src/bindings/python/src/pyopenvino/utils/utils.hpp
new file mode 100644
index 00000000000..4d862594f00
--- /dev/null
+++ b/src/bindings/python/src/pyopenvino/utils/utils.hpp
@@ -0,0 +1,62 @@
+// Copyright (C) 2018-2022 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include
+#include
+
+ov::Any py_object_to_any(const pybind11::object& py_obj) {
+    if (pybind11::isinstance(py_obj)) {
+        return py_obj.cast();
+    } else if (pybind11::isinstance(py_obj)) {
+        return py_obj.cast();
+    } else if (pybind11::isinstance(py_obj)) {
+        return py_obj.cast();
+    } else if (pybind11::isinstance(py_obj)) {
+        return py_obj.cast();
+    } else if (pybind11::isinstance(py_obj)) {
+        auto _list = py_obj.cast();
+        enum class PY_TYPE : int {
+            UNKNOWN = 0,
+            STR,
+            INT,
+            FLOAT,
+            BOOL
+        };
+        PY_TYPE detected_type = PY_TYPE::UNKNOWN;
+        for (const auto& it : _list) {
+            auto check_type = [&](PY_TYPE type) {
+                if (detected_type == PY_TYPE::UNKNOWN || detected_type == type) {
+                    detected_type = type;
+                    return;
+                }
+                OPENVINO_ASSERT("Incorrect attribute. Mixed types in the list are not allowed.");
+            };
+            if (pybind11::isinstance(it)) {
+                check_type(PY_TYPE::STR);
+            } else if (pybind11::isinstance(it)) {
+                check_type(PY_TYPE::INT);
+            } else if (pybind11::isinstance(it)) {
+                check_type(PY_TYPE::FLOAT);
+            } else if (pybind11::isinstance(it)) {
+                check_type(PY_TYPE::BOOL);
+            }
+        }
+
+        switch (detected_type) {
+        case PY_TYPE::STR:
+            return _list.cast>();
+        case PY_TYPE::FLOAT:
+            return _list.cast>();
+        case PY_TYPE::INT:
+            return _list.cast>();
+        case PY_TYPE::BOOL:
+            return _list.cast>();
+        default:
+            OPENVINO_ASSERT(false, "Unsupported attribute type.");
+        }
+    }
+    OPENVINO_ASSERT(false, "Unsupported attribute type.");
+}
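For context, a minimal Python sketch of the usage this helper enables (it mirrors the tests added later in this patch; it assumes the ONNX frontend is available and that the test model "model_op_extension.onnx" from those tests has been generated):

    from openvino.frontend.onnx import OpExtension
    from openvino.runtime import Core

    core = Core()
    # Attribute values may be plain Python scalars or homogeneous lists of
    # str/int/float/bool; py_object_to_any() converts them to ov::Any.
    core.add_extension(OpExtension("AvgPool", "AveragePool", {},
                                   {"kernel": [2, 2],
                                    "strides": [2, 2],
                                    "pads_begin": [0, 0],
                                    "pads_end": [1, 1],
                                    "exclude-pad": True,
                                    "auto_pad": "same_upper",
                                    "rounding_type": "floor"}))
    model = core.read_model("model_op_extension.onnx")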
diff --git a/src/bindings/python/tests/test_frontend/test_frontend_onnx.py b/src/bindings/python/tests/test_frontend/test_frontend_onnx.py
index e1b013cb159..74914ee2aef 100644
--- a/src/bindings/python/tests/test_frontend/test_frontend_onnx.py
+++ b/src/bindings/python/tests/test_frontend/test_frontend_onnx.py
@@ -91,6 +91,42 @@ def create_onnx_model_with_custom_attributes():
     return make_model(graph, producer_name="ngraph ONNX Importer")
 
 
+def create_onnx_model_for_op_extension():
+    # operation with double attribute
+    elu = onnx.helper.make_node("Elu", alpha=1.0, inputs=["x"], outputs=["elu"])
+
+    # operation with vector, enum, bool attributes
+    avg_pool = onnx.helper.make_node("AveragePool", kernel_shape=[2, 2], auto_pad="SAME_LOWER",
+                                     strides=[2, 2],
+                                     inputs=["elu"], outputs=["avg_pool"])
+
+    # operation with no attributes
+    floor = onnx.helper.make_node("Floor", inputs=["avg_pool"], outputs=["floor"])
+
+    # operation with int64_t attribute
+    concat = onnx.helper.make_node("Concat", axis=0, inputs=["floor", "avg_pool"], outputs=["concat"])
+
+    const_tensor = onnx.helper.make_tensor("const_tensor",
+                                           onnx.TensorProto.FLOAT,
+                                           [1],
+                                           [0.5])
+
+    const_node = onnx.helper.make_node("Constant", [], outputs=["const_node"],
+                                       value=const_tensor, name="const_node")
+    # operation with enum attribute
+    mul = onnx.helper.make_node("Mul", inputs=["concat", "const_node"], outputs=["mul"])
+
+    # operation with element::type (class) attribute
+    cast = onnx.helper.make_node("Cast", to=int(onnx.TensorProto.FLOAT), inputs=["mul"], outputs=["out"])
+    input_tensors = [
+        make_tensor_value_info("x", onnx.TensorProto.FLOAT, (1, 3, 32, 32)),
+    ]
+    output_tensors = [make_tensor_value_info("out", onnx.TensorProto.FLOAT, (3, 3, 32, 32))]
+    graph = make_graph([const_node, elu, avg_pool, floor, concat, mul, cast], "graph",
+                       input_tensors, output_tensors)
+    return make_model(graph, producer_name="ngraph ONNX Importer")
+
+
 def run_function(function, *inputs, expected):
     runtime = get_runtime()
     computation = runtime.computation(function)
@@ -106,6 +142,7 @@ fem = FrontEndManager()
 onnx_model_filename = "model.onnx"
 onnx_model_with_custom_attributes_filename = "model_custom_attributes.onnx"
 onnx_model_with_subgraphs_filename = "model_subgraphs.onnx"
+onnx_model_for_op_extension_test = "model_op_extension.onnx"
 ONNX_FRONTEND_NAME = "onnx"
 
@@ -114,12 +151,14 @@ def setup_module():
     onnx.save_model(create_onnx_model_with_custom_attributes(),
                     onnx_model_with_custom_attributes_filename)
     onnx.save_model(create_onnx_model_with_subgraphs(), onnx_model_with_subgraphs_filename)
+    onnx.save_model(create_onnx_model_for_op_extension(), onnx_model_for_op_extension_test)
 
 
 def teardown_module():
     os.remove(onnx_model_filename)
     os.remove(onnx_model_with_custom_attributes_filename)
     os.remove(onnx_model_with_subgraphs_filename)
+    os.remove(onnx_model_for_op_extension_test)
 
 
 def skip_if_onnx_frontend_is_disabled():
@@ -425,7 +464,8 @@ def test_onnx_conversion_extension():
     assert invoked
 
 
-def test_op_extension_via_onnx_extension():
+@pytest.mark.parametrize("opset_prefix", ["opset1.", "opset1::", "opset8.", "opset8::", ""])
+def test_op_extension_specify_opset(opset_prefix):
     skip_if_onnx_frontend_is_disabled()
 
     # use specific (openvino.frontend.onnx) import here
@@ -433,47 +473,123 @@
     from openvino.runtime import Core
 
     ie = Core()
-    ie.add_extension(OpExtension("FW_OV_OP"))
-    ie.add_extension(OpExtension("OV_OP", "FW_OP_1"))
-    ie.add_extension(OpExtension("OV_OP", "FW_OP_2", {"ov_attribute_1": "fw_attribute_1",
-                                                      "ov_attribute_2": "fw_attribute_2"}))
-    ie.add_extension(OpExtension("OV_OP", "FW_OP_3", {"ov_attribute_1": "fw_attribute_1",
-                                                      "ov_attribute_2": "fw_attribute_2"},
-                                 {"ov_attribute_str": "string",
-                                  "ov_attribute_int": 4,
-                                  "ov_attribute_bool": True,
-                                  "ov_attribute_float": 4.,
-                                  "ov_attribute_vec_string": ["str1", "str2", "str3"],
-                                  "ov_attribute_vec_int": [1, 2, 3, 4, 5, 6, 7],
-                                  "ov_attribute_vec_bool": [True, False, True],
-                                  "ov_attribute_vec_float": [1., 2., 3., 4., 5., 6., 7.]}))
-    model = ie.read_model(onnx_model_filename)
+    # check the model is valid
+    model = ie.read_model(onnx_model_for_op_extension_test)
+    assert model
+
+    # add extensions
+    fw_operation = "Floor"
+    ov_operation = opset_prefix + fw_operation
+    ie.add_extension(OpExtension(ov_operation, fw_operation))
+
+    model = ie.read_model(onnx_model_for_op_extension_test)
     assert model
 
 
-def test_op_extension_via_frontend_extension():
+@pytest.mark.parametrize("opset_prefix", ["opset1..", "opset1:::", "opset.", "opset::", "wrong"])
+def test_op_extension_specify_wrong_opset(opset_prefix):
     skip_if_onnx_frontend_is_disabled()
 
-    # use specific (openvino.frontend) import here
+    # use specific (openvino.frontend.onnx) import here
+    from openvino.frontend.onnx import OpExtension
+    from openvino.runtime import Core
+
+    ie = Core()
+
+    # add extensions
+    fw_operation = "Floor"
+    ov_operation = opset_prefix + fw_operation
+    ie.add_extension(OpExtension(ov_operation, fw_operation))
+
+    with pytest.raises(Exception):
+        ie.read_model(onnx_model_for_op_extension_test)
+
+
+def test_op_extension_via_onnx_extension_set_attrs_values():
+    skip_if_onnx_frontend_is_disabled()
+
+    # use specific (openvino.frontend.onnx) import here
+    from openvino.frontend.onnx import OpExtension
+    from openvino.runtime import Core
+
+    ie = Core()
+
+    # check the model is valid
+    model = ie.read_model(onnx_model_for_op_extension_test)
+    assert model
+
+    # add extensions
+    ie.add_extension(OpExtension("Multiply", "Mul", {}, {"auto_broadcast": "numpy"}))
+    ie.add_extension(OpExtension("Elu", {}, {"alpha": 1.}))
+    ie.add_extension(OpExtension("Floor"))
+    ie.add_extension(OpExtension("Concat", {}, {"axis": 0}))
+    ie.add_extension(OpExtension("Convert", "Cast", {}, {"destination_type": "i64"}))
+    ie.add_extension(OpExtension("AvgPool", "AveragePool", {}, {"kernel": [2, 2],
+                                                                "strides": [2, 2],
+                                                                "pads_begin": [0, 0],
+                                                                "pads_end": [1, 1],
+                                                                "exclude-pad": True,
+                                                                "auto_pad": "same_upper",
+                                                                "rounding_type": "floor"}))
+
+    model = ie.read_model(onnx_model_for_op_extension_test)
+    assert model
+
+
+def test_op_extension_via_frontend_extension_set_attrs_values():
+    skip_if_onnx_frontend_is_disabled()
+
+    # use common (openvino.frontend) import here
+    from openvino.frontend import OpExtension
     from openvino.runtime import Core
 
     ie = Core()
-    ie.add_extension(OpExtension("FW_OV_OP"))
-    ie.add_extension(OpExtension("OV_OP", "FW_OP_1"))
-    ie.add_extension(OpExtension("OV_OP", "FW_OP_2", {"ov_attribute_1": "fw_attribute_1",
-                                                      "ov_attribute_2": "fw_attribute_2"}))
-    ie.add_extension(OpExtension("OV_OP", "FW_OP_3", {"ov_attribute_1": "fw_attribute_1",
-                                                      "ov_attribute_2": "fw_attribute_2"},
-                                 {"ov_attribute_str": "string",
-                                  "ov_attribute_int": 4,
-                                  "ov_attribute_bool": True,
-                                  "ov_attribute_float": 4.,
-                                  "ov_attribute_vec_string": ["str1", "str2", "str3"],
-                                  "ov_attribute_vec_int": [1, 2, 3, 4, 5, 6, 7],
-                                  "ov_attribute_vec_bool": [True, False, True],
-                                  "ov_attribute_vec_float": [1., 2., 3., 4., 5., 6., 7.]}))
-
-    model = ie.read_model(onnx_model_filename)
+    # check the model is valid
+    model = ie.read_model(onnx_model_for_op_extension_test)
+    assert model
+
+    # add extensions
+    ie.add_extension(OpExtension("Multiply", "Mul", {}, {"auto_broadcast": "numpy"}))
+    ie.add_extension(OpExtension("Elu", "Elu", {}, {"alpha": 1.}))
+    ie.add_extension(OpExtension("Floor"))
+    ie.add_extension(OpExtension("Concat", {}, {"axis": 0}))
+    ie.add_extension(OpExtension("Convert", "Cast", {}, {"destination_type": "i64"}))
+    ie.add_extension(OpExtension("AvgPool", "AveragePool", {}, {"kernel": [2, 2],
+                                                                "strides": [2, 2],
+                                                                "pads_begin": [0, 0],
+                                                                "pads_end": [1, 1],
+                                                                "exclude-pad": True,
+                                                                "auto_pad": "same_upper",
+                                                                "rounding_type": "floor"}))
+
+    model = ie.read_model(onnx_model_for_op_extension_test)
+    assert model
+
+
+def test_op_extension_via_frontend_extension_map_attributes():
+    skip_if_onnx_frontend_is_disabled()
+
+    # use common (openvino.frontend) import here
+    from openvino.frontend import OpExtension
+    from openvino.runtime import Core
+
+    ie = Core()
+    # check the model is valid
+    model = ie.read_model(onnx_model_for_op_extension_test)
+    assert model
+
+    # add extensions
+    ie.add_extension(OpExtension("Elu", "Elu", {"alpha": "alpha"}))
+    ie.add_extension(OpExtension("Concat", {"axis": "axis"}, {"axis": 0}))
+
+    ie.add_extension(OpExtension("AvgPool", "AveragePool", {"kernel": "kernel_shape",
+                                                            "strides": "strides",
+                                                            "auto_pad": "auto_pad"},
+                                 {"pads_begin": [0, 0],
+                                  "pads_end": [1, 1],
+                                  "exclude-pad": True,
+                                  "rounding_type": "floor"}))
+
+    model = ie.read_model(onnx_model_for_op_extension_test)
     assert model
diff --git a/src/frontends/common/include/openvino/frontend/extension/op.hpp b/src/frontends/common/include/openvino/frontend/extension/op.hpp
index bbe4aa8e276..bfbb8938317 100644
--- a/src/frontends/common/include/openvino/frontend/extension/op.hpp
+++ b/src/frontends/common/include/openvino/frontend/extension/op.hpp
@@ -91,12 +91,22 @@ public:
     void on_adapter(const std::string& name, ValueAccessor& adapter) override {
         auto p_value = m_attr_values_map.find(name);
+
         if (p_value != m_attr_values_map.end()) {
             adapter.set_as_any(p_value->second);
         } else {
             auto p_name = m_attr_names_map.find(name);
             const std::string& target_name = p_name != m_attr_names_map.end() ? p_name->second : name;
-            adapter.set_as_any(m_context.get_attribute_as_any(target_name));
+            try {
+                adapter.set_as_any(m_context.get_attribute_as_any(target_name));
+            } catch (::ov::AssertFailure ex) {
+                OPENVINO_ASSERT(false,
+                                ex.what(),
+                                "\nValue for attribute \"",
+                                target_name,
+                                "\" is not set or mapping between "
+                                "framework and openvino node attributes is incorrect.");
+            }
         }
     }
@@ -142,7 +152,7 @@ OpExtensionBase::OpExtensionBase(const std::string& ov
                          const std::map& attr_values_map)
     : BaseConversionType(fw_type_name,
                          OpConversionFunction(
-                             [&]() -> std::shared_ptr {
+                             [=]() -> std::shared_ptr {
                                  auto split = [](const std::string& s, const std::string& delimiter) {
                                      size_t pos_start = 0, pos_end, delim_len = delimiter.length();
                                      std::string token;
@@ -194,7 +204,7 @@ OpExtensionBase::OpExtensionBase(const std::string& ov
                                  } else {
                                      FRONT_END_GENERAL_CHECK(
                                          false,
-                                         "Invalid OpenVINO operation format, one of the next is expected:"
+                                         "Invalid OpenVINO operation format, one of the next is expected: \n"
                                          "opsetN::OpName or opsetN.OpName or OpName. Provided operation format: ",
                                          ov_type_name);
                                  }
@@ -206,7 +216,8 @@ OpExtensionBase::OpExtensionBase(const std::string& ov
                                                          "name ",
                                                          op_name);
                                  }
-                                 return opset.create(op_name)->shared_from_this();
+
+                                 return std::shared_ptr(opset.create(op_name));
                              },
                              attr_names_map,
                              attr_values_map)) {}
diff --git a/src/frontends/onnx/frontend/src/core/attribute.cpp b/src/frontends/onnx/frontend/src/core/attribute.cpp
index b4734e9b998..7b48d02f93b 100644
--- a/src/frontends/onnx/frontend/src/core/attribute.cpp
+++ b/src/frontends/onnx/frontend/src/core/attribute.cpp
@@ -28,13 +28,31 @@ Subgraph Attribute::get_subgraph(const Graph* parent_graph) const {
 ov::Any Attribute::get_any() const {
     switch (get_type()) {
     case Type::float_point:
-        return get_float();
+        // OV has automatic downcasting of node attributes:
+        // double -> float
+        // but upcasting is not supported:
+        // float -> double
+        // so float value from protobuf leads to the issue
+        // when we are trying to get an attribute of double type in ov::Node
+        return static_cast<double>(get_float());
     case Type::integer:
         return get_integer();
     case Type::string:
         return get_string();
-    case Type::float_point_array:
-        return get_float_array();
+    case Type::float_point_array: {
+        auto float_array = get_float_array();
+        // OV has automatic downcasting of node attributes:
+        // double -> float
+        // but upcasting is not supported:
+        // float -> double
+        // so float value from protobuf leads to the issue
+        // when we are trying to get an attribute of double type in ov::Node
+        std::vector<double> double_array(float_array.size());
+        for (size_t i = 0; i < float_array.size(); ++i) {
+            double_array[i] = static_cast<double>(float_array[i]);
+        }
+        return double_array;
+    }
     case Type::integer_array:
         return get_integer_array();
     case Type::string_array: