OpExtension: fix framework attributes handling (#10445)

* Fix attribute handling in OpExtension, add unit tests

* add missing file

* fix warning

* fix warning

* rename convert_from_py_object method to py_object_to_any, fix PEP8

* fix PEP8

* delete redundant include dir, fix includes
This commit is contained in:
Ivan Tikhonov 2022-02-17 17:42:12 +03:00 committed by GitHub
parent 61f657795c
commit ade4c6c7f9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 265 additions and 53 deletions

View File

@ -8,14 +8,13 @@
#include <pybind11/stl_bind.h>
#include "extension/json_config.hpp"
#include "manager.hpp"
#include "openvino/frontend/exception.hpp"
#include "openvino/frontend/extension/conversion.hpp"
#include "openvino/frontend/extension/decoder_transformation.hpp"
#include "openvino/frontend/extension/op.hpp"
#include "openvino/frontend/extension/progress_reporter.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
#include "pyopenvino/graph/model.hpp"
#include "pyopenvino/utils/utils.hpp"
namespace py = pybind11;
@ -130,7 +129,7 @@ void regclass_frontend_OpExtension(py::module m) {
const std::map<std::string, py::object>& attr_values_map) {
std::map<std::string, ov::Any> any_map;
for (const auto& it : attr_values_map) {
any_map[it.first] = it.second;
any_map[it.first] = py_object_to_any(it.second);
}
return std::make_shared<OpExtension<void>>(fw_type_name, attr_names_map, any_map);
}),
@ -144,8 +143,9 @@ void regclass_frontend_OpExtension(py::module m) {
const std::map<std::string, py::object>& attr_values_map) {
std::map<std::string, ov::Any> any_map;
for (const auto& it : attr_values_map) {
any_map[it.first] = it.second;
any_map[it.first] = py_object_to_any(it.second);
}
return std::make_shared<OpExtension<void>>(ov_type_name, fw_type_name, attr_names_map, any_map);
}),
py::arg("ov_type_name"),

View File

@ -1,4 +1,4 @@
# Copyright (C) 2021 Intel Corporation
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
@ -19,7 +19,8 @@ function(frontend_module TARGET FRAMEWORK INSTALL_COMPONENT)
add_dependencies(${TARGET_NAME} pyopenvino)
target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}")
target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}"
"${PYTHON_SOURCE_DIR}/pyopenvino/utils/")
target_link_libraries(${TARGET_NAME} PRIVATE openvino::runtime openvino::frontend::${FRAMEWORK})
# Compatibility with python 2.7 which has deprecated "register" specifier

View File

@ -72,7 +72,7 @@ void regclass_frontend_NodeContext(py::module m) {
CAST_TO_PY(any, dtype, int64_t);
CAST_TO_PY(any, dtype, bool);
CAST_TO_PY(any, dtype, std::string);
CAST_TO_PY(any, dtype, float);
CAST_TO_PY(any, dtype, double);
CAST_TO_PY(any, dtype, ov::element::Type);
CAST_TO_PY(any, dtype, ov::PartialShape);
@ -83,7 +83,7 @@ void regclass_frontend_NodeContext(py::module m) {
CAST_VEC_TO_PY(any, dtype, std::vector<bool>);
#endif
CAST_VEC_TO_PY(any, dtype, std::vector<std::string>);
CAST_VEC_TO_PY(any, dtype, std::vector<float>);
CAST_VEC_TO_PY(any, dtype, std::vector<double>);
CAST_VEC_TO_PY(any, dtype, std::vector<ov::element::Type>);
CAST_VEC_TO_PY(any, dtype, std::vector<ov::PartialShape>);

View File

@ -3,6 +3,7 @@
//
#include "extension.hpp"
#include "utils.hpp"
#include <pybind11/functional.h>
#include <pybind11/pybind11.h>
@ -52,9 +53,10 @@ void regclass_frontend_onnx_OpExtension(py::module m) {
ext.def(py::init([](const std::string& fw_type_name,
const std::map<std::string, std::string>& attr_names_map,
const std::map<std::string, py::object>& attr_values_map) {
std::map<std::string, ov::Any> any_map;
for (const auto& it : attr_values_map) {
any_map[it.first] = it.second;
any_map[it.first] = py_object_to_any(it.second);
}
return std::make_shared<OpExtension<void>>(fw_type_name, attr_names_map, any_map);
}), py::arg("fw_type_name"),
@ -65,9 +67,10 @@ void regclass_frontend_onnx_OpExtension(py::module m) {
const std::string& fw_type_name,
const std::map<std::string, std::string>& attr_names_map,
const std::map<std::string, py::object>& attr_values_map) {
std::map<std::string, ov::Any> any_map;
for (const auto& it : attr_values_map) {
any_map[it.first] = it.second;
any_map[it.first] = py_object_to_any(it.second);
}
return std::make_shared<OpExtension<void>>(ov_type_name, fw_type_name, attr_names_map, any_map);
}),

View File

@ -3,6 +3,7 @@
//
#include "extension.hpp"
#include "utils.hpp"
#include <pybind11/functional.h>
#include <pybind11/pybind11.h>
@ -52,7 +53,7 @@ void regclass_frontend_tensorflow_OpExtension(py::module m) {
const std::map<std::string, py::object>& attr_values_map) {
std::map<std::string, ov::Any> any_map;
for (const auto& it : attr_values_map) {
any_map[it.first] = it.second;
any_map[it.first] = py_object_to_any(it.second);
}
return std::make_shared<OpExtension<void>>(fw_type_name, attr_names_map, any_map);
}), py::arg("fw_type_name"),
@ -65,7 +66,7 @@ void regclass_frontend_tensorflow_OpExtension(py::module m) {
const std::map<std::string, py::object>& attr_values_map) {
std::map<std::string, ov::Any> any_map;
for (const auto& it : attr_values_map) {
any_map[it.first] = it.second;
any_map[it.first] = py_object_to_any(it.second);
}
return std::make_shared<OpExtension<void>>(ov_type_name, fw_type_name, attr_names_map, any_map);
}),

View File

@ -0,0 +1,62 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <pybind11/pybind11.h>
#include <openvino/core/any.hpp>
// Converts a Python object into an ov::Any holding the matching C++ type.
// Supported scalars: str -> std::string, bool -> bool, float -> double,
// int -> int64_t. Supported lists must be homogeneous and become
// std::vector of the corresponding element type. Any other type (or a
// mixed-type list) raises ov::AssertFailure via OPENVINO_ASSERT.
// 'inline' is required: this lives in a header included by several
// frontend binding modules, so a non-inline definition violates the ODR.
inline ov::Any py_object_to_any(const pybind11::object& py_obj) {
    // NOTE: bool is checked before int on purpose — Python bool is a
    // subclass of int, so the int check would otherwise match True/False.
    if (pybind11::isinstance<pybind11::str>(py_obj)) {
        return py_obj.cast<std::string>();
    } else if (pybind11::isinstance<pybind11::bool_>(py_obj)) {
        return py_obj.cast<bool>();
    } else if (pybind11::isinstance<pybind11::float_>(py_obj)) {
        // Python floats are double precision — keep the full width.
        return py_obj.cast<double>();
    } else if (pybind11::isinstance<pybind11::int_>(py_obj)) {
        return py_obj.cast<int64_t>();
    } else if (pybind11::isinstance<pybind11::list>(py_obj)) {
        auto _list = py_obj.cast<pybind11::list>();
        enum class PY_TYPE : int {
            UNKNOWN = 0,
            STR,
            INT,
            FLOAT,
            BOOL
        };
        PY_TYPE detected_type = PY_TYPE::UNKNOWN;
        for (const auto &it: _list) {
            auto check_type = [&](PY_TYPE type) {
                if (detected_type == PY_TYPE::UNKNOWN || detected_type == type) {
                    detected_type = type;
                    return;
                }
                // BUGFIX: the assertion condition must be 'false'. The
                // original passed only the message string, which is a
                // truthy pointer, so mixed-type lists were silently accepted.
                OPENVINO_ASSERT(false, "Incorrect attribute. Mixed types in the list are not allowed.");
            };
            if (pybind11::isinstance<pybind11::str>(it)) {
                check_type(PY_TYPE::STR);
            } else if (pybind11::isinstance<pybind11::bool_>(it)) {
                // BUGFIX: bool must be tested before int (bool is an int
                // subclass), otherwise [True, False] is detected as INT.
                check_type(PY_TYPE::BOOL);
            } else if (pybind11::isinstance<pybind11::int_>(it)) {
                check_type(PY_TYPE::INT);
            } else if (pybind11::isinstance<pybind11::float_>(it)) {
                check_type(PY_TYPE::FLOAT);
            }
        }
        // An empty list stays UNKNOWN and falls into the default branch:
        // the element type cannot be deduced, so it is rejected.
        switch (detected_type) {
        case PY_TYPE::STR:
            return _list.cast<std::vector<std::string>>();
        case PY_TYPE::FLOAT:
            return _list.cast<std::vector<double>>();
        case PY_TYPE::INT:
            return _list.cast<std::vector<int64_t>>();
        case PY_TYPE::BOOL:
            return _list.cast<std::vector<bool>>();
        default:
            OPENVINO_ASSERT(false, "Unsupported attribute type.");
        }
    }
    // Anything else (dict, None, arbitrary objects, ...) is unsupported.
    OPENVINO_ASSERT(false, "Unsupported attribute type.");
}

View File

@ -91,6 +91,42 @@ def create_onnx_model_with_custom_attributes():
return make_model(graph, producer_name="ngraph ONNX Importer")
def create_onnx_model_for_op_extension():
"""Build an ONNX model whose nodes together exercise every attribute kind
the OpExtension tests need: double, int list, enum/string, bool,
int64, and an element-type attribute.

NOTE: leading indentation in this listing was stripped by the diff view;
only comments were added here.
"""
# operation with double attribute
elu = onnx.helper.make_node("Elu", alpha=1.0, inputs=["x"], outputs=["elu"])
# operation with vector<size_t>, enum, bool attributes
avg_pool = onnx.helper.make_node("AveragePool", kernel_shape=[2, 2], auto_pad="SAME_LOWER",
strides=[2, 2],
inputs=["elu"], outputs=["avg_pool"])
# operation with no attributes
floor = onnx.helper.make_node("Floor", inputs=["avg_pool"], outputs=["floor"])
# operation with int64_t attribute
concat = onnx.helper.make_node("Concat", axis=0, inputs=["floor", "avg_pool"], outputs=["concat"])
# scalar 0.5 constant feeding the second input of Mul below
const_tensor = onnx.helper.make_tensor("const_tensor",
onnx.TensorProto.FLOAT,
[1],
[0.5])
const_node = onnx.helper.make_node("Constant", [], outputs=["const_node"],
value=const_tensor, name="const_node")
# operation with enum attribute
mul = onnx.helper.make_node("Mul", inputs=["concat", "const_node"], outputs=["mul"])
# operation with element::type (class) attribute
cast = onnx.helper.make_node("Cast", to=int(onnx.TensorProto.FLOAT), inputs=["mul"], outputs=["out"])
input_tensors = [
make_tensor_value_info("x", onnx.TensorProto.FLOAT, (1, 3, 32, 32)),
]
output_tensors = [make_tensor_value_info("out", onnx.TensorProto.FLOAT, (3, 3, 32, 32))]
graph = make_graph([const_node, elu, avg_pool, floor, concat, mul, cast], "graph",
input_tensors, output_tensors)
return make_model(graph, producer_name="ngraph ONNX Importer")
def run_function(function, *inputs, expected):
runtime = get_runtime()
computation = runtime.computation(function)
@ -106,6 +142,7 @@ fem = FrontEndManager()
onnx_model_filename = "model.onnx"
onnx_model_with_custom_attributes_filename = "model_custom_attributes.onnx"
onnx_model_with_subgraphs_filename = "model_subgraphs.onnx"
onnx_model_for_op_extension_test = "model_op_extension.onnx"
ONNX_FRONTEND_NAME = "onnx"
@ -114,12 +151,14 @@ def setup_module():
onnx.save_model(create_onnx_model_with_custom_attributes(),
onnx_model_with_custom_attributes_filename)
onnx.save_model(create_onnx_model_with_subgraphs(), onnx_model_with_subgraphs_filename)
onnx.save_model(create_onnx_model_for_op_extension(), onnx_model_for_op_extension_test)
def teardown_module():
os.remove(onnx_model_filename)
os.remove(onnx_model_with_custom_attributes_filename)
os.remove(onnx_model_with_subgraphs_filename)
os.remove(onnx_model_for_op_extension_test)
def skip_if_onnx_frontend_is_disabled():
@ -425,7 +464,8 @@ def test_onnx_conversion_extension():
assert invoked
def test_op_extension_via_onnx_extension():
@pytest.mark.parametrize("opset_prefix", ["opset1.", "opset1::", "opset8.", "opset8::", ""])
def test_op_extension_specify_opset(opset_prefix):
skip_if_onnx_frontend_is_disabled()
# use specific (openvino.frontend.onnx) import here
@ -433,47 +473,123 @@ def test_op_extension_via_onnx_extension():
from openvino.runtime import Core
ie = Core()
ie.add_extension(OpExtension("FW_OV_OP"))
ie.add_extension(OpExtension("OV_OP", "FW_OP_1"))
ie.add_extension(OpExtension("OV_OP", "FW_OP_2", {"ov_attribute_1": "fw_attribute_1",
"ov_attribute_2": "fw_attribute_2"}))
ie.add_extension(OpExtension("OV_OP", "FW_OP_3", {"ov_attribute_1": "fw_attribute_1",
"ov_attribute_2": "fw_attribute_2"},
{"ov_attribute_str": "string",
"ov_attribute_int": 4,
"ov_attribute_bool": True,
"ov_attribute_float": 4.,
"ov_attribute_vec_string": ["str1", "str2", "str3"],
"ov_attribute_vec_int": [1, 2, 3, 4, 5, 6, 7],
"ov_attribute_vec_bool": [True, False, True],
"ov_attribute_vec_float": [1., 2., 3., 4., 5., 6., 7.]}))
model = ie.read_model(onnx_model_filename)
# check the model is valid
model = ie.read_model(onnx_model_for_op_extension_test)
assert model
# add extensions
fw_operation = "Floor"
ov_operation = opset_prefix + fw_operation
ie.add_extension(OpExtension(ov_operation, fw_operation))
model = ie.read_model(onnx_model_for_op_extension_test)
assert model
def test_op_extension_via_frontend_extension():
@pytest.mark.parametrize("opset_prefix", ["opset1..", "opset1:::", "opset.", "opset::", "wrong"])
def test_op_extension_specify_wrong_opset(opset_prefix):
skip_if_onnx_frontend_is_disabled()
# use specific (openvino.frontend) import here
# use specific (openvino.frontend.onnx) import here
from openvino.frontend.onnx import OpExtension
from openvino.runtime import Core
ie = Core()
# add extensions
fw_operation = "Floor"
ov_operation = opset_prefix + fw_operation
ie.add_extension(OpExtension(ov_operation, fw_operation))
with pytest.raises(Exception):
ie.read_model(onnx_model_for_op_extension_test)
def test_op_extension_via_onnx_extension_set_attrs_values():
"""Register OpExtensions through the ONNX-specific frontend API with
default attribute values of str/float/bool/int/list kinds and verify
the model still loads.

NOTE: leading indentation in this listing was stripped by the diff view;
only comments were added here.
"""
skip_if_onnx_frontend_is_disabled()
# use specific (openvino.frontend.onnx) import here
from openvino.frontend.onnx import OpExtension
from openvino.runtime import Core
ie = Core()
# check the model is valid
model = ie.read_model(onnx_model_for_op_extension_test)
assert model
# add extensions
ie.add_extension(OpExtension("Multiply", "Mul", {}, {"auto_broadcast": "numpy"}))
ie.add_extension(OpExtension("Elu", {}, {"alpha": 1.}))
ie.add_extension(OpExtension("Floor"))
ie.add_extension(OpExtension("Concat", {}, {"axis": 0}))
ie.add_extension(OpExtension("Convert", "Cast", {}, {"destination_type": "i64"}))
# AvgPool covers list, bool, and enum-valued default attributes
ie.add_extension(OpExtension("AvgPool", "AveragePool", {}, {"kernel": [2, 2],
"strides": [2, 2],
"pads_begin": [0, 0],
"pads_end": [1, 1],
"exclude-pad": True,
"auto_pad": "same_upper",
"rounding_type": "floor"}))
# re-read: conversion must now go through the registered extensions
model = ie.read_model(onnx_model_for_op_extension_test)
assert model
def test_op_extension_via_frontend_extension_set_attrs_values():
skip_if_onnx_frontend_is_disabled()
# use common (openvino.frontend) import here
from openvino.frontend import OpExtension
from openvino.runtime import Core
ie = Core()
ie.add_extension(OpExtension("FW_OV_OP"))
ie.add_extension(OpExtension("OV_OP", "FW_OP_1"))
ie.add_extension(OpExtension("OV_OP", "FW_OP_2", {"ov_attribute_1": "fw_attribute_1",
"ov_attribute_2": "fw_attribute_2"}))
ie.add_extension(OpExtension("OV_OP", "FW_OP_3", {"ov_attribute_1": "fw_attribute_1",
"ov_attribute_2": "fw_attribute_2"},
{"ov_attribute_str": "string",
"ov_attribute_int": 4,
"ov_attribute_bool": True,
"ov_attribute_float": 4.,
"ov_attribute_vec_string": ["str1", "str2", "str3"],
"ov_attribute_vec_int": [1, 2, 3, 4, 5, 6, 7],
"ov_attribute_vec_bool": [True, False, True],
"ov_attribute_vec_float": [1., 2., 3., 4., 5., 6., 7.]}))
model = ie.read_model(onnx_model_filename)
# check the model is valid
model = ie.read_model(onnx_model_for_op_extension_test)
assert model
# add extensions
ie.add_extension(OpExtension("Multiply", "Mul", {}, {"auto_broadcast": "numpy"}))
ie.add_extension(OpExtension("Elu", "Elu", {}, {"alpha": 1.}))
ie.add_extension(OpExtension("Floor"))
ie.add_extension(OpExtension("Concat", {}, {"axis": 0}))
ie.add_extension(OpExtension("Convert", "Cast", {}, {"destination_type": "i64"}))
ie.add_extension(OpExtension("AvgPool", "AveragePool", {}, {"kernel": [2, 2],
"strides": [2, 2],
"pads_begin": [0, 0],
"pads_end": [1, 1],
"exclude-pad": True,
"auto_pad": "same_upper",
"rounding_type": "floor"}))
model = ie.read_model(onnx_model_for_op_extension_test)
assert model
def test_op_extension_via_frontend_extension_map_attributes():
"""Register OpExtensions that MAP framework attribute names to OpenVINO
attribute names (optionally combined with default values) and verify
the model still loads.

NOTE: leading indentation in this listing was stripped by the diff view;
only comments were added here.
"""
skip_if_onnx_frontend_is_disabled()
# use common (openvino.frontend) import here
from openvino.frontend import OpExtension
from openvino.runtime import Core
ie = Core()
# check the model is valid
model = ie.read_model(onnx_model_for_op_extension_test)
assert model
# add extensions
ie.add_extension(OpExtension("Elu", "Elu", {"alpha": "alpha"}))
# name mapping and default values can be combined in one extension
ie.add_extension(OpExtension("Concat", {"axis": "axis"}, {"axis": 0}))
ie.add_extension(OpExtension("AvgPool", "AveragePool", {"kernel": "kernel_shape",
"strides": "strides",
"auto_pad": "auto_pad"},
{"pads_begin": [0, 0],
"pads_end": [1, 1],
"exclude-pad": True,
"rounding_type": "floor"}))
# re-read: conversion must now go through the registered extensions
model = ie.read_model(onnx_model_for_op_extension_test)
assert model

View File

@ -91,12 +91,22 @@ public:
void on_adapter(const std::string& name, ValueAccessor<void>& adapter) override {
auto p_value = m_attr_values_map.find(name);
if (p_value != m_attr_values_map.end()) {
adapter.set_as_any(p_value->second);
} else {
auto p_name = m_attr_names_map.find(name);
const std::string& target_name = p_name != m_attr_names_map.end() ? p_name->second : name;
adapter.set_as_any(m_context.get_attribute_as_any(target_name));
try {
adapter.set_as_any(m_context.get_attribute_as_any(target_name));
} catch (::ov::AssertFailure ex) {
OPENVINO_ASSERT(false,
ex.what(),
"\nValue for attribute \"",
target_name,
"\" is not set or mapping between "
"framework and openvino node attributes is incorrect.");
}
}
}
@ -142,7 +152,7 @@ OpExtensionBase<BaseConversionType, void>::OpExtensionBase(const std::string& ov
const std::map<std::string, ov::Any>& attr_values_map)
: BaseConversionType(fw_type_name,
OpConversionFunction(
[&]() -> std::shared_ptr<ov::Node> {
[=]() -> std::shared_ptr<ov::Node> {
auto split = [](const std::string& s, const std::string& delimiter) {
size_t pos_start = 0, pos_end, delim_len = delimiter.length();
std::string token;
@ -194,7 +204,7 @@ OpExtensionBase<BaseConversionType, void>::OpExtensionBase(const std::string& ov
} else {
FRONT_END_GENERAL_CHECK(
false,
"Invalid OpenVINO operation format, one of the next is expected:"
"Invalid OpenVINO operation format, one of the next is expected: \n"
"opsetN::OpName or opsetN.OpName or OpName. Provided operation format: ",
ov_type_name);
}
@ -206,7 +216,8 @@ OpExtensionBase<BaseConversionType, void>::OpExtensionBase(const std::string& ov
"name ",
op_name);
}
return opset.create(op_name)->shared_from_this();
return std::shared_ptr<ngraph::Node>(opset.create(op_name));
},
attr_names_map,
attr_values_map)) {}

View File

@ -28,13 +28,31 @@ Subgraph Attribute::get_subgraph(const Graph* parent_graph) const {
ov::Any Attribute::get_any() const {
switch (get_type()) {
case Type::float_point:
return get_float();
// OV has automatic downcasting of node attributes:
// double -> float
// but upcasting is not supported:
// float -> double
// so float value from protobuf leads to the issue
// when we are trying to get an attribute of double type in ov::Node
return static_cast<double>(get_float());
case Type::integer:
return get_integer();
case Type::string:
return get_string();
case Type::float_point_array:
return get_float_array();
case Type::float_point_array: {
auto float_array = get_float_array();
// OV has automatic downcasting of node attributes:
// double -> float
// but upcasting is not supported:
// float -> double
// so float value from protobuf leads to the issue
// when we are trying to get an attribute of double type in ov::Node
std::vector<double> double_array(float_array.size());
for (size_t i = 0; i < float_array.size(); ++i) {
double_array[i] = static_cast<double>(float_array[i]);
}
return double_array;
}
case Type::integer_array:
return get_integer_array();
case Type::string_array: