From 34af04e1a80a74dd80747dd9a8e6112e859763bf Mon Sep 17 00:00:00 2001 From: Jozef Daniecki Date: Fri, 30 Oct 2020 05:31:27 +0100 Subject: [PATCH] Serialization of experimental and custom ops (#2862) * Add IEGeneric node type handling. * Remove dependency on plugin_api library. IEGeneric type name is passed via Visitor API as new synthetic attribute. * Add custom operations support * Fix character literals comparison. * Pass custom opsets to CNNNetwork::serialize(). IE extensions are stored in ngraph CNNNetwork and later used to pass custom opsets to serialization transformation. * Refactor custom ops tests to use template_extension library. * Add comment on __generic_ie_type__ purpose. --- inference-engine/include/cpp/ie_cnn_network.h | 5 +- .../cnn_network_ngraph_impl.cpp | 20 +++- .../cnn_network_ngraph_impl.hpp | 5 +- .../src/inference_engine/generic_ie.cpp | 14 +++ .../src/plugin_api/generic_ie.hpp | 2 + .../src/readers/ir_reader/ie_ir_parser.cpp | 4 +- .../src/readers/ir_reader/ie_ir_parser.hpp | 1 + .../readers/onnx_reader/ie_onnx_reader.cpp | 2 +- .../include/transformations/serialize.hpp | 5 +- .../src/transformations/serialize.cpp | 51 +++++++-- .../ir_serialization/custom_ops.cpp | 103 ++++++++++++++++++ .../models/custom_op.prototxt | 66 +++++++++++ .../ir_serialization/models/custom_op.xml | 50 +++++++++ ...xperimental_detectron_detection_output.xml | 38 +++++++ ...mental_detectron_roi_feature_extractor.xml | 38 +++++++ .../ir_serialization/serialize.cpp | 34 ++++++ 16 files changed, 414 insertions(+), 24 deletions(-) create mode 100644 inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp create mode 100644 inference-engine/tests/functional/inference_engine/ir_serialization/models/custom_op.prototxt create mode 100644 inference-engine/tests/functional/inference_engine/ir_serialization/models/custom_op.xml create mode 100644 
inference-engine/tests/functional/inference_engine/ir_serialization/models/experimental_detectron_detection_output.xml create mode 100644 inference-engine/tests/functional/inference_engine/ir_serialization/models/experimental_detectron_roi_feature_extractor.xml diff --git a/inference-engine/include/cpp/ie_cnn_network.h b/inference-engine/include/cpp/ie_cnn_network.h index 8cd2358996b..9f3ea1949b8 100644 --- a/inference-engine/include/cpp/ie_cnn_network.h +++ b/inference-engine/include/cpp/ie_cnn_network.h @@ -20,6 +20,7 @@ #include "ie_common.h" #include "ie_data.h" #include "details/ie_exception_conversion.hpp" +#include "ie_extension.h" namespace ngraph { @@ -54,8 +55,10 @@ public: * This constructor wraps existing ngraph::Function * If you want to avoid modification of original Function, please create a copy * @param network Pointer to the ngraph::Function object + * @param exts Vector of pointers to IE extension objects */ - explicit CNNNetwork(const std::shared_ptr& network); + explicit CNNNetwork(const std::shared_ptr& network, + const std::vector& exts = {}); /** * @brief A destructor diff --git a/inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp b/inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp index 379c26001e9..dea28219d02 100644 --- a/inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp +++ b/inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp @@ -53,7 +53,8 @@ static std::shared_ptr copyFunction(const std::shared_ptr& graph) { +CNNNetwork::CNNNetwork(const std::shared_ptr& graph, + const std::vector& exts) { OV_ITT_SCOPED_TASK(itt::domains::IE, "CNNNetwork::CNNNetwork"); if (graph == nullptr) { @@ -61,7 +62,7 @@ CNNNetwork::CNNNetwork(const std::shared_ptr& graph) { } // Create CNNNetworkNGraphImpl - network = std::make_shared(graph); + network = std::make_shared(graph, exts); actual = network.get(); if (actual == nullptr) { THROW_IE_EXCEPTION << "CNNNetwork was not initialized."; @@ -111,8 
+112,10 @@ void CNNNetworkNGraphImpl::createDataForResult(const ::ngraph::Output<::ngraph:: } } -CNNNetworkNGraphImpl::CNNNetworkNGraphImpl(const std::shared_ptr& nGraph) - : _ngraph_function(nGraph) { +CNNNetworkNGraphImpl::CNNNetworkNGraphImpl( + const std::shared_ptr& nGraph, + const std::vector& exts) + : _ngraph_function(nGraph), _ie_extensions(exts) { // Restore usual attributes for ICNNNetwork auto keep_input_info = [](CNNNetworkNGraphImpl& network, const DataPtr& inData) { InputInfo::Ptr info(new InputInfo()); @@ -402,8 +405,15 @@ StatusCode CNNNetworkNGraphImpl::serialize(const std::string& xmlPath, ResponseDesc* resp) const noexcept { try { if (getFunction()) { + std::map custom_opsets; + for (auto extension : _ie_extensions) { + auto opset = extension->getOpSets(); + custom_opsets.insert(begin(opset), end(opset)); + } ngraph::pass::Manager manager; - manager.register_pass(xmlPath, binPath); + manager.register_pass( + xmlPath, binPath, ngraph::pass::Serialize::Version::IR_V10, + custom_opsets); manager.run_passes(_ngraph_function); } else { #ifdef ENABLE_V7_SERIALIZE diff --git a/inference-engine/src/inference_engine/cnn_network_ngraph_impl.hpp b/inference-engine/src/inference_engine/cnn_network_ngraph_impl.hpp index 7eb5c5d973e..ed6c3c75f7e 100644 --- a/inference-engine/src/inference_engine/cnn_network_ngraph_impl.hpp +++ b/inference-engine/src/inference_engine/cnn_network_ngraph_impl.hpp @@ -27,6 +27,7 @@ #include "ie_common.h" #include "ie_data.h" #include "ie_input_info.hpp" +#include "ie_extension.h" namespace InferenceEngine { namespace details { @@ -36,7 +37,8 @@ namespace details { */ class INFERENCE_ENGINE_API_CLASS(CNNNetworkNGraphImpl): public ICNNNetwork { public: - CNNNetworkNGraphImpl(const std::shared_ptr<::ngraph::Function>& nGraph); + CNNNetworkNGraphImpl(const std::shared_ptr<::ngraph::Function>& nGraph, + const std::vector& exts = {}); CNNNetworkNGraphImpl(const ICNNNetwork& nGraph); ~CNNNetworkNGraphImpl() override = default; @@ -88,6 
+90,7 @@ protected: private: InferenceEngine::InputsDataMap _inputData; std::map _outputData; + const std::vector _ie_extensions; /** * @brief Create DataPtr for nGraph operation diff --git a/inference-engine/src/inference_engine/generic_ie.cpp b/inference-engine/src/inference_engine/generic_ie.cpp index 4dd9ae46579..e20a4eb9975 100644 --- a/inference-engine/src/inference_engine/generic_ie.cpp +++ b/inference-engine/src/inference_engine/generic_ie.cpp @@ -168,3 +168,17 @@ void ngraph::op::GenericIE::validate_and_infer_types() { << " with type " << type; } } + +bool ngraph::op::GenericIE::visit_attributes(ngraph::AttributeVisitor& visitor) { + for (const auto& p : params) { + std::string name = p.first; + std::string value = p.second; + visitor.on_attribute(name, value); + } + // This is a way to pass type name to transformations::Serialize() without + // adding plugin_api dependency on transformation library + std::string name = "__generic_ie_type__"; + std::string value = getType(); + visitor.on_attribute(name, value); + return true; +} diff --git a/inference-engine/src/plugin_api/generic_ie.hpp b/inference-engine/src/plugin_api/generic_ie.hpp index f921143e9d1..a7e352a233c 100644 --- a/inference-engine/src/plugin_api/generic_ie.hpp +++ b/inference-engine/src/plugin_api/generic_ie.hpp @@ -104,6 +104,8 @@ public: std::shared_ptr clone_with_new_inputs(const OutputVector& new_args) const override; + bool visit_attributes(ngraph::AttributeVisitor& visitor) override; + static void addExtension(std::shared_ptr func, const InferenceEngine::IShapeInferExtensionPtr& ext); static std::vector getExtensions(std::shared_ptr func); diff --git a/inference-engine/src/readers/ir_reader/ie_ir_parser.cpp b/inference-engine/src/readers/ir_reader/ie_ir_parser.cpp index 157984e528f..fd43522516d 100644 --- a/inference-engine/src/readers/ir_reader/ie_ir_parser.cpp +++ b/inference-engine/src/readers/ir_reader/ie_ir_parser.cpp @@ -63,7 +63,7 @@ public: originBlob(weights) { } }; 
-V10Parser::V10Parser(const std::vector& exts) { +V10Parser::V10Parser(const std::vector& exts) : _exts(exts) { // Load default opsets opsets["opset1"] = ngraph::get_opset1(); opsets["opset2"] = ngraph::get_opset2(); @@ -196,7 +196,7 @@ std::shared_ptr V10Parser::parse(const pugi::xml_node& root, std::i result_nodes[0]->add_control_dependency(assign); } } - CNNNetwork net(function); + CNNNetwork net(function, _exts); parsePreProcess(net, root, binStream); return net; } diff --git a/inference-engine/src/readers/ir_reader/ie_ir_parser.hpp b/inference-engine/src/readers/ir_reader/ie_ir_parser.hpp index 78b64b3f5df..c515cc62c0f 100644 --- a/inference-engine/src/readers/ir_reader/ie_ir_parser.hpp +++ b/inference-engine/src/readers/ir_reader/ie_ir_parser.hpp @@ -59,6 +59,7 @@ public: private: std::map opsets; + const std::vector _exts; struct GenericLayerParams { struct LayerPortData { diff --git a/inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp b/inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp index 866806131c2..6c3f5bb0933 100644 --- a/inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp +++ b/inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp @@ -62,7 +62,7 @@ bool ONNXReader::supportModel(std::istream& model) const { } CNNNetwork ONNXReader::read(std::istream& model, const std::vector& exts) const { - return CNNNetwork(ngraph::onnx_import::import_onnx_model(model, readPathFromStream(model))); + return CNNNetwork(ngraph::onnx_import::import_onnx_model(model, readPathFromStream(model)), exts); } INFERENCE_PLUGIN_API(StatusCode) InferenceEngine::CreateReader(IReader*& reader, ResponseDesc *resp) noexcept { diff --git a/inference-engine/src/transformations/include/transformations/serialize.hpp b/inference-engine/src/transformations/include/transformations/serialize.hpp index ca7fc5a1249..7a26024695b 100644 --- a/inference-engine/src/transformations/include/transformations/serialize.hpp +++ 
b/inference-engine/src/transformations/include/transformations/serialize.hpp @@ -33,11 +33,12 @@ public: bool run_on_function(std::shared_ptr f) override; Serialize(const std::string& xmlPath, const std::string& binPath, - Version version = Version::IR_V10) - : m_xmlPath{xmlPath}, m_binPath{binPath}, m_version{version} {} + Version version = Version::IR_V10, std::map custom_opsets = {}) + : m_xmlPath{xmlPath}, m_binPath{binPath}, m_version{version}, m_custom_opsets{custom_opsets} {} private: const std::string m_xmlPath; const std::string m_binPath; const Version m_version; + const std::map m_custom_opsets; }; diff --git a/inference-engine/src/transformations/src/transformations/serialize.cpp b/inference-engine/src/transformations/src/transformations/serialize.cpp index 19dc17a3f81..0b0a4687982 100644 --- a/inference-engine/src/transformations/src/transformations/serialize.cpp +++ b/inference-engine/src/transformations/src/transformations/serialize.cpp @@ -18,8 +18,8 @@ NGRAPH_RTTI_DEFINITION(ngraph::pass::Serialize, "Serialize", 0); namespace { // helpers template -std::string joinVec(std::vector const& vec, - std::string const& glue = std::string(",")) { +std::string joinVec(const std::vector& vec, + const std::string& glue = std::string(",")) { if (vec.empty()) return ""; std::stringstream oss; oss << vec[0]; @@ -51,6 +51,8 @@ class XmlVisitor : public ngraph::AttributeVisitor { } public: + std::string ie_generic_type_name = ""; + XmlVisitor(pugi::xml_node& data) : m_data(data) {} void on_adapter(const std::string& name, @@ -65,7 +67,15 @@ public: } void on_adapter(const std::string& name, ngraph::ValueAccessor& adapter) override { - m_data.append_attribute(name.c_str()).set_value(adapter.get().c_str()); + // __generic_ie_type__ should not be serialized as an attribute + // it is a WA to retrieve layer type name without introducing dependency on + // plugin_api library on transformations library + if (name == "__generic_ie_type__") { + ie_generic_type_name = 
adapter.get(); + } else { + m_data.append_attribute(name.c_str()) + .set_value(adapter.get().c_str()); + } } void on_adapter(const std::string& name, ngraph::ValueAccessor& adapter) override { @@ -160,7 +170,9 @@ ConstantAtributes dump_constant_data(std::vector& bin, return attr; } -std::string get_opset_name(const ngraph::Node* n) { +std::string get_opset_name( + const ngraph::Node* n, + const std::map& custom_opsets) { auto opsets = std::array, 5>{ ngraph::get_opset1(), ngraph::get_opset2(), ngraph::get_opset3(), ngraph::get_opset4(), ngraph::get_opset5()}; @@ -171,6 +183,15 @@ std::string get_opset_name(const ngraph::Node* n) { return "opset" + std::to_string(idx + 1); } } + + for (const auto& custom_opset : custom_opsets) { + std::string name = custom_opset.first; + ngraph::OpSet opset = custom_opset.second; + if (opset.contains_op_type(n)) { + return name; + } + } + return "experimental"; } @@ -180,8 +201,6 @@ std::string get_opset_name(const ngraph::Node* n) { // discrepancies discoverd, translations needs to be added here. 
std::string get_type_name(const ngraph::Node* n) { std::string name = n->get_type_name(); - NGRAPH_CHECK(name != "GenericIE", "Unsupported type in ", n); - const std::unordered_map translator = { {"Constant", "Const"}}; if (translator.count(name) > 0) { @@ -250,8 +269,10 @@ std::string get_node_unique_name(std::unordered_set& unique_names, return name; } -void ngfunction_2_irv10(pugi::xml_document& doc, std::vector& bin, - const ngraph::Function& f) { +void ngfunction_2_irv10( + pugi::xml_document& doc, std::vector& bin, + const ngraph::Function& f, + const std::map& custom_opsets) { pugi::xml_node netXml = doc.append_child("net"); netXml.append_attribute("name").set_value(f.get_friendly_name().c_str()); netXml.append_attribute("version").set_value("10"); @@ -270,9 +291,9 @@ void ngfunction_2_irv10(pugi::xml_document& doc, std::vector& bin, layer.append_attribute("id").set_value(layer_ids.find(node)->second); layer.append_attribute("name").set_value( get_node_unique_name(unique_names, node).c_str()); - layer.append_attribute("type").set_value(get_type_name(node).c_str()); + auto layer_type_attribute = layer.append_attribute("type"); layer.append_attribute("version").set_value( - get_opset_name(node).c_str()); + get_opset_name(node, custom_opsets).c_str()); // pugi::xml_node data = layer.append_child("data"); @@ -281,7 +302,13 @@ void ngfunction_2_irv10(pugi::xml_document& doc, std::vector& bin, XmlVisitor visitor{data}; NGRAPH_CHECK(node->visit_attributes(visitor), "Visitor API is not supported in ", node); - + std::string node_type_name {node->get_type_name()}; + if (node_type_name == "GenericIE") { + layer_type_attribute.set_value( + visitor.ie_generic_type_name.c_str()); + } else { + layer_type_attribute.set_value(get_type_name(node).c_str()); + } // constant atributes (special case) if (auto constant = dynamic_cast(node)) { ConstantAtributes attr = dump_constant_data(bin, *constant); @@ -347,7 +374,7 @@ bool pass::Serialize::run_on_function(std::shared_ptr f) { 
std::vector constants; switch (m_version) { case Version::IR_V10: - ngfunction_2_irv10(xml_doc, constants, *f); + ngfunction_2_irv10(xml_doc, constants, *f, m_custom_opsets); break; default: NGRAPH_UNREACHABLE("Unsupported version"); diff --git a/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp b/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp new file mode 100644 index 00000000000..60ab91cdebf --- /dev/null +++ b/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp @@ -0,0 +1,103 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include +#include +#include +#include "common_test_utils/ngraph_test_utils.hpp" +#include "ie_core.hpp" +#include "ngraph/ngraph.hpp" +#include "transformations/serialize.hpp" + +#ifndef IR_SERIALIZATION_MODELS_PATH // should be already defined by cmake +#define IR_SERIALIZATION_MODELS_PATH "" +#endif + +#ifndef IE_BUILD_POSTFIX // should be already defined by cmake +#define IE_BUILD_POSTFIX "" +#endif + +static std::string get_extension_path() { + return FileUtils::makeSharedLibraryName( + {}, std::string("template_extension") + IE_BUILD_POSTFIX); +} + +class CustomOpsSerializationTest : public ::testing::Test { +protected: + std::string test_name = + ::testing::UnitTest::GetInstance()->current_test_info()->name(); + std::string m_out_xml_path = test_name + ".xml"; + std::string m_out_bin_path = test_name + ".bin"; + + void TearDown() override { + std::remove(m_out_xml_path.c_str()); + std::remove(m_out_bin_path.c_str()); + } +}; + +TEST_F(CustomOpsSerializationTest, CustomOpUser_MO) { + const std::string model = IR_SERIALIZATION_MODELS_PATH "custom_op.xml"; + + InferenceEngine::Core ie; + ie.AddExtension( + InferenceEngine::make_so_pointer( + get_extension_path())); + + auto expected = ie.ReadNetwork(model); + expected.serialize(m_out_xml_path, m_out_bin_path); + auto result = 
ie.ReadNetwork(m_out_xml_path, m_out_bin_path); + + bool success; + std::string message; + std::tie(success, message) = + compare_functions(result.getFunction(), expected.getFunction()); + + ASSERT_TRUE(success) << message; +} + +TEST_F(CustomOpsSerializationTest, CustomOpUser_ONNXImporter) { + const std::string model = IR_SERIALIZATION_MODELS_PATH "custom_op.prototxt"; + + InferenceEngine::Core ie; + ie.AddExtension( + InferenceEngine::make_so_pointer( + get_extension_path())); + + auto expected = ie.ReadNetwork(model); + expected.serialize(m_out_xml_path, m_out_bin_path); + auto result = ie.ReadNetwork(m_out_xml_path, m_out_bin_path); + + bool success; + std::string message; + std::tie(success, message) = + compare_functions(result.getFunction(), expected.getFunction()); + + ASSERT_TRUE(success) << message; +} + +TEST_F(CustomOpsSerializationTest, CustomOpTransformation) { + const std::string model = IR_SERIALIZATION_MODELS_PATH "custom_op.xml"; + + InferenceEngine::Core ie; + auto extension = + InferenceEngine::make_so_pointer( + get_extension_path()); + ie.AddExtension(extension); + auto expected = ie.ReadNetwork(model); + ngraph::pass::Manager manager; + manager.register_pass( + m_out_xml_path, m_out_bin_path, + ngraph::pass::Serialize::Version::IR_V10, extension->getOpSets()); + manager.run_passes(expected.getFunction()); + auto result = ie.ReadNetwork(m_out_xml_path, m_out_bin_path); + + bool success; + std::string message; + std::tie(success, message) = + compare_functions(result.getFunction(), expected.getFunction()); + + ASSERT_TRUE(success) << message; +} diff --git a/inference-engine/tests/functional/inference_engine/ir_serialization/models/custom_op.prototxt b/inference-engine/tests/functional/inference_engine/ir_serialization/models/custom_op.prototxt new file mode 100644 index 00000000000..3d643ab7758 --- /dev/null +++ b/inference-engine/tests/functional/inference_engine/ir_serialization/models/custom_op.prototxt @@ -0,0 +1,66 @@ +# This is syntetic 
model created by hand desined only for white-box unit testing +ir_version: 3 +producer_name: "nGraph ONNX Importer" +graph { + node { + input: "A" + output: "Y" + name: "operation" + op_type: "Template" + domain: "custom_domain" + attribute { + name: "add" + type: INT + i: 11 + } + } + name: "test_graph" + input { + name: "A" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 1 + } + } + } + } + } + output { + name: "Y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 1 + } + } + } + } + } +} +opset_import { + version: 1 + domain: "com.example" +} \ No newline at end of file diff --git a/inference-engine/tests/functional/inference_engine/ir_serialization/models/custom_op.xml b/inference-engine/tests/functional/inference_engine/ir_serialization/models/custom_op.xml new file mode 100644 index 00000000000..a7f9ef8b32f --- /dev/null +++ b/inference-engine/tests/functional/inference_engine/ir_serialization/models/custom_op.xml @@ -0,0 +1,50 @@ + + + + + + + + + 2 + 2 + 2 + 1 + + + + + + + + 2 + 2 + 2 + 1 + + + + + 2 + 2 + 2 + 1 + + + + + + + 2 + 2 + 2 + 1 + + + + + + + + + \ No newline at end of file diff --git a/inference-engine/tests/functional/inference_engine/ir_serialization/models/experimental_detectron_detection_output.xml b/inference-engine/tests/functional/inference_engine/ir_serialization/models/experimental_detectron_detection_output.xml new file mode 100644 index 00000000000..d00f042bac9 --- /dev/null +++ b/inference-engine/tests/functional/inference_engine/ir_serialization/models/experimental_detectron_detection_output.xml @@ -0,0 +1,38 @@ + + + + + + + + + 1 + + + + + + + + 1 + + + + + 1 + + + + + + + 1 + + + + + + + + + diff --git a/inference-engine/tests/functional/inference_engine/ir_serialization/models/experimental_detectron_roi_feature_extractor.xml 
b/inference-engine/tests/functional/inference_engine/ir_serialization/models/experimental_detectron_roi_feature_extractor.xml new file mode 100644 index 00000000000..56eb115b46b --- /dev/null +++ b/inference-engine/tests/functional/inference_engine/ir_serialization/models/experimental_detectron_roi_feature_extractor.xml @@ -0,0 +1,38 @@ + + + + + + + + + 1 + + + + + + + + 1 + + + + + 1 + + + + + + + 1 + + + + + + + + + diff --git a/inference-engine/tests/functional/inference_engine/ir_serialization/serialize.cpp b/inference-engine/tests/functional/inference_engine/ir_serialization/serialize.cpp index f41a4a0c052..a901309ddb7 100644 --- a/inference-engine/tests/functional/inference_engine/ir_serialization/serialize.cpp +++ b/inference-engine/tests/functional/inference_engine/ir_serialization/serialize.cpp @@ -165,3 +165,37 @@ TEST_F(SerializationTest, ModelWithConstants_ONNXImporter) { ASSERT_TRUE(success) << message; } + +TEST_F(SerializationTest, ExperimentalDetectronROIFeatureExtractor_MO) { + const std::string model = IR_SERIALIZATION_MODELS_PATH + "experimental_detectron_roi_feature_extractor.xml"; + + InferenceEngine::Core ie; + auto expected = ie.ReadNetwork(model); + expected.serialize(m_out_xml_path, m_out_bin_path); + auto result = ie.ReadNetwork(m_out_xml_path, m_out_bin_path); + + bool success; + std::string message; + std::tie(success, message) = + compare_functions(result.getFunction(), expected.getFunction()); + + ASSERT_TRUE(success) << message; +} + +TEST_F(SerializationTest, ExperimentalDetectronDetectionOutput_MO) { + const std::string model = IR_SERIALIZATION_MODELS_PATH + "experimental_detectron_detection_output.xml"; + + InferenceEngine::Core ie; + auto expected = ie.ReadNetwork(model); + expected.serialize(m_out_xml_path, m_out_bin_path); + auto result = ie.ReadNetwork(m_out_xml_path, m_out_bin_path); + + bool success; + std::string message; + std::tie(success, message) = + compare_functions(result.getFunction(), expected.getFunction()); + + 
ASSERT_TRUE(success) << message; +}