From af105b86f8e78cdaa5df16a0c3f4913f1745f383 Mon Sep 17 00:00:00 2001
From: Alexandra Sidorova
Date: Mon, 10 Jan 2022 17:51:33 +0300
Subject: [PATCH] [CPU] Fixed Replicate via ov::Model (#9252)

---
 src/bindings/python/tests/__init__.py              |  2 --
 .../python/tests/test_onnx/test_zoo_models.py      |  2 --
 .../python/tests_compatibility/__init__.py         |  2 --
 .../test_onnx/test_zoo_models.py                   |  2 --
 src/plugins/intel_cpu/src/mkldnn_graph.cpp         | 19 +++++-----
 src/plugins/intel_cpu/src/mkldnn_graph.h           |  2 +-
 .../subgraph_tests/simple_if.cpp                   | 25 +++++++++++++
 .../subgraph/simple_if.hpp                         |  5 +++
 .../src/subgraph/simple_if.cpp                     | 35 +++++++++++++++++++
 9 files changed, 74 insertions(+), 20 deletions(-)

diff --git a/src/bindings/python/tests/__init__.py b/src/bindings/python/tests/__init__.py
index ebb26410104..6f04c205919 100644
--- a/src/bindings/python/tests/__init__.py
+++ b/src/bindings/python/tests/__init__.py
@@ -60,8 +60,6 @@ xfail_issue_38708 = xfail_test(reason="RuntimeError: While validating ONNX node
 xfail_issue_38710 = xfail_test(reason="RuntimeError: data has zero dimension which is not allowed")
 xfail_issue_38713 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations: "
                                       "ai.onnx.preview.training.Momentum")
-xfail_issue_45457 = xfail_test(reason="RuntimeError: Unsupported dynamic ops: v5::Loop "
-                                      "Not constant termination condition body output is not supported")
 xfail_issue_38724 = xfail_test(reason="RuntimeError: While validating ONNX node '': "
                                       "tf_crop_and_resize - this type of coordinate transformation mode "
                                       "is not supported. Choose one of the following modes: "
diff --git a/src/bindings/python/tests/test_onnx/test_zoo_models.py b/src/bindings/python/tests/test_onnx/test_zoo_models.py
index d012e9ced3a..4d9199cb1db 100644
--- a/src/bindings/python/tests/test_onnx/test_zoo_models.py
+++ b/src/bindings/python/tests/test_onnx/test_zoo_models.py
@@ -13,7 +13,6 @@ from tests.test_onnx.utils.model_importer import ModelImportRunner
 from tests import (
     xfail_issue_67415,
     xfail_issue_38701,
-    xfail_issue_45457,
     xfail_issue_37957,
     xfail_issue_39669,
     xfail_issue_37973,
@@ -193,7 +192,6 @@ if len(zoo_models) > 0:
         (xfail_issue_39669, "test_MSFT_opset9_cgan_cgan_cpu"),
         (xfail_issue_47495, "test_MSFT_opset10_BERT_Squad_bertsquad10_cpu"),
-        (xfail_issue_45457, "test_MSFT_opset10_mlperf_ssd_resnet34_1200_ssd_resnet34_mAP_20.2_cpu"),
         (xfail_issue_63643, "test_MSFT_opset10_mlperf_ssd_mobilenet_300_ssd_mobilenet_v1_coco_2018_01_28_cpu"),
     ]
     for test_case in import_xfail_list + execution_xfail_list:
diff --git a/src/bindings/python/tests_compatibility/__init__.py b/src/bindings/python/tests_compatibility/__init__.py
index 1cb86e1a3b7..2081db27c98 100644
--- a/src/bindings/python/tests_compatibility/__init__.py
+++ b/src/bindings/python/tests_compatibility/__init__.py
@@ -65,8 +65,6 @@ xfail_issue_38708 = xfail_test(reason="RuntimeError: While validating ONNX node
 xfail_issue_38710 = xfail_test(reason="RuntimeError: data has zero dimension which is not allowed")
 xfail_issue_38713 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations: "
                                       "ai.onnx.preview.training.Momentum")
-xfail_issue_45457 = xfail_test(reason="RuntimeError: Unsupported dynamic ops: v5::Loop "
-                                      "Not constant termination condition body output is not supported")
 xfail_issue_38722 = xfail_test(reason="RuntimeError: While validating ONNX nodes MatMulInteger "
                                       "and QLinearMatMul "
                                       "Input0 scale and input0 zero point shape must be same and 1")
diff --git a/src/bindings/python/tests_compatibility/test_onnx/test_zoo_models.py b/src/bindings/python/tests_compatibility/test_onnx/test_zoo_models.py
index f24c16e5a28..baa6f853c34 100644
--- a/src/bindings/python/tests_compatibility/test_onnx/test_zoo_models.py
+++ b/src/bindings/python/tests_compatibility/test_onnx/test_zoo_models.py
@@ -12,7 +12,6 @@ from tests_compatibility.test_onnx.utils.model_importer import ModelImportRunner
 from tests_compatibility import (
     xfail_issue_38701,
-    xfail_issue_45457,
     xfail_issue_37957,
     xfail_issue_38084,
     xfail_issue_39669,
@@ -183,7 +182,6 @@ if len(zoo_models) > 0:
         (xfail_issue_39669, "test_MSFT_opset9_cgan_cgan_cpu"),
         (xfail_issue_47495, "test_MSFT_opset10_BERT_Squad_bertsquad10_cpu"),
-        (xfail_issue_45457, "test_MSFT_opset10_mlperf_ssd_resnet34_1200_ssd_resnet34_mAP_20.2_cpu"),
         (xfail_issue_63643, "test_MSFT_opset10_mlperf_ssd_mobilenet_300_ssd_mobilenet_v1_coco_2018_01_28_cpu"),
     ]
     for test_case in import_xfail_list + execution_xfail_list:
diff --git a/src/plugins/intel_cpu/src/mkldnn_graph.cpp b/src/plugins/intel_cpu/src/mkldnn_graph.cpp
index f9374c48610..56ebf892143 100644
--- a/src/plugins/intel_cpu/src/mkldnn_graph.cpp
+++ b/src/plugins/intel_cpu/src/mkldnn_graph.cpp
@@ -85,7 +85,7 @@ template void MKLDNNGraph::CreateGraph(const std::shared_ptr
-void MKLDNNGraph::Replicate(const std::shared_ptr<const ngraph::Function> &subgraph, const MKLDNNExtensionManager::Ptr& extMgr) {
+void MKLDNNGraph::Replicate(const std::shared_ptr<const ov::Model> &subgraph, const MKLDNNExtensionManager::Ptr& extMgr) {
     this->_name = "subgraph";
     this->reuse_io_tensors = false;
@@ -93,7 +93,7 @@ void MKLDNNGraph::Replicate(const std::shared_ptr &subgr
         ngraph::pass::low_precision::LowPrecision::isFunctionQuantized(subgraph);
 
     // Map data object onto producer node
-    std::map<std::shared_ptr<ngraph::Node>, std::pair<MKLDNNNodePtr, int>> op2node;
+    std::map<std::shared_ptr<ngraph::Node>, MKLDNNNodePtr> op2node;
 
     // nodes which has no consumers (output or just unused). But doesn't marked as graph output.
     // Will be stored as fake output separately.
@@ -130,13 +130,13 @@ void MKLDNNGraph::Replicate(const std::shared_ptr &subgr
             outputNodesMap[inputID] = node;
         }
 
+        op2node[op] = node;
+
         for (size_t port = 0; port < op->get_input_size(); port++) {
             auto parentOp = op->get_input_node_shared_ptr(port);
+            auto parentNode = op2node[parentOp];
-            auto portInfo = op2node[parentOp];
-            auto parentNode = portInfo.first;
-
-            MKLDNNEdgePtr edge(new MKLDNNEdge(parentNode, node, getParentOutputPort(op, parentOp, port), port));
+            MKLDNNEdgePtr edge(new MKLDNNEdge(parentNode, node, getParentOutputPort(op, parentOp, port), static_cast<int>(port)));
             node->addEdge(edge);
             graphEdges.push_back(edge);
         }
@@ -145,9 +145,7 @@ void MKLDNNGraph::Replicate(const std::shared_ptr &subgr
                                           ngraph::op::v0::Result::get_type_info_static(),
                                           ngraph::op::v3::Assign::get_type_info_static(),
                                           ngraph::op::v6::Assign::get_type_info_static())) {
-            int outPortIdx = 0;
             for (int oi = 0; oi < op->get_output_size(); oi++) {
-                op2node[op->output(oi).get_node_shared_ptr()] = {node, outPortIdx++};
                 if (op->get_output_target_inputs(oi).empty()) {
                     unusedOutputs.push_back(op->output(oi));
                 }
             }
@@ -157,9 +155,8 @@ void MKLDNNGraph::Replicate(const std::shared_ptr &subgr
 
     // Add stub output node for unused data
     for (auto unusedOutput : unusedOutputs) {
-        auto portInfo = op2node[unusedOutput.get_node_shared_ptr()];
-        auto parentNode = portInfo.first;
-        auto port = portInfo.second;
+        auto parentNode = op2node[unusedOutput.get_node_shared_ptr()];
+        const auto port = unusedOutput.get_index();
         const auto nodeName = std::string("stub_") + std::to_string(unusedOutput.get_index()) + "_" + parentNode->getName();
         const MKLDNNNodePtr outNode = std::make_shared<MKLDNNInputNode>(parentNode->outputShapes[port],
                                                                         parentNode->getOriginalOutputPrecisionAtPort(port),
diff --git a/src/plugins/intel_cpu/src/mkldnn_graph.h b/src/plugins/intel_cpu/src/mkldnn_graph.h
index 93f1b9b1bbc..16e45427d7f 100644
--- a/src/plugins/intel_cpu/src/mkldnn_graph.h
+++ b/src/plugins/intel_cpu/src/mkldnn_graph.h
@@ -220,7 +220,7 @@ protected:
     static mkldnn::engine eng;
 
     void Replicate(const InferenceEngine::CNNNetwork &network, const MKLDNNExtensionManager::Ptr& extMgr);
-    void Replicate(const std::shared_ptr<const ngraph::Function> &subgraph, const MKLDNNExtensionManager::Ptr& extMgr);
+    void Replicate(const std::shared_ptr<const ov::Model> &subgraph, const MKLDNNExtensionManager::Ptr& extMgr);
     void InitGraph();
     void InitNodes();
     void InitDescriptors();
diff --git a/src/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/simple_if.cpp b/src/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/simple_if.cpp
index 5678295d0ca..f9607230c3f 100644
--- a/src/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/simple_if.cpp
+++ b/src/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/simple_if.cpp
@@ -152,4 +152,29 @@ TEST_P(SimpleIfNotConstConditionAndDimsIncreaseTest, CompareWithRefs) {
     run();
 };
 
+// the Split axis in the "SimpleIfNotConstConditionUnusedOutputPortsTest" suite is hardcoded to 1, so shape[axis] must be static
+std::vector<std::vector<ov::test::InputShape>> inputShapes_4 = {
+    {
+        {{}, {{5, 7}}},
+    },
+    {
+        {
+            {-1, 5, -1},
+            {{10, 5, 10}, {2, 5, 5}, {1, 5, 5}}
+        },
+    },
+};
+
+INSTANTIATE_TEST_SUITE_P(smoke_If, SimpleIfNotConstConditionUnusedOutputPortsTest,
+                ::testing::Combine(
+                        ::testing::ValuesIn(inputShapes_4),
+                        ::testing::ValuesIn(inTypes),
+                        ::testing::ValuesIn(conditions),
+                        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
+                SimpleIfNotConstConditionUnusedOutputPortsTest::getTestCaseName);
+
+TEST_P(SimpleIfNotConstConditionUnusedOutputPortsTest, CompareWithRefs) {
+    run();
+};
+
 } // namespace
diff --git a/src/tests/functional/shared_test_classes/include/shared_test_classes/subgraph/simple_if.hpp b/src/tests/functional/shared_test_classes/include/shared_test_classes/subgraph/simple_if.hpp
index 61aab99df09..94c97aef512 100644
--- a/src/tests/functional/shared_test_classes/include/shared_test_classes/subgraph/simple_if.hpp
+++ b/src/tests/functional/shared_test_classes/include/shared_test_classes/subgraph/simple_if.hpp
@@ -58,4 +58,9 @@ protected:
     void compare(const std::vector<ov::Tensor> &expected, const std::vector<ov::Tensor> &actual) override;
 };
 
+class SimpleIfNotConstConditionUnusedOutputPortsTest : public SimpleIfNotConstConditionTest {
+protected:
+    void SetUp() override;
+};
+
 } // namespace SubgraphTestsDefinitions
diff --git a/src/tests/functional/shared_test_classes/src/subgraph/simple_if.cpp b/src/tests/functional/shared_test_classes/src/subgraph/simple_if.cpp
index 4cc469d17b2..acaa0cbcb05 100644
--- a/src/tests/functional/shared_test_classes/src/subgraph/simple_if.cpp
+++ b/src/tests/functional/shared_test_classes/src/subgraph/simple_if.cpp
@@ -250,4 +250,39 @@ void SimpleIfNotConstConditionAndDimsIncreaseTest::compare(const std::vector
+void SimpleIfNotConstConditionUnusedOutputPortsTest::SetUp() {
+    std::vector<ov::test::InputShape> shapes;
+    ov::test::ElementType inType;
+    std::tie(shapes, inType, condition, targetDevice) = this->GetParam();
+
+    init_input_shapes(shapes);
+    for (auto &target : targetStaticShapes)
+        target.emplace_back(ov::Shape{});
+    auto params = ngraph::builder::makeDynamicParams(inType, inputDynamicShapes);
+    params.emplace_back(std::make_shared<ov::op::v0::Parameter>(ov::element::Type_t::boolean, ov::Shape{}));
+
+    auto p1 = std::make_shared<ov::op::v0::Parameter>(inType, inputDynamicShapes[0]);
+    auto p2 = std::make_shared<ov::op::v0::Parameter>(inType, inputDynamicShapes[0]);
+
+    const size_t axis = 1;
+    const size_t dim = inputDynamicShapes[0][axis].get_length();  // should be static for this test suite
+    auto thenOp = ngraph::builder::makeSplit(p1, inType, dim, axis);
+    auto thenRes = std::make_shared<ov::op::v0::Result>(thenOp->output(dim / 2));
+
+    auto elseOp = ngraph::builder::makeSplit(p2, inType, dim, axis);
+    auto elseRes = std::make_shared<ov::op::v0::Result>(elseOp->output(dim - 1));
+
+    auto thenBody = std::make_shared<ov::Model>(ov::OutputVector{thenRes}, ov::ParameterVector{p1});
+    auto elseBody = std::make_shared<ov::Model>(ov::OutputVector{elseRes}, ov::ParameterVector{p2});
+
+    auto ifOp = std::make_shared<ov::op::v8::If>(params[1]);
+    ifOp->set_then_body(thenBody);
+    ifOp->set_else_body(elseBody);
+    ifOp->set_input(params[0], p1, p2);
+    auto ifRes = ifOp->set_output(thenRes, elseRes);
+
+    ov::ResultVector results{std::make_shared<ov::op::v0::Result>(ifRes)};
+    function = std::make_shared<ov::Model>(results, params, "SimpleIfNotConstConditionUnusedOutputPortsTest");
+}
+
 } // namespace SubgraphTestsDefinitions
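
Note (not part of the patch): the core bookkeeping change in MKLDNNGraph::Replicate above is to keep a single map from each source op to its created node and to read the producer's output port from the output descriptor itself, instead of storing a (node, port) pair per output. The following is a minimal, self-contained C++17 sketch of that idea under simplified assumptions; `Op`, `Node`, `Edge`, and `replicate()` are hypothetical stand-ins for ov::Node, MKLDNNNode, MKLDNNEdge, and the real Replicate, not the plugin's API.

```cpp
// Hypothetical, simplified model of the op -> node mapping used when replicating a graph.
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>

struct Op;                                            // stand-in for a source operation (ov::Node)
using OpPtr = std::shared_ptr<Op>;
struct Op {
    std::string name;
    std::vector<std::pair<OpPtr, size_t>> inputs;     // (producer op, producer output index)
    size_t numOutputs = 1;
};

struct Node { std::string name; };                    // stand-in for the replicated graph node
using NodePtr = std::shared_ptr<Node>;

struct Edge { NodePtr parent; NodePtr child; size_t parentPort; size_t childPort; };

// Replicate a topologically ordered op list: one entry per op is enough,
// because the producer's output index comes from the input descriptor itself
// (the patch reads it via Output<Node>::get_index()).
std::vector<Edge> replicate(const std::vector<OpPtr>& orderedOps) {
    std::map<OpPtr, NodePtr> op2node;
    std::vector<Edge> edges;
    for (const auto& op : orderedOps) {
        auto node = std::make_shared<Node>(Node{op->name});
        op2node[op] = node;                           // registered once per op, not per output port
        for (size_t port = 0; port < op->inputs.size(); ++port) {
            const auto& [parentOp, parentPort] = op->inputs[port];
            edges.push_back({op2node[parentOp], node, parentPort, port});
        }
    }
    return edges;
}

int main() {
    auto a = std::make_shared<Op>(Op{"split", {}, 2});           // two outputs, one left unused
    auto b = std::make_shared<Op>(Op{"consumer", {{a, 1}}, 1});  // consumes output port 1 only
    for (const auto& e : replicate({a, b}))
        std::cout << e.parent->name << ":" << e.parentPort << " -> "
                  << e.child->name << ":" << e.childPort << "\n";
}
```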