[CPU] Fixed Replicate via ov::Model (#9252)

Alexandra Sidorova 2022-01-10 17:51:33 +03:00 committed by GitHub
parent 8fe5484645
commit af105b86f8
9 changed files with 74 additions and 20 deletions

View File

@@ -60,8 +60,6 @@ xfail_issue_38708 = xfail_test(reason="RuntimeError: While validating ONNX node
xfail_issue_38710 = xfail_test(reason="RuntimeError: data has zero dimension which is not allowed")
xfail_issue_38713 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations: "
"ai.onnx.preview.training.Momentum")
-xfail_issue_45457 = xfail_test(reason="RuntimeError: Unsupported dynamic ops: v5::Loop "
-"Not constant termination condition body output is not supported")
xfail_issue_38724 = xfail_test(reason="RuntimeError: While validating ONNX node '<Node(Resize): Y>': "
"tf_crop_and_resize - this type of coordinate transformation mode "
"is not supported. Choose one of the following modes: "

View File

@@ -13,7 +13,6 @@ from tests.test_onnx.utils.model_importer import ModelImportRunner
from tests import (
xfail_issue_67415,
xfail_issue_38701,
-xfail_issue_45457,
xfail_issue_37957,
xfail_issue_39669,
xfail_issue_37973,
@@ -193,7 +192,6 @@ if len(zoo_models) > 0:
(xfail_issue_39669, "test_MSFT_opset9_cgan_cgan_cpu"),
(xfail_issue_47495, "test_MSFT_opset10_BERT_Squad_bertsquad10_cpu"),
-(xfail_issue_45457, "test_MSFT_opset10_mlperf_ssd_resnet34_1200_ssd_resnet34_mAP_20.2_cpu"),
(xfail_issue_63643, "test_MSFT_opset10_mlperf_ssd_mobilenet_300_ssd_mobilenet_v1_coco_2018_01_28_cpu"),
]
for test_case in import_xfail_list + execution_xfail_list:

View File

@@ -65,8 +65,6 @@ xfail_issue_38708 = xfail_test(reason="RuntimeError: While validating ONNX node
xfail_issue_38710 = xfail_test(reason="RuntimeError: data has zero dimension which is not allowed")
xfail_issue_38713 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations: "
"ai.onnx.preview.training.Momentum")
-xfail_issue_45457 = xfail_test(reason="RuntimeError: Unsupported dynamic ops: v5::Loop "
-"Not constant termination condition body output is not supported")
xfail_issue_38722 = xfail_test(reason="RuntimeError: While validating ONNX nodes MatMulInteger "
"and QLinearMatMul "
"Input0 scale and input0 zero point shape must be same and 1")

View File

@@ -12,7 +12,6 @@ from tests_compatibility.test_onnx.utils.model_importer import ModelImportRunner
from tests_compatibility import (
xfail_issue_38701,
-xfail_issue_45457,
xfail_issue_37957,
xfail_issue_38084,
xfail_issue_39669,
@@ -183,7 +182,6 @@ if len(zoo_models) > 0:
(xfail_issue_39669, "test_MSFT_opset9_cgan_cgan_cpu"),
(xfail_issue_47495, "test_MSFT_opset10_BERT_Squad_bertsquad10_cpu"),
-(xfail_issue_45457, "test_MSFT_opset10_mlperf_ssd_resnet34_1200_ssd_resnet34_mAP_20.2_cpu"),
(xfail_issue_63643, "test_MSFT_opset10_mlperf_ssd_mobilenet_300_ssd_mobilenet_v1_coco_2018_01_28_cpu"),
]
for test_case in import_xfail_list + execution_xfail_list:

View File

@@ -85,7 +85,7 @@ template void MKLDNNGraph::CreateGraph(const std::shared_ptr<const ngraph::Funct
template void MKLDNNGraph::CreateGraph(const CNNNetwork&,
const MKLDNNExtensionManager::Ptr&, MKLDNNWeightsSharing::Ptr&);
-void MKLDNNGraph::Replicate(const std::shared_ptr<const ngraph::Function> &subgraph, const MKLDNNExtensionManager::Ptr& extMgr) {
+void MKLDNNGraph::Replicate(const std::shared_ptr<const ov::Model> &subgraph, const MKLDNNExtensionManager::Ptr& extMgr) {
this->_name = "subgraph";
this->reuse_io_tensors = false;
@@ -93,7 +93,7 @@ void MKLDNNGraph::Replicate(const std::shared_ptr<const ngraph::Function> &subgr
ngraph::pass::low_precision::LowPrecision::isFunctionQuantized(subgraph);
// Map data object onto producer node
-std::map<std::shared_ptr<ngraph::Node>, std::pair<MKLDNNNodePtr, int>> op2node;
+std::map<std::shared_ptr<ov::Node>, MKLDNNNodePtr> op2node;
// nodes which has no consumers (output or just unused). But doesn't marked as graph output.
// Will be stored as fake output separately.
@@ -130,13 +130,13 @@ void MKLDNNGraph::Replicate(const std::shared_ptr<const ngraph::Function> &subgr
outputNodesMap[inputID] = node;
}
+op2node[op] = node;
for (size_t port = 0; port < op->get_input_size(); port++) {
auto parentOp = op->get_input_node_shared_ptr(port);
-auto portInfo = op2node[parentOp];
-auto parentNode = portInfo.first;
+auto parentNode = op2node[parentOp];
-MKLDNNEdgePtr edge(new MKLDNNEdge(parentNode, node, getParentOutputPort(op, parentOp, port), port));
+MKLDNNEdgePtr edge(new MKLDNNEdge(parentNode, node, getParentOutputPort(op, parentOp, port), static_cast<int>(port)));
node->addEdge(edge);
graphEdges.push_back(edge);
}
@@ -145,9 +145,7 @@ void MKLDNNGraph::Replicate(const std::shared_ptr<const ngraph::Function> &subgr
ngraph::op::v0::Result::get_type_info_static(),
ngraph::op::v3::Assign::get_type_info_static(),
ngraph::op::v6::Assign::get_type_info_static())) {
-int outPortIdx = 0;
for (int oi = 0; oi < op->get_output_size(); oi++) {
-op2node[op->output(oi).get_node_shared_ptr()] = {node, outPortIdx++};
if (op->get_output_target_inputs(oi).empty()) {
unusedOutputs.push_back(op->output(oi));
}
@@ -157,9 +155,8 @@ void MKLDNNGraph::Replicate(const std::shared_ptr<const ngraph::Function> &subgr
// Add stub output node for unused data
for (auto unusedOutput : unusedOutputs) {
-auto portInfo = op2node[unusedOutput.get_node_shared_ptr()];
-auto parentNode = portInfo.first;
-auto port = portInfo.second;
+auto parentNode = op2node[unusedOutput.get_node_shared_ptr()];
+const auto port = unusedOutput.get_index();
const auto nodeName = std::string("stub_") + std::to_string(unusedOutput.get_index()) + "_" + parentNode->getName();
const MKLDNNNodePtr outNode = std::make_shared<MKLDNNInputNode>(parentNode->outputShapes[port],
parentNode->getOriginalOutputPrecisionAtPort(port),
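
As context for the hunks above: the reworked Replicate maps each producer op of the ov::Model directly to its MKLDNN node (instead of a (node, port) pair) and recovers the port of an unused output from the ov::Output itself. Below is a minimal, hedged sketch of that unused-output scan using only the public OpenVINO node API; the helper name collect_unused_outputs and the exact include set are illustrative, not part of the plugin.

// Minimal sketch, assuming the public OpenVINO C++ API; collect_unused_outputs is an illustrative helper name.
#include <memory>
#include <vector>
#include <openvino/core/model.hpp>
#include <openvino/core/type.hpp>
#include <openvino/op/assign.hpp>
#include <openvino/op/result.hpp>

std::vector<ov::Output<ov::Node>> collect_unused_outputs(const std::shared_ptr<const ov::Model>& model) {
    std::vector<ov::Output<ov::Node>> unused;
    for (const auto& op : model->get_ordered_ops()) {
        // Skip Result and Assign ops, mirroring the type check in the hunk above.
        if (ov::is_type<ov::op::v0::Result>(op) ||
            ov::is_type<ov::op::v3::Assign>(op) ||
            ov::is_type<ov::op::v6::Assign>(op))
            continue;
        for (size_t oi = 0; oi < op->get_output_size(); ++oi) {
            if (op->get_output_target_inputs(oi).empty())
                unused.push_back(op->output(oi));  // port index is recovered later via Output::get_index()
        }
    }
    return unused;
}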

View File

@@ -220,7 +220,7 @@ protected:
static mkldnn::engine eng;
void Replicate(const InferenceEngine::CNNNetwork &network, const MKLDNNExtensionManager::Ptr& extMgr);
-void Replicate(const std::shared_ptr<const ngraph::Function> &subgraph, const MKLDNNExtensionManager::Ptr& extMgr);
+void Replicate(const std::shared_ptr<const ov::Model> &subgraph, const MKLDNNExtensionManager::Ptr& extMgr);
void InitGraph();
void InitNodes();
void InitDescriptors();

View File

@@ -152,4 +152,29 @@ TEST_P(SimpleIfNotConstConditionAndDimsIncreaseTest, CompareWithRefs) {
run();
};
+// the axis of split in test suit "SimpleIfNotConstConditionUnusedOutputPortsTest" is hardcoded as 1, so shape[axis] should be static
+std::vector<std::vector<ov::test::InputShape>> inputShapes_4 = {
+    {
+        {{}, {{5, 7}}},
+    },
+    {
+        {
+            {-1, 5, -1},
+            {{10, 5, 10}, {2, 5, 5}, {1, 5, 5}}
+        },
+    },
+};
+
+INSTANTIATE_TEST_SUITE_P(smoke_If, SimpleIfNotConstConditionUnusedOutputPortsTest,
+                ::testing::Combine(
+                        ::testing::ValuesIn(inputShapes_4),
+                        ::testing::ValuesIn(inTypes),
+                        ::testing::ValuesIn(conditions),
+                        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
+                SimpleIfNotConstConditionUnusedOutputPortsTest::getTestCaseName);
+
+TEST_P(SimpleIfNotConstConditionUnusedOutputPortsTest, CompareWithRefs) {
+    run();
+};
} // namespace

View File

@@ -58,4 +58,9 @@ protected:
void compare(const std::vector<ov::runtime::Tensor> &expected, const std::vector<ov::runtime::Tensor> &actual) override;
};
+class SimpleIfNotConstConditionUnusedOutputPortsTest : public SimpleIfNotConstConditionTest {
+protected:
+    void SetUp() override;
+};
} // namespace SubgraphTestsDefinitions

View File

@@ -250,4 +250,39 @@ void SimpleIfNotConstConditionAndDimsIncreaseTest::compare(const std::vector<ov:
SubgraphBaseTest::compare(expected, actual);
}
+void SimpleIfNotConstConditionUnusedOutputPortsTest::SetUp() {
+    std::vector<ov::test::InputShape> shapes;
+    ov::test::ElementType inType;
+    std::tie(shapes, inType, condition, targetDevice) = this->GetParam();
+
+    init_input_shapes(shapes);
+    for (auto &target : targetStaticShapes)
+        target.emplace_back(ov::Shape{});
+    auto params = ngraph::builder::makeDynamicParams(inType, inputDynamicShapes);
+    params.emplace_back(std::make_shared<ov::op::v0::Parameter>(ov::element::Type_t::boolean, ov::Shape{}));
+
+    auto p1 = std::make_shared<ov::op::v0::Parameter>(inType, inputDynamicShapes[0]);
+    auto p2 = std::make_shared<ov::op::v0::Parameter>(inType, inputDynamicShapes[0]);
+
+    const size_t axis = 1;
+    const size_t dim = inputDynamicShapes[0][axis].get_length(); // should be static for this test suit
+    auto thenOp = ngraph::builder::makeSplit(p1, inType, dim, axis);
+    auto thenRes = std::make_shared<ov::op::v0::Result>(thenOp->output(dim / 2));
+    auto elseOp = ngraph::builder::makeSplit(p2, inType, dim, axis);
+    auto elseRes = std::make_shared<ov::op::v0::Result>(elseOp->output(dim - 1));
+
+    auto thenBody = std::make_shared<ov::Model>(ov::OutputVector{thenRes}, ov::ParameterVector{p1});
+    auto elseBody = std::make_shared<ov::Model>(ov::OutputVector{elseRes}, ov::ParameterVector{p2});
+
+    auto ifOp = std::make_shared<ov::op::v8::If>(params[1]);
+    ifOp->set_then_body(thenBody);
+    ifOp->set_else_body(elseBody);
+    ifOp->set_input(params[0], p1, p2);
+    auto ifRes = ifOp->set_output(thenRes, elseRes);
+
+    ov::ResultVector results{std::make_shared<ov::op::v0::Result>(ifRes)};
+    function = std::make_shared<ov::Model>(results, params, "SimpleIfNotConstConditionUnusedOutputPortsTest");
+}
} // namespace SubgraphTestsDefinitions