Hetero creates single result node (#9572)
commit 790f02c0b1 (parent 12ab842970)
@@ -14,6 +14,13 @@ using namespace HeteroTests;
 // this tests load plugin by library name: this is not available during static linkage
 #ifndef OPENVINO_STATIC_LIBRARY
 
+INSTANTIATE_TEST_SUITE_P(smoke_manyTargetInputs, HeteroSyntheticTest,
+                        ::testing::Combine(
+                                ::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
+                                ::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::withMajorNodesFunctions(
+                                        [] {return ngraph::builder::subgraph::makeConvPool2Relu2();}, {"Conv_1"}, true))),
+                        HeteroSyntheticTest::getTestCaseName);
+
 INSTANTIATE_TEST_SUITE_P(smoke_SingleMajorNode, HeteroSyntheticTest,
                         ::testing::Combine(
                                 ::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
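Editor's note (not part of the commit): the new smoke_manyTargetInputs suite drives the hetero plugin with makeConvPool2Relu2 and marks "Conv_1" as the major node, which the synthetic test appears to use to split affinities, so one producer output crosses the subgraph boundary into several consumers. A minimal standalone sketch of that "many target inputs" shape, using only public ngraph opset1 classes (the node choices here are illustrative, not from the commit):

#include <cassert>
#include <memory>
#include <ngraph/opsets/opset1.hpp>

void manyTargetInputsSketch() {
    auto param = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::f32, ngraph::Shape{1, 1, 32, 32});
    auto relu = std::make_shared<ngraph::opset1::Relu>(param);
    // Two consumers of the same output, like the two pooling branches fed by Conv_1
    // in makeConvPool2Relu2:
    auto branch0 = std::make_shared<ngraph::opset1::Abs>(relu);
    auto branch1 = std::make_shared<ngraph::opset1::Abs>(relu);
    // One producer output, two target inputs: if both consumers are assigned to a
    // different device than the producer, the old code emitted one Result per
    // consumer, while the change in this commit emits a single Result for the output.
    assert(relu->output(0).get_target_inputs().size() == 2);
}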
@@ -303,29 +303,37 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwork
     // Break graph using insertion of result parameter split
     NodeMap<ngraph::Node*> subgraphParameterToPrevResult;
     std::vector<std::shared_ptr<ngraph::op::Result>> results;
-    for (auto&& input : subgraphInputs) {
-        if (!ngraph::op::is_parameter(input.get_node()) && !ngraph::op::is_constant(input.get_node())) {
-            auto output = input.get_source_output();
-            output.remove_target_input(input);
+    {
+        std::set<ngraph::Output<ngraph::Node>> subgraphOutputs;
+        for (auto&& input : subgraphInputs) {
+            if (!ngraph::op::is_parameter(input.get_node()) && !ngraph::op::is_constant(input.get_node())) {
+                subgraphOutputs.insert(input.get_source_output());
+            }
+        }
+        for (auto&& output : subgraphOutputs) {
+            auto inputs = output.get_target_inputs();
             auto result = std::make_shared<ngraph::op::Result>(output);
             result->set_friendly_name(output.get_node()->get_friendly_name() + "_" +
                                       std::to_string(output.get_index()) + "_result");
             ngraph::copy_runtime_info(output.get_node_shared_ptr(), result);
-            auto parameter =
-                std::make_shared<ngraph::op::Parameter>(output.get_element_type(), output.get_partial_shape());
-            parameter->set_friendly_name(input.get_node()->get_friendly_name() + "_" +
-                                         std::to_string(input.get_index()) + "_parameter");
-            ngraph::copy_runtime_info(input.get_node()->shared_from_this(), parameter);
-            input.replace_source_output(parameter->output(0));
-            results.push_back(result);
             subgraphIds.emplace(result.get(), subgraphIds[output.get_node()]);
-            subgraphIds.emplace(parameter.get(), subgraphIds[input.get_node()]);
-            subgraphParameterToPrevResult.emplace(parameter.get(), result.get());
-            _blobNameMap.emplace(
-                parameter->get_friendly_name(),
-                output.get_node()->get_friendly_name() + ((output.get_node()->get_output_size() != 1)
-                                                              ? ("." + std::to_string(output.get_index()))
-                                                              : std::string{}));
+            results.push_back(result);
+            for (auto&& input : inputs) {
+                output.remove_target_input(input);
+                auto parameter =
+                    std::make_shared<ngraph::op::Parameter>(output.get_element_type(), output.get_partial_shape());
+                parameter->set_friendly_name(input.get_node()->get_friendly_name() + "_" +
+                                             std::to_string(input.get_index()) + "_parameter");
+                ngraph::copy_runtime_info(input.get_node()->shared_from_this(), parameter);
+                input.replace_source_output(parameter->output(0));
+                subgraphIds.emplace(parameter.get(), subgraphIds[input.get_node()]);
+                subgraphParameterToPrevResult.emplace(parameter.get(), result.get());
+                _blobNameMap.emplace(
+                    parameter->get_friendly_name(),
+                    output.get_node()->get_friendly_name() + ((output.get_node()->get_output_size() != 1)
                                                                  ? ("." + std::to_string(output.get_index()))
                                                                  : std::string{}));
+            }
         }
     }
 
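Editor's summary of the hunk above (not from the commit message): instead of creating one Result per boundary-crossing input, the constructor now first collects the distinct source outputs in a std::set, then creates a single Result per output and one Parameter per consuming input. A condensed standalone sketch of that deduplication step, assuming only the public ngraph headers; collectBoundaryOutputs is a name invented for this sketch:

#include <set>
#include <vector>
#include <ngraph/ngraph.hpp>
#include <ngraph/op/util/op_types.hpp>

// One Result is later created per element of the returned set, no matter how many
// target inputs consume it; duplicates collapse because ngraph::Output is ordered.
std::set<ngraph::Output<ngraph::Node>> collectBoundaryOutputs(
        const std::vector<ngraph::Input<ngraph::Node>>& subgraphInputs) {
    std::set<ngraph::Output<ngraph::Node>> outputs;
    for (auto&& input : subgraphInputs) {
        if (!ngraph::op::is_parameter(input.get_node()) && !ngraph::op::is_constant(input.get_node())) {
            outputs.insert(input.get_source_output());
        }
    }
    return outputs;
}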
@@ -353,6 +361,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwork
             subgraph._affinity = itAffinity->second;
         }
     }
+    results = {};
 
     // Subgraph topological sort
     std::vector<Subgraph> allSubgraphs;
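Editor's note: the added `results = {};` appears to drop the locally held references to the split Result nodes once subgraph affinities have been recorded. A trivial illustration of the idiom (hypothetical helper, not OpenVINO code):

#include <memory>
#include <vector>
#include <ngraph/op/result.hpp>

void dropLocalResults(std::vector<std::shared_ptr<ngraph::op::Result>>& results) {
    // Equivalent in effect to results.clear(): the vector's shared_ptr elements are
    // destroyed, so this container no longer keeps the split Result nodes alive.
    results = {};
}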
@@ -47,6 +47,67 @@ inline std::shared_ptr<ngraph::Function> makeConvPoolRelu(std::vector<size_t> inputShape
     return fnPtr;
 }
 
+inline std::shared_ptr<ngraph::Function> makeConvPool2Relu2(std::vector<size_t> inputShape = {1, 1, 32, 32},
+                                                            ngraph::element::Type_t ngPrc = ngraph::element::Type_t::f32) {
+    auto params = ngraph::builder::makeParams(ngPrc, {inputShape});
+    params.front()->set_friendly_name("Param_1");
+    params.front()->output(0).get_tensor().set_names({"data"});
+    std::vector<size_t> constShape = {inputShape[0], inputShape[2], inputShape[1], inputShape[3]};
+    auto const1 = ngraph::opset1::Constant::create(ngraph::element::i64, ngraph::Shape{4}, constShape);
+    const1->set_friendly_name("Const_1");
+    const1->output(0).get_tensor().set_names({"const1"});
+    auto reshape1 = std::make_shared<ngraph::opset1::Reshape>(params.front(), const1, false);
+    reshape1->set_friendly_name("Reshape_1");
+    reshape1->output(0).get_tensor().set_names({"reshape1"});
+    auto conv1 = ngraph::builder::makeConvolution(reshape1, ngPrc, {1, 3}, {1, 1}, {0, 0}, {0, 0}, {1, 1},
+                                                  ngraph::op::PadType::EXPLICIT, 4);
+    conv1->set_friendly_name("Conv_1");
+    conv1->output(0).get_tensor().set_names({"conv"});
+    std::vector<size_t> stride{1, 1}, padB{0, 0}, padE = padB, kernel{1, 2};
+
+    ngraph::ResultVector results;
+    {
+        auto pool1 = std::make_shared<ngraph::opset1::MaxPool>(conv1, stride, padB, padE, kernel,
+                                                               ngraph::op::RoundingType::FLOOR,
+                                                               ngraph::op::PadType::EXPLICIT);
+        pool1->output(0).get_tensor().set_names({"pool_0"});
+        pool1->set_friendly_name("Pool_1_0");
+        auto relu1 = std::make_shared<ngraph::opset1::Relu>(pool1);
+        relu1->set_friendly_name("Relu_1_0");
+        relu1->output(0).get_tensor().set_names({"relu_0"});
+        ngraph::Shape reluShape = relu1->outputs()[0].get_tensor().get_shape();
+        std::vector<size_t> constShape2 = {1, ngraph::shape_size(reluShape)};
+        auto const2 = ngraph::opset1::Constant::create(ngraph::element::i64, ngraph::Shape{2}, constShape2);
+        const2->output(0).get_tensor().set_names({"const2_0"});
+        const2->set_friendly_name("Const_2_0");
+        auto reshape2 = std::make_shared<ngraph::opset1::Reshape>(relu1, const2, false);
+        reshape2->output(0).get_tensor().set_names({"reshape2_0"});
+        reshape2->set_friendly_name("Reshape_2_0");
+        results.push_back(std::make_shared<ngraph::opset1::Result>(reshape2));
+    }
+    {
+        auto pool1 = std::make_shared<ngraph::opset1::MaxPool>(conv1, stride, padB, padE, kernel,
+                                                               ngraph::op::RoundingType::FLOOR,
+                                                               ngraph::op::PadType::EXPLICIT);
+        pool1->output(0).get_tensor().set_names({"pool_1"});
+        pool1->set_friendly_name("Pool_1_1");
+        auto relu1 = std::make_shared<ngraph::opset1::Relu>(pool1);
+        relu1->set_friendly_name("Relu_1_1");
+        relu1->output(0).get_tensor().set_names({"relu_1"});
+        ngraph::Shape reluShape = relu1->outputs()[0].get_tensor().get_shape();
+        std::vector<size_t> constShape2 = {1, ngraph::shape_size(reluShape)};
+        auto const2 = ngraph::opset1::Constant::create(ngraph::element::i64, ngraph::Shape{2}, constShape2);
+        const2->output(0).get_tensor().set_names({"const2_1"});
+        const2->set_friendly_name("Const_2_1");
+        auto reshape2 = std::make_shared<ngraph::opset1::Reshape>(relu1, const2, false);
+        reshape2->output(0).get_tensor().set_names({"reshape2_1"});
+        reshape2->set_friendly_name("Reshape_2_1");
+        results.push_back(std::make_shared<ngraph::opset1::Result>(reshape2));
+    }
+    std::shared_ptr<ngraph::Function> fnPtr = std::make_shared<ngraph::Function>(results, params);
+    return fnPtr;
+}
+
 inline std::shared_ptr<ngraph::Function> makeConvPoolReluNonZero(std::vector<size_t> inputShape = {1, 1, 32, 32},
                                                                  ngraph::element::Type_t ngPrc = ngraph::element::Type_t::f32) {
     auto params = ngraph::builder::makeParams(ngPrc, {inputShape});
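Editor's note: a small usage sketch (not in the commit) of the new builder. The single Conv_1 output feeds two pool/relu/reshape branches, so the returned function has one parameter and two results; the include path is assumed from where the surrounding builders live:

#include <cassert>
#include <memory>
#include "ngraph_functions/subgraph_builders.hpp"  // assumed location of makeConvPool2Relu2

int main() {
    std::shared_ptr<ngraph::Function> fn = ngraph::builder::subgraph::makeConvPool2Relu2();
    assert(fn->get_parameters().size() == 1);  // "Param_1"
    assert(fn->get_results().size() == 2);     // results from "Reshape_2_0" and "Reshape_2_1"
    return 0;
}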