Added synthetic hetero tests for dynamic shapes (#8050)

Anton Pankratv 2021-10-23 01:20:49 +03:00 committed by GitHub
parent 8b3a7cfc8e
commit c6eaa5d653
6 changed files with 191 additions and 25 deletions

View File

@@ -22,4 +22,51 @@ INSTANTIATE_TEST_SUITE_P(nightly_RandomMajorNodes, HeteroSyntheticTest,
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::_randomMajorNodeFunctions)),
HeteroSyntheticTest::getTestCaseName);
static std::vector<std::function<std::shared_ptr<ngraph::Function>()>> dynamicBuilders = {
[] {return ngraph::builder::subgraph::makeConvPoolReluNonZero();},
};
INSTANTIATE_TEST_SUITE_P(smoke_NonZeroMajorNode_dynamic, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::withMajorNodesFunctions(
dynamicBuilders.front(), {"nonZero_1"}))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_NonZeroMajorNode_dynamic_batch, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::withMajorNodesFunctions(
dynamicBuilders.front(), {"nonZero_1"}, true))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_SingleMajorNode_dynamic, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::singleMajorNodeFunctions(
dynamicBuilders))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(nightly_RandomMajorNodes_dynamic, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::randomMajorNodeFunctions(
dynamicBuilders))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_SingleMajorNode_dynamic_batch, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::singleMajorNodeFunctions(
dynamicBuilders, true))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(nightly_RandomMajorNodes_dynamic_batch, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::randomMajorNodeFunctions(
dynamicBuilders, true))),
HeteroSyntheticTest::getTestCaseName);
} // namespace
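
For context, a minimal self-contained sketch (illustrative names, not part of the commit) of how ::testing::Combine, ::testing::Values, and ::testing::ValuesIn expand into the cross product of plugin lists and generated functions used by the instantiations above; HeteroSyntheticTest::getTestCaseName in the real tests only customizes the printed case names.

// Minimal sketch of gtest value-parameterized instantiation, mirroring the
// pattern above with simplified parameter types. Names are illustrative.
#include <gtest/gtest.h>
#include <string>
#include <tuple>
#include <vector>

using DeviceList = std::vector<std::string>;
using SketchParams = std::tuple<DeviceList, int>;  // (plugin list, function id)

class CombineSketchTest : public ::testing::TestWithParam<SketchParams> {};

TEST_P(CombineSketchTest, RunsForEveryCombination) {
    const auto& devices = std::get<0>(GetParam());
    const int functionId = std::get<1>(GetParam());
    EXPECT_EQ(devices.size(), 2u);
    EXPECT_GE(functionId, 0);
}

static const std::vector<int> functionIds = {0, 1, 2};

// One test case per element of the cross product: 1 device list x 3 function ids.
INSTANTIATE_TEST_SUITE_P(smoke_CombineSketch, CombineSketchTest,
    ::testing::Combine(
        ::testing::Values(DeviceList{"TEMPLATE0", "TEMPLATE1"}),
        ::testing::ValuesIn(functionIds)));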

View File

@@ -303,8 +303,12 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwo
auto output = input.get_source_output();
output.remove_target_input(input);
auto result = std::make_shared<ngraph::op::Result>(output);
result->set_friendly_name(output.get_node()->get_friendly_name()
+ "_" + std::to_string(output.get_index()) + "_result");
ngraph::copy_runtime_info(output.get_node_shared_ptr(), result);
auto parameter = std::make_shared<ngraph::op::Parameter>(output.get_element_type(), output.get_partial_shape());
parameter->set_friendly_name(input.get_node()->get_friendly_name()
+ "_" + std::to_string(input.get_index()) + "_parameter");
ngraph::copy_runtime_info(input.get_node()->shared_from_this(), parameter);
input.replace_source_output(parameter->output(0));
results.push_back(result);
@@ -408,6 +412,40 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwo
itClonedOutput->second->setLayout(externalOutput.second->getLayout());
}
}
auto toLegacyType = [] (const ngraph::element::Type& ngraph_type) {
return (ngraph_type == ngraph::element::f16 || ngraph_type == ngraph::element::bf16) ?
ngraph::element::f32 : ngraph_type;
};
// CNNNetwork converts input and output types to preserve legacy behaviour.
// Here the I/O types are reverted to ngraph types, using the common plugin
// behaviour assumption encoded in `toLegacyType()`.
for (auto&& input : clonedInputs) {
if (!InferenceEngine::details::contains(externalInputsData, input.first)) {
for (auto&& parameter : subgraph._parameters) {
auto name = parameter->get_friendly_name();
if (parameter->get_friendly_name() == input.first) {
input.second->setPrecision(
InferenceEngine::details::convertPrecision(
toLegacyType(parameter->get_element_type())));
}
}
}
}
for (auto&& output : clonedOutputs) {
if (!InferenceEngine::details::contains(externalOutputsData, output.first)) {
for (auto&& result : subgraph._results) {
auto source_output = result->input_value(0);
auto output_name = ngraph::op::util::create_ie_output_name(source_output);
if (output_name == output.first) {
output.second->setPrecision(
InferenceEngine::details::convertPrecision(
toLegacyType(source_output.get_element_type())));
}
}
}
}
++id;
}
if (dumpDotFile) {
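
The toLegacyType() lambda added above can be read in isolation; below is a standalone sketch of the same mapping (it assumes ngraph headers are available and is not part of the commit).

// Sketch of the precision fix-up: intermediate subgraph inputs/outputs keep
// their ngraph element type, except f16/bf16, which common plugins run as f32.
#include <ngraph/type/element_type.hpp>

static ngraph::element::Type toLegacyType(const ngraph::element::Type& ngraph_type) {
    return (ngraph_type == ngraph::element::f16 || ngraph_type == ngraph::element::bf16)
               ? ngraph::element::f32
               : ngraph_type;
}

// Example: toLegacyType(ngraph::element::bf16) == ngraph::element::f32,
//          toLegacyType(ngraph::element::i64)  == ngraph::element::i64.

Note that the conversion is applied only to subgraph boundaries that are not external inputs or outputs of the network (the `contains(externalInputsData, ...)` / `contains(externalOutputsData, ...)` checks), so user-visible precisions set by CNNNetwork stay untouched.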

View File

@@ -40,27 +40,25 @@ void HeteroInferRequest::CreateInferRequest(const std::unordered_map<std::string
IE_THROW() << "Internal error: no information about network's output/input";
}
auto requestBlob([&](const std::string& blobName, InferenceEngine::SoIInferRequestInternal& r) {
auto requestBlob([&](const std::string& blobName, InferenceEngine::SoIInferRequestInternal& r, bool output) {
std::string intermediateBlobName = blobName;
auto itName = subgraphInputToOutputBlobNames.find(blobName);
if (itName != subgraphInputToOutputBlobNames.end()) {
intermediateBlobName = itName->second;
}
BlobMap::iterator itBlob;
bool emplaced = false;
std::tie(itBlob, emplaced) = _blobs.emplace(intermediateBlobName, Blob::Ptr{});
if (emplaced) {
if (InferenceEngine::details::contains(_networkInputs, blobName)) {
if (output) {
if (InferenceEngine::details::contains(_networkOutputs, blobName)) {
_subRequestFromBlobName.emplace(blobName, r._ptr.get());
_blobs.erase(intermediateBlobName);
} else if (InferenceEngine::details::contains(_networkOutputs, blobName)) {
_subRequestFromBlobName.emplace(blobName, r._ptr.get());
_blobs.erase(intermediateBlobName);
} else {
itBlob->second = r->GetBlob(blobName);
auto blob = r->GetBlob(blobName);
_blobs.emplace(intermediateBlobName, r->GetBlob(blobName));
}
} else {
r->SetBlob(blobName, itBlob->second);
if (InferenceEngine::details::contains(_networkInputs, blobName)) {
_subRequestFromBlobName.emplace(blobName, r._ptr.get());
} else {
r->SetBlob(blobName, _blobs.at(intermediateBlobName));
}
}
});
@@ -69,14 +67,14 @@ void HeteroInferRequest::CreateInferRequest(const std::unordered_map<std::string
desc._request = {desc._network._so, desc._network->CreateInferRequest()};
// go over all inputs and get blobs from subnet infer requests
for (auto&& outputInfo : desc._network->GetOutputsInfo()) {
requestBlob(outputInfo.first, desc._request);
requestBlob(outputInfo.first, desc._request, true);
}
}
// go over all outputs and get blobs from subnet infer requests
for (auto&& desc : _inferRequests) {
for (auto&& inputInfo : desc._network->GetInputsInfo()) {
requestBlob(inputInfo.first, desc._request);
requestBlob(inputInfo.first, desc._request, false);
}
}
}
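
A simplified model of the reworked requestBlob logic above, using mock Blob/SubRequest types and illustrative blob names (not the Inference Engine API); the real code additionally remaps a consumer's input name to the producer's output name via subgraphInputToOutputBlobNames, which this sketch skips.

// Each subnetwork output is either owned by its sub-request (when it is a
// network-level output) or cached as an intermediate blob; each subnetwork
// input is either owned by its sub-request (network-level input) or fed from
// the cached intermediate produced by an upstream subgraph.
#include <cassert>
#include <map>
#include <memory>
#include <set>
#include <string>

struct Blob {};                        // stand-in for InferenceEngine::Blob
using BlobPtr = std::shared_ptr<Blob>;

struct SubRequest {                    // stand-in for a hetero sub-request
    std::map<std::string, BlobPtr> blobs;
    BlobPtr GetBlob(const std::string& name) { return blobs[name]; }
    void SetBlob(const std::string& name, const BlobPtr& blob) { blobs[name] = blob; }
};

int main() {
    std::set<std::string> networkInputs{"data"}, networkOutputs{"gather"};
    std::map<std::string, BlobPtr> intermediates;
    std::map<std::string, SubRequest*> subRequestFromBlobName;

    SubRequest first, second;
    first.blobs["nonZero"] = std::make_shared<Blob>();  // produced by subgraph 0

    auto requestBlob = [&](const std::string& name, SubRequest& r, bool output) {
        if (output) {
            if (networkOutputs.count(name)) {
                subRequestFromBlobName.emplace(name, &r);     // user reads it from the sub-request
            } else {
                intermediates.emplace(name, r.GetBlob(name)); // cache for the next subgraph
            }
        } else {
            if (networkInputs.count(name)) {
                subRequestFromBlobName.emplace(name, &r);     // user writes it into the sub-request
            } else {
                r.SetBlob(name, intermediates.at(name));      // consume the cached intermediate
            }
        }
    };

    requestBlob("nonZero", first, true);    // intermediate output of subgraph 0
    requestBlob("gather", second, true);    // network output, stays on sub-request 1
    requestBlob("data", first, false);      // network input, stays on sub-request 0
    requestBlob("nonZero", second, false);  // subgraph 1 reads the cached intermediate
    assert(second.blobs.at("nonZero") == first.blobs.at("nonZero"));
    return 0;
}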

View File

@@ -21,6 +21,8 @@ struct PluginParameter {
struct FunctionParameter {
std::unordered_set<std::string> _majorPluginNodeIds;
std::shared_ptr<ngraph::Function> _function;
bool _dynamic_batch;
uint32_t _seed;
};
using HeteroSyntheticTestParameters = std::tuple<
@@ -38,6 +40,14 @@ struct HeteroSyntheticTest : public testing::WithParamInterface<HeteroSyntheticT
static std::string getTestCaseName(const ::testing::TestParamInfo<HeteroSyntheticTestParameters>& obj);
static std::vector<FunctionParameter> _singleMajorNodeFunctions;
static std::vector<FunctionParameter> _randomMajorNodeFunctions;
static std::vector<FunctionParameter> singleMajorNodeFunctions(
const std::vector<std::function<std::shared_ptr<ngraph::Function>()>>& builders, bool dynamic_batch = false);
static std::vector<FunctionParameter> randomMajorNodeFunctions(
const std::vector<std::function<std::shared_ptr<ngraph::Function>()>>& builders, bool dynamic_batch = false, uint32_t seed = 0);
static std::vector<FunctionParameter> withMajorNodesFunctions(
const std::function<std::shared_ptr<ngraph::Function>()>& builder,
const std::unordered_set<std::string>& majorNodes,
bool dynamic_batch = false);
std::vector<std::string> _registredPlugins;
};
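
A minimal sketch (simplified stand-in types, not the real test header) of the source-compatibility point behind the new factory signatures: because dynamic_batch and seed default to false/0, pre-existing call sites keep compiling unchanged while the new *_dynamic instantiations opt in.

#include <cstddef>
#include <cstdint>
#include <vector>

struct FunctionParameterSketch {
    bool _dynamic_batch;
    uint32_t _seed;
};

// Stand-in for singleMajorNodeFunctions(): one parameter set per builder,
// seed fixed to 0 for non-random generation.
static std::vector<FunctionParameterSketch> singleMajorNodeFunctionsSketch(
        std::size_t builderCount, bool dynamic_batch = false) {
    return std::vector<FunctionParameterSketch>(builderCount,
                                                FunctionParameterSketch{dynamic_batch, 0});
}

int main() {
    auto staticCases  = singleMajorNodeFunctionsSketch(4);        // old-style call, static shapes
    auto dynamicCases = singleMajorNodeFunctionsSketch(4, true);  // new dynamic-batch variants
    return (staticCases.size() == dynamicCases.size()) ? 0 : 1;
}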

View File

@@ -8,6 +8,7 @@
#include "ngraph_functions/builders.hpp"
#include "ngraph_functions/subgraph_builders.hpp"
#include <random>
#include "ie_algorithm.hpp"
namespace HeteroTests {
static std::vector<std::function<std::shared_ptr<ngraph::Function>()>> builders = {
@@ -17,7 +18,19 @@ static std::vector<std::function<std::shared_ptr<ngraph::Function>()>> builders
[] {return ngraph::builder::subgraph::makeSplitConvConcatNestedInBranchNestedOut();},
};
std::vector<FunctionParameter> HeteroSyntheticTest::_singleMajorNodeFunctions{[] {
std::vector<FunctionParameter> HeteroSyntheticTest::withMajorNodesFunctions(
const std::function<std::shared_ptr<ngraph::Function>()>& builder,
const std::unordered_set<std::string>& majorNodes,
bool dynamic_batch) {
auto function = builder();
std::vector<FunctionParameter> result;
result.push_back(FunctionParameter{majorNodes, function, dynamic_batch, 0});
return result;
}
std::vector<FunctionParameter> HeteroSyntheticTest::singleMajorNodeFunctions(
const std::vector<std::function<std::shared_ptr<ngraph::Function>()>>& builders,
bool dynamic_batch) {
std::vector<FunctionParameter> result;
for (auto&& builder : builders) {
auto function = builder();
@@ -25,17 +38,23 @@ std::vector<FunctionParameter> HeteroSyntheticTest::_singleMajorNodeFunctions{[]
if (!ngraph::op::is_constant(node) &&
!(ngraph::op::is_parameter(node)) &&
!(ngraph::op::is_output(node))) {
result.push_back(FunctionParameter{{node->get_friendly_name()}, function});
result.push_back(FunctionParameter{{node->get_friendly_name()}, function, dynamic_batch, 0});
}
}
}
return result;
} ()};
}
std::vector<FunctionParameter> HeteroSyntheticTest::_randomMajorNodeFunctions{[] {
std::vector<FunctionParameter> HeteroSyntheticTest::randomMajorNodeFunctions(
const std::vector<std::function<std::shared_ptr<ngraph::Function>()>>& builders,
bool dynamic_batch,
uint32_t seed) {
std::vector<FunctionParameter> results;
for (auto p = 0.2; p < 1.; p+=0.2) {
std::mt19937 e{std::random_device {} ()};
while (seed == 0) {
seed = std::random_device {}();
}
std::mt19937 e{seed};
std::bernoulli_distribution d{p};
for (auto&& builder : builders) {
auto function = builder();
@@ -54,12 +73,18 @@ std::vector<FunctionParameter> HeteroSyntheticTest::_randomMajorNodeFunctions{[]
})) {
continue;
}
results.push_back(FunctionParameter{majorPluginNodeIds, function});
results.push_back(FunctionParameter{majorPluginNodeIds, function, dynamic_batch, seed});
}
}
}
return results;
} ()};
}
std::vector<FunctionParameter> HeteroSyntheticTest::_singleMajorNodeFunctions
= HeteroSyntheticTest::singleMajorNodeFunctions(builders);
std::vector<FunctionParameter> HeteroSyntheticTest::_randomMajorNodeFunctions
= HeteroSyntheticTest::randomMajorNodeFunctions(builders);
std::string HeteroSyntheticTest::getTestCaseName(const ::testing::TestParamInfo<HeteroSyntheticTestParameters>& obj) {
std::vector<PluginParameter> pluginParameters;
@@ -108,6 +133,14 @@ void HeteroSyntheticTest::SetUp() {
--num;
}
function = std::get<Function>(param)._function;
if (std::get<Function>(param)._dynamic_batch) {
for (auto&& input : function->inputs()) {
auto shape = input.get_partial_shape();
targetStaticShapes.emplace_back(1, shape.to_shape());
shape[0] = ov::Dimension(1, 16);
inputDynamicShapes.push_back(shape);
}
}
}
void HeteroSyntheticTest::TearDown() {
@@ -135,10 +168,11 @@ std::string HeteroSyntheticTest::SetUpAffinity() {
affinity = pluginParameters.at(1)._name;
}
node->get_rt_info()["affinity"] = std::make_shared<ngraph::VariantWrapper<std::string>>(affinity);
affinities += "\t{" + node->get_friendly_name() + ",\t\t" + affinity + "}\n";
affinities += "\t{\"" + node->get_friendly_name() + "\",\t\t\"" + affinity + "\"}\n";
}
}
affinities += "}";
affinities += "\nseed = " + std::to_string(std::get<Function>(param)._seed);
return affinities;
}
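
Two behaviours added in this file can be checked in isolation: the random major-node selection now uses a recorded non-zero seed (printed by SetUpAffinity) instead of a throwaway std::random_device, so failing cases are reproducible; and dynamic-batch mode keeps the builder's static shape as a target shape while relaxing the batch dimension to the interval [1, 16]. Below is a standalone sketch of the shape part, using plain ngraph types (in the real base class targetStaticShapes/inputDynamicShapes are nested containers).

// Sketch of the dynamic-batch shape preparation, assuming ngraph headers.
#include <ngraph/ngraph.hpp>
#include <vector>

int main() {
    ngraph::PartialShape shape{1, 1, 32, 32};           // static input shape of the builder
    std::vector<ngraph::Shape> targetStaticShapes{shape.to_shape()};  // shape to infer with
    shape[0] = ngraph::Dimension(1, 16);                // batch becomes the interval [1, 16]
    std::vector<ngraph::PartialShape> inputDynamicShapes{shape};
    return shape.is_dynamic() ? 0 : 1;                  // the reshaped input is now dynamic
}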

View File

@@ -47,6 +47,45 @@ inline std::shared_ptr<ngraph::Function> makeConvPoolRelu(std::vector<size_t> in
return fnPtr;
}
inline std::shared_ptr<ngraph::Function> makeConvPoolReluNonZero(std::vector<size_t> inputShape = {1, 1, 32, 32},
ngraph::element::Type_t ngPrc = ngraph::element::Type_t::f32) {
auto params = ngraph::builder::makeParams(ngPrc, {inputShape});
params.front()->set_friendly_name("Param_1");
params.front()->output(0).get_tensor().set_names({"data"});
auto conv1 = ngraph::builder::makeConvolution(params.front(), ngPrc, {1, 3}, {1, 1}, {0, 0}, {0, 0}, {1, 1},
ngraph::op::PadType::EXPLICIT, 4);
conv1->set_friendly_name("Conv_1");
conv1->output(0).get_tensor().set_names({"conv"});
std::vector<size_t> stride{1, 1}, padB{0, 0}, padE = padB, kernel{1, 2};
auto pool1 = std::make_shared<ngraph::opset1::MaxPool>(conv1, stride, padB, padE, kernel,
ngraph::op::RoundingType::FLOOR,
ngraph::op::PadType::EXPLICIT);
pool1->output(0).get_tensor().set_names({"pool"});
pool1->set_friendly_name("Pool_1");
auto relu1 = std::make_shared<ngraph::opset1::Relu>(pool1);
relu1->set_friendly_name("Relu_1");
relu1->output(0).get_tensor().set_names({"relu"});
auto nonZero = std::make_shared<ngraph::op::NonZero>(relu1);
nonZero->set_friendly_name("nonZero_1");
nonZero->output(0).get_tensor().set_names({"nonZero"});
auto gatherIndices = std::make_shared<ngraph::op::Constant>(ngraph::element::i64,
ngraph::Shape{1},
std::vector<int64_t>{0});
gatherIndices->set_friendly_name("gatherIndices_1");
gatherIndices->output(0).get_tensor().set_names({"gatherIndices"});
auto gatherAxis = std::make_shared<ngraph::op::Constant>(ngraph::element::i64,
ngraph::Shape{1},
std::vector<int64_t>{1});
gatherAxis->set_friendly_name("gatherAxis_1");
gatherAxis->output(0).get_tensor().set_names({"gatherAxis"});
auto gather = std::make_shared<ngraph::opset1::Gather>(nonZero->output(0), gatherIndices, gatherAxis);
gather->set_friendly_name("gather_1");
gather->output(0).get_tensor().set_names({"gather"});
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(gather)};
std::shared_ptr<ngraph::Function> fnPtr = std::make_shared<ngraph::Function>(results, params);
return fnPtr;
}
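
The NonZero node is what makes this builder exercise dynamic shapes: its output shape depends on the number of non-zero elements at runtime, so the consuming Gather is dynamic even though the function's input shape is fully static. A small sketch (ngraph opset3, assuming ngraph headers; not part of the commit) demonstrating that property:

#include <memory>
#include <ngraph/ngraph.hpp>
#include <ngraph/opsets/opset3.hpp>

int main() {
    auto param = std::make_shared<ngraph::opset3::Parameter>(
        ngraph::element::f32, ngraph::Shape{1, 1, 32, 32});
    auto nonZero = std::make_shared<ngraph::opset3::NonZero>(param);
    // Output shape is {input rank, ?}: static row count, data-dependent column count.
    return nonZero->get_output_partial_shape(0).is_dynamic() ? 0 : 1;
}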
inline std::shared_ptr<ngraph::Function> makeSplitConvConcat(std::vector<size_t> inputShape = {1, 4, 20, 20},
ngraph::element::Type_t ngPrc = ngraph::element::Type_t::f32) {
auto params = ngraph::builder::makeParams(ngPrc, {inputShape});