From a889acec5392662879ec530f8396ef3b108c8cbc Mon Sep 17 00:00:00 2001
From: Elizaveta Lobanova
Date: Fri, 27 Nov 2020 15:52:44 +0300
Subject: [PATCH] [GNA] Fixes for concat with multiple const inputs (#3130)

---
 .../gna_plugin/frontend/layer_quantizer.hpp   | 11 ++--
 .../gna_plugin/optimizer/gna_pass_manager.cpp | 16 ++++--
 .../skip_tests_config.cpp                     |  4 ++
 .../subgraph_tests/concat_multi_input.hpp     |  7 +++
 .../src/subgraph_tests/concat_multi_input.cpp | 54 +++++++++++++++++--
 5 files changed, 80 insertions(+), 12 deletions(-)

diff --git a/inference-engine/src/gna_plugin/frontend/layer_quantizer.hpp b/inference-engine/src/gna_plugin/frontend/layer_quantizer.hpp
index f50aa9007f2..4cbdd494fd4 100644
--- a/inference-engine/src/gna_plugin/frontend/layer_quantizer.hpp
+++ b/inference-engine/src/gna_plugin/frontend/layer_quantizer.hpp
@@ -476,7 +476,8 @@ class DataQuantizer<Desc, InferenceEngine::CNNLayer *> : public DataQuantizerBas
         if (LayerInfo(*cnnLayer).isActivation() ||
             LayerInfo(*cnnLayer).isCopy() ||
             LayerInfo(*cnnLayer).isNonFunctional() ||
-            LayerInfo(*cnnLayer).isPermute()) {
+            LayerInfo(*cnnLayer).isPermute() ||
+            LayerInfo(*cnnLayer).isConst()) {
             // precision of activation layers is always equal input precision
             for (auto &&outData : cnnLayer->outData) {
                 outData->setPrecision(Desc::mandatory().getInputPrecision());
@@ -485,8 +486,12 @@ class DataQuantizer<Desc, InferenceEngine::CNNLayer *> : public DataQuantizerBas
         }
         cnnLayer->precision = Desc::mandatory().getInputPrecision();
 
-        if (cnnLayer->type == "Const") {
-            if (cnnLayer->blobs["custom"]->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP16) {
+        if (LayerInfo(*cnnLayer).isConst()) {
+            auto initial_precision = cnnLayer->blobs["custom"]->getTensorDesc().getPrecision();
+            // TODO I32 must be handled separately when it'll be supported
+            IE_ASSERT(initial_precision != InferenceEngine::Precision::I32);
+
+            if (initial_precision == InferenceEngine::Precision::FP16) {
                 cnnLayer->blobs["custom"] = make_fp32_blob(cnnLayer->blobs["custom"]);
             }
             auto const_scale_factor = InferenceEngine::getInjectedData<QuantizedLayerParams>(*cnnLayer)->_dst_quant.GetScale();
diff --git a/inference-engine/src/gna_plugin/optimizer/gna_pass_manager.cpp b/inference-engine/src/gna_plugin/optimizer/gna_pass_manager.cpp
index ff1ac35a3cc..26b2150e80e 100644
--- a/inference-engine/src/gna_plugin/optimizer/gna_pass_manager.cpp
+++ b/inference-engine/src/gna_plugin/optimizer/gna_pass_manager.cpp
@@ -1668,11 +1668,17 @@ void FuseMultipleIdentitiesPass::run() {
         };
 
         auto prevLayersReached = CNNNetGetPrevLayersSkip(l, isFunctional);
-        prevLayersReached.erase(std::remove_if(prevLayersReached.begin(),
-                                               prevLayersReached.end(),
-                                               [] (const std::pair<InferenceEngine::CNNLayerPtr, int> & candidate) {
-            return LayerInfo(candidate.first).isLink();
-        }), prevLayersReached.end());
+        if (!prevLayersReached.empty()) {
+            prevLayersReached.erase(std::remove_if(prevLayersReached.begin(),
+                                                   prevLayersReached.end(),
+                                                   [] (const std::pair<InferenceEngine::CNNLayerPtr, int> & candidate) {
+                return LayerInfo(candidate.first).isLink();
+            }), prevLayersReached.end());
+            if (prevLayersReached.empty()) {
+                gnalog() << ", connected to link output only" << std::endl;
+                continue;
+            }
+        }
 
         if (prevLayersReached.size() != 1) {
             std::stringstream layers;
diff --git a/inference-engine/tests/functional/plugin/gna/shared_tests_instances/skip_tests_config.cpp b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/skip_tests_config.cpp
index 685d9d0e96c..0205fe3802c 100644
--- a/inference-engine/tests/functional/plugin/gna/shared_tests_instances/skip_tests_config.cpp
+++ b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/skip_tests_config.cpp
@@ -43,6 +43,10 @@ std::vector<std::string> disabledTestPatterns() {
         // TODO: Issue 39358
         R"(.*unaligned.*MultipleConcatTest.*)",
         R"(.*ActivationConcatsEltwise.*CS=35.*)",
+        // TODO: Issue 38974
+        R"(.*ConcatMultiInput.CompareWithRefConstOnly.*IS=\(1.8\).*)",
+        R"(.*ConcatMultiInput.CompareWithRefConstOnly.*IS=\(1.16\).*)",
+        R"(.*ConcatMultiInput.CompareWithRefConstOnly.*IS=\(1.32\).*)",
         // TODO: Issue: 40960
         R"(.*(ConstantResultSubgraphTest).*)",
         // TODO: Issue: 29577
diff --git a/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/concat_multi_input.hpp b/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/concat_multi_input.hpp
index 8f0e06d41e4..eb7147bd117 100644
--- a/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/concat_multi_input.hpp
+++ b/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/concat_multi_input.hpp
@@ -24,7 +24,14 @@ namespace LayerTestsDefinitions {
 
 class ConcatMultiInput : public testing::WithParamInterface<concatMultiParams>,
                          virtual public LayerTestsUtils::LayerTestsCommon {
+private:
+    std::vector<size_t> paramSize;
+    ngraph::element::Type ngPrc;
+    std::vector<std::vector<size_t>> inputShapes;
+
 public:
+    void GenerateStridedSliceModel();
+    void GenerateConstOnlyModel();
     static std::string getTestCaseName(testing::TestParamInfo<concatMultiParams> obj);
 
 protected:
diff --git a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/concat_multi_input.cpp b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/concat_multi_input.cpp
index 1d70dfe0448..8c51603c381 100644
--- a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/concat_multi_input.cpp
+++ b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/concat_multi_input.cpp
@@ -38,17 +38,19 @@ std::string ConcatMultiInput::getTestCaseName(testing::TestParamInfo<concatMult
 }
 
 void ConcatMultiInput::SetUp() {
-    std::vector<std::vector<size_t>> inputShapes;
     InferenceEngine::Precision netPrecision;
     std::map<std::string, std::string> additional_config;
     std::tie(inputShapes, netPrecision, targetDevice, additional_config) = this->GetParam();
     configuration.insert(additional_config.begin(), additional_config.end());
 
-    auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
-    std::vector<size_t> paramSize = { 1, 0 };
+    ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
+    paramSize = { 1, 0 };
     for (const auto& val : inputShapes) {
         paramSize[1] += val[1];
     }
+}
+
+void ConcatMultiInput::GenerateStridedSliceModel() {
     auto params = ngraph::builder::makeParams(ngPrc, { paramSize });
 
     auto stride = std::make_shared<ngraph::opset1::Constant>(ngraph::element::i64, ngraph::Shape{ 2 }, std::vector<int64_t>{ 1, 1 });
@@ -80,9 +82,53 @@ void ConcatMultiInput::SetUp() {
     function = std::make_shared<ngraph::Function>(results, params, "ConcatMultiInput");
 }
 
-TEST_P(ConcatMultiInput, CompareWithRefImpl) {
+void ConcatMultiInput::GenerateConstOnlyModel() {
+    ngraph::OutputVector concatInputs;
+
+    const int seed = 0;
+    std::mt19937 gen(static_cast<float>(seed));
+
+    auto generateFloatNumbers = [gen](std::size_t vec_len, float min, float max) mutable {
+        std::vector<float> res;
+
+        std::uniform_real_distribution<float> dist(min, max);
+        for (int i = 0; i < vec_len; i++)
+            res.emplace_back(static_cast<float>(dist(gen)));
+
+        return res;
+    };
+    ngraph::ParameterVector input_vector;
+    for (size_t i = 0; i < inputShapes.size(); ++i) {
+        size_t total_size = 1;
+        for (auto dim : inputShapes[i]) {
+            total_size *= dim;
+        }
+        if (i == 0) {
+            input_vector = ngraph::builder::makeParams(ngPrc, {{1, total_size}});
+            auto relu = ngraph::builder::makeActivation(input_vector[0], ngPrc, ngraph::helpers::ActivationTypes::Relu);
+            concatInputs.push_back(relu);
+        } else {
+            auto min_max = (i % 2 == 0) ? 2 : 30;
+            auto const_values = generateFloatNumbers(total_size, -min_max, min_max);
+            auto const_node = ngraph::builder::makeConstant(ngPrc, {1, total_size}, const_values);
+            concatInputs.push_back(const_node);
+        }
+    }
+
+    auto concat = ngraph::builder::makeConcat(concatInputs, 1);
+
+    ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(concat) };
+    function = std::make_shared<ngraph::Function>(results, input_vector, "ConcatConstOnly");
+}
+
+TEST_P(ConcatMultiInput, CompareWithRefStridedSlice) {
+    GenerateStridedSliceModel();
     Run();
 };
 
+TEST_P(ConcatMultiInput, CompareWithRefConstOnly) {
+    GenerateConstOnlyModel();
+    Run();
+};
 } // namespace LayerTestsDefinitions