[GNA] Issue 39975 - cascade concat fix (#2486)
* Rough implementation of a fix for an unused concat input
* [GNA] Fix for a cascade concat input not being assigned
* Reduce copying in the recursive function
* [GNA] Aligned cascade concat test
Parent: 0e62e5e17f
Commit: 4c1ae9b339
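Background for the diff below: the issue is a cascade of concat layers, where one concat's input is produced by another concat. Before this fix, the input-buffer bookkeeping in connectOutput only looked one level deep, so bytes that a network input contributes to the outer concat through the inner one were never attributed to that input. The following is a minimal, self-contained sketch of the recursive accounting idea; ConcatInput/ConcatInfo and the byte values are invented for the example and are not the plugin's GNAConcatLayer types.

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    struct ConcatInput {
        std::string name;      // either a network input or another concat
        bool isNetworkInput;
        size_t tensorSize;     // bytes contributed by this input
        size_t offset;         // byte offset inside the concat buffer
    };

    struct ConcatInfo {
        std::vector<ConcatInput> inputs;
    };

    // Walk a concat and every concat feeding it, accumulating how many bytes of the
    // shared buffer each network input is responsible for.
    void accumulateInputBytes(const ConcatInfo& concat,
                              const std::map<std::string, ConcatInfo>& allConcats,
                              std::map<std::string, size_t>& bytesPerInput) {
        for (const auto& in : concat.inputs) {
            if (in.isNetworkInput) {
                bytesPerInput[in.name] += in.tensorSize + in.offset;
            } else {
                auto it = allConcats.find(in.name);
                if (it != allConcats.end()) {
                    accumulateInputBytes(it->second, allConcats, bytesPerInput);  // cascade: recurse
                }
            }
        }
    }

    int main() {
        // concat_2's first input is concat_1, whose own input is the network input.
        std::map<std::string, ConcatInfo> concats;
        concats["concat_1"].inputs = { {"input_1", true, 256, 64} };
        concats["concat_2"].inputs = { {"concat_1", false, 320, 0}, {"const_2", false, 128, 320} };

        std::map<std::string, size_t> bytesPerInput;
        accumulateInputBytes(concats.at("concat_2"), concats, bytesPerInput);
        std::cout << "input_1 -> " << bytesPerInput["input_1"] << " bytes\n";  // prints 320
    }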
@@ -443,8 +443,9 @@ class ScaleFactorPerLayer<InferenceEngine::ConcatLayer*> {
     }

     // find a source quant value
-    // - 1st candidate - non-activation layer with non-1 scale factor
-    // - 2nd candidate - 1st layer with non-1 scale factor
+    // - 1st candidate - input layer
+    // - 2nd candidate - non-activation layer with non-1 scale factor
+    // - 3rd candidate - 1st layer with non-1 scale factor
     auto sourceLayerCheck = [&fp32eq](InferenceEngine::CNNLayerPtr& inputLayer) {
         auto quantParams = InferenceEngine::getInjectedData<QuantizedLayerParams>(inputLayer);
         LayerInfo info(inputLayer);
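The reordered comments above describe a three-step priority for picking the concat's source quant value: first a plain input layer, then a non-activation layer with a non-1 scale factor, then any layer with a non-1 scale factor. A standalone sketch of that kind of prioritized std::find_if search; the Layer struct and sample values are made up, the plugin uses CNNLayerPtr plus injected QuantizedLayerParams.

    #include <algorithm>
    #include <cmath>
    #include <iostream>
    #include <string>
    #include <vector>

    struct Layer {              // stand-in for the plugin's layer + quant data
        std::string name;
        bool isInput;
        bool isActivation;
        float scale;
    };

    int main() {
        auto fp32eq = [](float a, float b) { return std::fabs(a - b) < 1e-6f; };
        std::vector<Layer> inputs = {
            {"relu_1",  false, true,  8.0f},
            {"fc_1",    false, false, 4.0f},
            {"input_1", true,  false, 1.0f},
        };

        // 1st candidate: an input layer
        auto it = std::find_if(inputs.begin(), inputs.end(),
                               [](const Layer& l) { return l.isInput; });
        // 2nd candidate: a non-activation layer with a non-1 scale factor
        if (it == inputs.end()) {
            it = std::find_if(inputs.begin(), inputs.end(),
                              [&](const Layer& l) { return !l.isActivation && !fp32eq(l.scale, 1.0f); });
        }
        // 3rd candidate: the first layer with a non-1 scale factor
        if (it == inputs.end()) {
            it = std::find_if(inputs.begin(), inputs.end(),
                              [&](const Layer& l) { return !fp32eq(l.scale, 1.0f); });
        }
        if (it != inputs.end())
            std::cout << "source quant taken from: " << it->name << "\n";  // prints: input_1
    }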
@@ -454,16 +455,17 @@ class ScaleFactorPerLayer<InferenceEngine::ConcatLayer*> {
     static std::map<std::string, size_t> restarted_counter;
     auto restartedCountIt = restarted_counter.find(concatLayer->name);
     if (restartedCountIt == restarted_counter.end()) {
-        auto pos = restarted_counter.insert({ "concatLayer->name", 0 });
+        auto pos = restarted_counter.insert({ concatLayer->name, 0 });
         restartedCountIt = pos.first;
     }

-    if (restartedCountIt->second % 2 == 1) {
+    if (((restartedCountIt->second) / 2) % 2 == 1) {
         std::reverse(inputLayers.begin(), inputLayers.end());
     }
     ++restartedCountIt->second;

-    auto sourceLayerIt = std::find_if(inputLayers.begin(), inputLayers.end(), sourceLayerCheck);
+    auto sourceLayerIt = (firstInputIt != inputLayers.end()) ? firstInputIt
+        : std::find_if(inputLayers.begin(), inputLayers.end(), sourceLayerCheck);
     if (sourceLayerIt == inputLayers.end()) {
         auto nonDefaultScaleFactor = [&fp32eq](InferenceEngine::CNNLayerPtr& inputLayer) {
             auto quantParams = InferenceEngine::getInjectedData<QuantizedLayerParams>(inputLayer);
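Two things change above: the restart counter is now keyed by the concat's actual name (the old line inserted the literal string "concatLayer->name"), and the input order is reversed when (count / 2) % 2 is odd rather than count % 2, so each ordering is kept for two consecutive restarts before flipping. A small standalone illustration of that schedule; the concat name and restart count are arbitrary.

    #include <iostream>
    #include <map>
    #include <string>

    int main() {
        std::map<std::string, size_t> restarted_counter;  // one counter per concat name
        const std::string concatName = "concat_2";        // hypothetical layer name

        for (int restart = 0; restart < 8; ++restart) {
            auto it = restarted_counter.find(concatName);
            if (it == restarted_counter.end()) {
                it = restarted_counter.insert({concatName, 0}).first;
            }
            // (count / 2) % 2: keep the original order for two restarts,
            // the reversed order for the next two, and so on.
            bool reversed = ((it->second / 2) % 2) == 1;
            std::cout << "restart " << restart << ": "
                      << (reversed ? "reversed" : "original") << " input order\n";
            ++it->second;
        }
    }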
@@ -527,7 +529,7 @@ class ScaleFactorPerLayer<InferenceEngine::ConcatLayer*> {
     gnalog() << "[UFS] from : " << concatLayer->name << " reached: " << layer->name;
     // found that direct input to concat is a indirect parent of align filter - so no link required
     auto info = LayerInfo(layer);
-    if (!info.isWeightable() && !info.isActivation()) {
+    if (!info.isWeightable() && !info.isActivation() && !info.isConst()) {
         gnalog() << "... skipped\n";
         return;
     }
@@ -549,7 +551,10 @@ class ScaleFactorPerLayer<InferenceEngine::ConcatLayer*> {
         // requantize activation by just changing it's output scale factor
         quantDataForConCatInput->_dst_quant.scale = sourceQuantParams->_dst_quant.scale;
     }
+
+    if (restarLayerInfo.isConst()) {
+        gnalog() << "... warning const layer will be requantized\n";
         quantDataForConCatInput->_dst_quant.scale = sourceQuantParams->_dst_quant.scale;
     }
     result = ScaleFactorUpdateResult(restartedLayer.get());
 }
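Since const layers are no longer skipped in the walk above, a constant feeding the concat may carry a scale factor that differs from the chosen source, so the new branch overwrites the constant's output scale (with a warning) to keep every concat input on one common scale. A toy sketch of what requantizing a constant to the source scale means, using invented numbers and a simplified quantize helper rather than the plugin's QuantizedLayerParams.

    #include <cmath>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Toy version of "requantize a const input by changing its output scale factor":
    // the fp32 values stay the same, the stored integers are re-derived from the new scale.
    std::vector<int16_t> quantize(const std::vector<float>& data, float scale) {
        std::vector<int16_t> out;
        for (float v : data) out.push_back(static_cast<int16_t>(std::lround(v * scale)));
        return out;
    }

    int main() {
        std::vector<float> constData = {0.5f, -1.25f, 2.0f};
        float constScale  = 1024.0f;   // scale the const was originally quantized with
        float sourceScale = 3276.8f;   // scale chosen from the concat's source input

        auto before = quantize(constData, constScale);
        // Concat requires one common scale on every input, so the const is requantized.
        constScale = sourceScale;
        auto after = quantize(constData, constScale);

        std::cout << "0.5f as int16: " << before[0] << " -> " << after[0] << "\n";  // 512 -> 1638
    }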
@@ -1971,20 +1971,29 @@ void GNAGraphCompiler::connectOutput(InferenceEngine::CNNLayerPtr layer, void *p
 if (included == concat_connection.end()) {
     gnamem->reserve_ptr(&concatLayerInfoItem.gna_ptr, ALIGN64(concatLayerInfoItem.reserved_size), 64);

-    size_t concatInputIdx = 0;
-    for (auto &&inputLayer : concatLayerInfoItem.concatInputLayers) {
-        // skipping non functional and reshape layer, as in that case input might be not connected to anything
-        auto realConcatInputs = CNNNetGetPrevLayersSkip(concat, [](CNNLayerPtr l) {
-            return !LayerInfo(l).isNonFunctional() && !LayerInfo(l).isSplit();
-            }, concatInputIdx++);
+    std::function<void(GNAConcatLayer, GNAPluginNS::InputDesc&, ConcatConnection&)> allocate_input_recursively =
+        [&allocate_input_recursively](GNAConcatLayer clayer, GNAPluginNS::InputDesc& inputDesc, ConcatConnection& concat_connection) {
+        size_t concatInputIdx = 0;
+        for (auto &&inputLayer : clayer.concatInputLayers) {
+            // skipping non functional and reshape layer, as in that case input might be not connected to anything
+            auto realConcatInputs = CNNNetGetPrevLayersSkip(clayer.getConcat(), [](CNNLayerPtr l) {
+                return !LayerInfo(l).isNonFunctional() && !LayerInfo(l).isSplit();
+                }, concatInputIdx++);
+
-        for (auto rInput : realConcatInputs) {
-            if (LayerInfo(rInput.first).isInput()) {
-                inputDesc->bytes_allocated_for_input[rInput.first->name] += inputLayer.tensorSize;
+            for (auto rInput : realConcatInputs) {
+                if (LayerInfo(rInput.first).isInput()) {
+                    inputDesc.bytes_allocated_for_input[rInput.first->name] += inputLayer.tensorSize + inputLayer.offset;
                 }
+                if (LayerInfo(rInput.first).isConcat()) {
+                    auto concatLayerInfo = concat_connection.find(rInput.first->name);
+                    allocate_input_recursively(concatLayerInfo->second, inputDesc, concat_connection);
+                }
             }
         }
-    }
-    concatLayerInfoItem.input_allocated = true;
+        clayer.input_allocated = true;
+    };
+
+    allocate_input_recursively(concatLayerInfoItem, *inputDesc, concat_connection);
 }
 concatLayerInfo->second.output_allocation_flag = true;
 }
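The rewrite above folds the allocation loop into a lambda that calls itself, hence the std::function type and the [&allocate_input_recursively] capture; a plain auto lambda cannot refer to itself by name. Passing inputDesc and concat_connection by reference is presumably what the "reduce copying in recursive function" note in the commit message refers to: each recursion level reuses the same maps instead of copying them. A minimal standalone example of the same pattern, over an invented toy graph.

    #include <functional>
    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    int main() {
        // A toy "concat graph": each node lists the names of the nodes feeding it.
        std::map<std::string, std::vector<std::string>> feeds = {
            {"concat_2", {"concat_1", "const_2"}},
            {"concat_1", {"input_1", "const_1"}},
        };
        std::map<std::string, size_t> visits;  // shared state, passed by reference below

        // The lambda must be a std::function so it can capture (a reference to) itself.
        std::function<void(const std::string&, std::map<std::string, size_t>&)> walk =
            [&walk, &feeds](const std::string& node, std::map<std::string, size_t>& acc) {
                ++acc[node];
                auto it = feeds.find(node);
                if (it == feeds.end()) return;           // a leaf (input or const)
                for (const auto& producer : it->second)
                    walk(producer, acc);                 // recurse without copying acc
            };

        walk("concat_2", visits);
        for (const auto& kv : visits)
            std::cout << kv.first << " visited " << kv.second << " time(s)\n";
    }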
@@ -40,5 +40,7 @@ std::vector<std::string> disabledTestPatterns() {
         R"(.*IEClassGetAvailableDevices.*)",
         // TODO: Issue 32923
         R"(.*IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK.*)",
+        // TODO: Issue 39358
+        R"(.*unaligned.*MultipleConcatTest.*)",
     };
 }
@@ -0,0 +1,70 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
#include <subgraph_tests/multiple_concat.hpp>
#include "common_test_utils/test_constants.hpp"

namespace SubgraphTestsDefinitions {
namespace {
std::vector<size_t> input_sizes_aligned = {
    64,
    576,
};

std::vector<size_t> constant_sizes_aligned = {
    64,
    32,
};

std::vector<size_t> input_sizes_unaligned = {
    26,
    99
};

std::vector<size_t> constant_sizes_unaligned = {
    26,
    99
};

std::map<std::string, std::string> additional_config = {
    {"GNA_COMPACT_MODE", "NO"},
    {"GNA_DEVICE_MODE", "GNA_SW_EXACT"},
    {"GNA_SCALE_FACTOR_0", "3276.8"},
};
} // namespace

INSTANTIATE_TEST_CASE_P(I_aligned_C_aligned, MultipleConcatTest,
    ::testing::Combine(
        ::testing::Values(CommonTestUtils::DEVICE_GNA),
        ::testing::Values(InferenceEngine::Precision::FP32),
        ::testing::ValuesIn(input_sizes_aligned),
        ::testing::ValuesIn(constant_sizes_aligned),
        ::testing::Values(additional_config)),
    MultipleConcatTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(I_aligned_C_unaligned, MultipleConcatTest,
    ::testing::Combine(
        ::testing::Values(CommonTestUtils::DEVICE_GNA),
        ::testing::Values(InferenceEngine::Precision::FP32),
        ::testing::ValuesIn(input_sizes_aligned),
        ::testing::ValuesIn(constant_sizes_unaligned),
        ::testing::Values(additional_config)),
    MultipleConcatTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(I_unaligned_C_aligned, MultipleConcatTest,
    ::testing::Combine(
        ::testing::Values(CommonTestUtils::DEVICE_GNA),
        ::testing::Values(InferenceEngine::Precision::FP32),
        ::testing::ValuesIn(input_sizes_unaligned),
        ::testing::ValuesIn(constant_sizes_aligned),
        ::testing::Values(additional_config)),
    MultipleConcatTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(I_unaligned_C_unaligned, MultipleConcatTest,
    ::testing::Combine(
        ::testing::Values(CommonTestUtils::DEVICE_GNA),
        ::testing::Values(InferenceEngine::Precision::FP32),
        ::testing::ValuesIn(input_sizes_unaligned),
        ::testing::ValuesIn(constant_sizes_unaligned),
        ::testing::Values(additional_config)),
    MultipleConcatTest::getTestCaseName);
} // namespace SubgraphTestsDefinitions
@@ -0,0 +1,25 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
#pragma once

#include "common_test_utils/test_common.hpp"
#include "functional_test_utils/layer_test_utils.hpp"
#include <ie_core.hpp>

namespace SubgraphTestsDefinitions {
typedef std::tuple<
    std::string,                        // Target device name
    InferenceEngine::Precision,         // Network precision
    size_t,                             // Input size
    size_t,                             // Const size
    std::map<std::string, std::string>  // Configuration
> multipleConcatParams;

class MultipleConcatTest : public LayerTestsUtils::LayerTestsCommon,
    public testing::WithParamInterface<multipleConcatParams> {
protected:
    void SetUp() override;
public:
    static std::string getTestCaseName(const testing::TestParamInfo<multipleConcatParams> &obj);
};
} // namespace SubgraphTestsDefinitions
@@ -0,0 +1,84 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <tuple>
#include <string>
#include <vector>
#include <memory>
#include <functional>

#include "ie_core.hpp"

#include "common_test_utils/common_utils.hpp"
#include "functional_test_utils/blob_utils.hpp"
#include "functional_test_utils/precision_utils.hpp"
#include "functional_test_utils/plugin_cache.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "ngraph_functions/utils/ngraph_helpers.hpp"
#include "ngraph_functions/builders.hpp"
#include "subgraph_tests/multiple_concat.hpp"

namespace SubgraphTestsDefinitions {

std::string MultipleConcatTest::getTestCaseName(const testing::TestParamInfo<multipleConcatParams> &obj) {
    std::string targetDevice;
    InferenceEngine::Precision netPrecision;
    size_t inputSize;
    size_t constantSize;
    std::map<std::string, std::string> config;
    std::tie(targetDevice, netPrecision, inputSize, constantSize, config) = obj.param;
    std::ostringstream result;

    result << "netPrecision=" << netPrecision.name() << "_";
    result << "IS=" << inputSize << "_";
    result << "CS=" << constantSize << "_";
    result << "targetDevice=" << targetDevice;
    return result.str();
}

void MultipleConcatTest::SetUp() {
    InferenceEngine::Precision netPrecision;
    std::map<std::string, std::string> config;
    size_t inputSize;
    size_t constantSize;
    std::tie(targetDevice, netPrecision, inputSize, constantSize, config) = this->GetParam();
    configuration.insert(config.begin(), config.end());
    auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);

    std::vector<size_t> input_dims { 1, inputSize };
    std::vector<size_t> constant_dims {1, constantSize};

    const int seed = 0;
    std::mt19937 gen(static_cast<float>(seed));

    auto generateFloatNumbers = [gen](std::size_t vec_len, float min, float max) mutable {
        std::vector<float> res;

        std::uniform_real_distribution<float> dist(min, max);
        for (int i = 0; i < vec_len; i++)
            res.emplace_back(static_cast<float>(dist(gen)));

        return res;
    };

    auto concat_1_vals = generateFloatNumbers(constantSize, -2.0f, 2.0f);
    auto concat_2_vals = generateFloatNumbers(constantSize, -5.0f, 5.0f);

    auto input_parameter = ngraph::builder::makeParams(ngPrc, {input_dims});

    auto const_1 = ngraph::builder::makeConstant(ngPrc, constant_dims, concat_1_vals);
    auto concat_1 = ngraph::builder::makeConcat({const_1, input_parameter[0]}, 1);

    auto const_2 = ngraph::builder::makeConstant(ngPrc, constant_dims, concat_1_vals);
    auto concat_2 = ngraph::builder::makeConcat({concat_1, const_2}, 1);

    auto act = ngraph::builder::makeActivation(concat_2, ngPrc, ngraph::helpers::ActivationTypes::Relu);

    function = std::make_shared<ngraph::Function>(act, input_parameter, "multiple_concat");
}

TEST_P(MultipleConcatTest, CompareWithRefs) {
    Run();
};
} // namespace SubgraphTestsDefinitions