[GNA] Handling input orientation (#1851)

Added test

Added fix
This commit is contained in:
Andrey Dmitriev 2020-09-08 10:46:10 +03:00 committed by GitHub
parent 063c7ef6b9
commit 8e6d9470bb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 155 additions and 39 deletions

View File

@ -398,6 +398,11 @@ void GNAGraphCompiler::ConvolutionPrimitive(InferenceEngine::CNNLayerPtr layer)
ptr_weights,
ptr_biases);
if (inputs->getLayout() == Layout::NHWC) {
currentComponent.orientation_in = kDnnInterleavedOrientation;
currentComponent.orientation_out = kDnnInterleavedOrientation;
}
size_t num_data_bytes_out =
InferenceEngine::details::product(begin(outputs->getDims()), end(outputs->getDims()))
* outputs->getPrecision().size();

View File

@ -699,54 +699,35 @@ void GNAPlugin::LoadNetwork(ICNNNetwork & _network) {
}
}
if (withConv) {
for (auto &layer : sortedNet) {
for (int i = 0; CNNNetHasPrevLayer(layer.get(), i); i++) {
auto prevLayer = CNNNetPrevLayer(layer.get(), i);
if (!skippedLayers.count(prevLayer->name)) {
if (CNNNetHasPrevLayer(prevLayer.get())) {
continue;
}
for (auto &inputLayer : sortedNet) {
if (!LayerInfo(inputLayer).isInput()) {
continue;
}
auto doesntHaveGnaMapping = [this] (CNNLayerPtr l) {
auto dnnLayer = graphCompiler.dnnComponents.findComponent(l);
return dnnLayer == nullptr;
};
// we are in the one of input layers
if (LayerInfo(prevLayer).isMemory()) {
continue;
}
}
auto dnnLayer = graphCompiler.dnnComponents.findComponent(layer);
string inputName = prevLayer->name;
std::vector<string> inputs;
if (skippedLayers.count(prevLayer->name)) {
inputs = skippedLayers[prevLayer->name];
} else {
inputs.push_back(inputName);
}
auto nextLayers = CNNNetGetAllNextLayersSkipCertain(inputLayer, -1, doesntHaveGnaMapping);
for (auto &nextLayer : nextLayers) {
auto dnnLayer = graphCompiler.dnnComponents.findComponent(nextLayer);
// non functional layer - skipped by gna
if (nullptr == dnnLayer) {
// storing input name for skipped layer
if (skippedLayers[inputName].size() == 0) {
skippedLayers[layer->name].push_back(inputName);
} else {
skippedLayers[layer->name] = skippedLayers[inputName];
}
continue;
THROW_GNA_LAYER_EXCEPTION(inputLayer) << " gna mapped layer search connection failed";
}
// input orientation might be already initialized, thus verify that it matches
for (auto input : inputs) {
if (!inputsDesc->orientation_in.count(input)) {
inputsDesc->orientation_in[input] = dnnLayer->orientation_in;
} else {
if (inputsDesc->orientation_in[input] != dnnLayer->orientation_in) {
THROW_GNA_EXCEPTION << "orientation for input layer: " << input << "cannot be calculated";
}
if (!inputsDesc->orientation_in.count(inputLayer->name)) {
inputsDesc->orientation_in[inputLayer->name] = dnnLayer->orientation_in;
} else {
if (inputsDesc->orientation_in[inputLayer->name] != dnnLayer->orientation_in) {
THROW_GNA_EXCEPTION << "orientation for input layer: " << inputLayer->name << "cannot be calculated";
}
}
}
}
} else {
for (auto& inputLayer : inputLayers) {
for (auto &inputLayer : inputLayers) {
inputsDesc->orientation_in[inputLayer->name] = kDnnInterleavedOrientation;
}
}

View File

@ -1290,9 +1290,11 @@ void FuseMultipleIdentitiesPass::run() {
if (LayerInfo(l).isNonFunctional() || LayerInfo(l).has32BInput())
continue;
gnalog() << "CNNNetPrevLayer skip non functional from :: " << l->name;
auto prevLayersReached = CNNNetGetPrevLayersSkip(l, [](CNNLayerPtr ptr) {
auto isFunctional = [](CNNLayerPtr ptr) {
return !LayerInfo(ptr).isNonFunctional();
});
};
auto prevLayersReached = CNNNetGetPrevLayersSkip(l, isFunctional);
prevLayersReached.erase(std::remove_if(prevLayersReached.begin(),
prevLayersReached.end(),
[] (const std::pair<CNNLayerPtr, int> & candidate) {

View File

@ -0,0 +1,30 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <vector>
#include "subgraph_tests/handling_orientation_conv.hpp"
#include "common_test_utils/test_constants.hpp"

using namespace LayerTestsDefinitions;

// Network precisions the orientation-handling subgraph test is instantiated with.
static const std::vector<InferenceEngine::Precision> kNetPrecisions = {
    InferenceEngine::Precision::FP32,
    InferenceEngine::Precision::FP16,
};

// GNA plugin configuration: unit scale factors for both inputs,
// compact mode disabled.
static const std::vector<std::map<std::string, std::string>> kConfigs = {
    {
        {"GNA_SCALE_FACTOR_0", "1"},
        {"GNA_SCALE_FACTOR_1", "1"},
        {"GNA_COMPACT_MODE", "NO"},
    }
};

INSTANTIATE_TEST_CASE_P(handling_orientation, HandlingOrientationClass,
                        ::testing::Combine(
                            ::testing::ValuesIn(kNetPrecisions),
                            ::testing::Values(CommonTestUtils::DEVICE_GNA),
                            ::testing::ValuesIn(kConfigs)),
                        HandlingOrientationClass::getTestCaseName);

View File

@ -0,0 +1,31 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once

// IWYU fix: <map> was previously pulled in only transitively although
// std::map is used in HandlingOrientationParams below.
#include <map>
#include <memory>
#include <string>
#include <tuple>
#include <vector>

#include "functional_test_utils/layer_test_utils.hpp"
#include "ngraph_functions/utils/ngraph_helpers.hpp"
#include "ngraph_functions/builders.hpp"

namespace LayerTestsDefinitions {

// Parameters of the input-orientation handling subgraph test.
using HandlingOrientationParams = std::tuple<
    InferenceEngine::Precision,          // Network precision
    std::string,                         // Device name
    std::map<std::string, std::string>   // Configuration
>;

// Subgraph test fixture checking that the plugin correctly resolves
// input orientation for networks containing convolutions
// (see the matching instantiation for the GNA device).
class HandlingOrientationClass : public testing::WithParamInterface<HandlingOrientationParams>,
                                 virtual public LayerTestsUtils::LayerTestsCommon {
public:
    // Builds a human-readable test name from the parameter tuple.
    static std::string getTestCaseName(const testing::TestParamInfo<HandlingOrientationParams> &obj);
protected:
    // Constructs the ngraph function under test.
    void SetUp() override;
};
}  // namespace LayerTestsDefinitions

View File

@ -0,0 +1,67 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <tuple>
#include <string>
#include <vector>
#include <memory>
#include <debug.h>
#include "functional_test_utils/precision_utils.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "subgraph_tests/handling_orientation_conv.hpp"
namespace LayerTestsDefinitions {
// Builds the parameterized-test name from precision and device.
// The configuration map is unpacked but deliberately not encoded into the
// name, matching the original naming scheme.
std::string HandlingOrientationClass::getTestCaseName(const testing::TestParamInfo<HandlingOrientationParams> &obj) {
    InferenceEngine::Precision precision;
    std::string device;
    std::map<std::string, std::string> config;
    std::tie(precision, device, config) = obj.param;

    std::ostringstream name;
    name << "netPRC=" << precision.name() << "_"
         << "targetDevice=" << device << "_";
    return name.str();
}
void HandlingOrientationClass::SetUp() {
    // Builds a two-branch function: one branch routes its input through
    // Transpose -> Convolution -> Transpose, the other applies a plain
    // Convolution, so the plugin must reconcile different input orientations.
    InferenceEngine::Precision netPrecision;
    std::tie(netPrecision, targetDevice, configuration) = this->GetParam();
    auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);

    // Two network inputs, 336 elements each.
    auto params = ngraph::builder::makeParams(ngPrc, { {1, 336} , {1, 336}});

    // Reshape the flat inputs to 4D: branch 1 to NHWC-like {1,1,168,2},
    // branch 2 to {1,336,1,1}.
    std::vector<size_t> outFormShapes1 = { 1, 1, 168, 2 };
    std::vector<size_t> outFormShapes2 = { 1, 336, 1, 1 };
    auto pattern1 = std::make_shared<ngraph::opset1::Constant>(ngraph::element::Type_t::i64, ngraph::Shape{ 4 }, outFormShapes1);
    auto reshape1 = std::make_shared<ngraph::opset1::Reshape>(params[0], pattern1, false);
    auto pattern2 = std::make_shared<ngraph::opset1::Constant>(ngraph::element::Type_t::i64, ngraph::Shape{ 4 }, outFormShapes2);
    auto reshape2 = std::make_shared<ngraph::opset1::Reshape>(params[1], pattern2, false);

    // Branch 1: NHWC -> NCHW transpose, 12-output-channel 1x8 convolution,
    // then transpose back to NHWC.
    auto permute1 = std::make_shared<ngraph::opset1::Transpose>(reshape1,
        ngraph::opset1::Constant::create(ngraph::element::i64, ngraph::Shape{ 4 }, { 0, 3, 1, 2 }));
    auto conv1 = ngraph::builder::makeConvolution(permute1, ngPrc, { 1, 8 }, { 1, 1 }, { 0, 0 }, { 0, 0 }, { 1, 1 },
        ngraph::op::PadType::VALID, 12);
    auto permute2 = std::make_shared<ngraph::opset1::Transpose>(conv1,
        ngraph::opset1::Constant::create(ngraph::element::i64, ngraph::Shape{ 4 }, { 0, 2, 3, 1 }));

    // Branch 2: 336-output-channel 1x1 convolution, no transposes.
    auto conv2 = ngraph::builder::makeConvolution(reshape2, ngPrc, { 1, 1 }, { 1, 1 }, { 0, 0 }, { 0, 0 }, { 1, 1 },
        ngraph::op::PadType::VALID, 336);

    // Flatten both branches back to 2D results.
    std::vector<size_t> outFormShapes3 = { 1, 1932 };
    std::vector<size_t> outFormShapes4 = { 1, 336 };
    auto pattern3 = std::make_shared<ngraph::opset1::Constant>(ngraph::element::Type_t::i64, ngraph::Shape{ 2 }, outFormShapes3);
    auto pattern4 = std::make_shared<ngraph::opset1::Constant>(ngraph::element::Type_t::i64, ngraph::Shape{ 2 }, outFormShapes4);
    auto reshape3 = std::make_shared<ngraph::opset1::Reshape>(permute2, pattern3, false);
    auto reshape4 = std::make_shared<ngraph::opset1::Reshape>(conv2, pattern4, false);
    ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape3),
                                  std::make_shared<ngraph::opset1::Result>(reshape4)};
    function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass");
}
// Runs the network on the target device and compares outputs against the
// reference implementation. Fix: dropped the stray ';' after the function
// body (an empty declaration flagged by -Wextra-semi).
TEST_P(HandlingOrientationClass, CompareWithRefs) {
    Run();
}
} // namespace LayerTestsDefinitions