Add fix for multiple_activations test (#2984)

* Add fix for multiple_activations test

   - Add a forbid-activation-fusing pass for the GNA2 library
   - Fix the get-all-previous-layers function
   - Enable GNA_NoRegression.smoke_multiple_activations_onGNA_INT16

* Apply Bartek's review comments
Krzysztof Bruniecki 2020-11-10 11:40:28 +01:00 committed by GitHub
parent 93ad181c8b
commit b6e2cd692b
4 changed files with 47 additions and 0 deletions


@@ -220,9 +220,14 @@ inline std::pair<InferenceEngine::CNNLayerPtr, int> CNNNetCheckNextLayerSkipCertain(
        separate_layers(getInputTo(layer->outData[i]));
    }
    std::set< CNNLayerPtr > visited;
    while (!currentSet.empty()) {
        auto currentLayer = currentSet.front();
        currentSet.pop_front();
        if (visited.count(currentLayer)) {
            continue;
        }
        visited.insert(currentLayer);
        for (auto && oData : currentLayer->outData) {
            separate_layers(getInputTo(oData));
        }


@@ -421,6 +421,9 @@ void GNAPlugin::LoadNetwork(ICNNNetwork & _network) {
    passes->registerPass<InsertCopyLayerPass>();
    passes->registerPass<InsertDiagonalLayerPass>();
    passes->registerPass<HandleMultipleActivationsForTheLayerPass>();
#if GNA_LIB_VER == 2
    passes->registerPass<ForbidActivationFusingPass>();
#endif
    passes->registerPass<SubstituteScaleShiftBroadCastPass>();
    passes->registerPass<FuseMultipleIdentitiesPass>();
    passes->registerPass<BroadcastConstPass>();
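
The preprocessor guard above compiles the new pass in only when the plugin is built against the GNA2 library. A toy sketch of this guarded-registration pattern follows, assuming a pass manager that simply runs passes in registration order; PassManager, Pass, and the two sample passes are illustrative stand-ins, not the plugin's classes.

#include <iostream>
#include <memory>
#include <vector>

// Assumed for this sketch; in the real plugin the build system defines GNA_LIB_VER.
#define GNA_LIB_VER 2

// Minimal pass interface and manager (illustrative only).
struct Pass {
    virtual ~Pass() = default;
    virtual void run() = 0;
};

struct PassManager {
    std::vector<std::unique_ptr<Pass>> passes;
    template <typename T>
    void registerPass() { passes.push_back(std::make_unique<T>()); }
    void runAll() { for (auto& p : passes) p->run(); }   // runs in registration order
};

struct HandleMultipleActivations : Pass { void run() override { std::cout << "handle multiple activations\n"; } };
struct ForbidActivationFusing : Pass { void run() override { std::cout << "forbid activation fusing\n"; } };

int main() {
    PassManager pm;
    pm.registerPass<HandleMultipleActivations>();
#if GNA_LIB_VER == 2
    // Only relevant for the GNA2 library, mirroring the guarded registration above.
    pm.registerPass<ForbidActivationFusing>();
#endif
    pm.runAll();
}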


@@ -26,6 +26,7 @@
#include <layers/gna_copy_layer.hpp>
#include "gna_plugin_log.hpp"
#include "frontend/quantization.h"
#include "frontend/quantized_layer_params.hpp"
#include <layers/gna_copy_layer.hpp>
#include "gna_graph_tools.hpp"
@@ -268,6 +269,36 @@ void HandleMultipleActivationsForTheLayerPass::run() {
    }
}

void ForbidActivationFusingPass::run() {
    for (auto& l : *pLayers) {
        if (LayerInfo(l).isActivation()) {
            auto prevLayer = CNNNetPrevLayer(l);
            if (LayerInfo(prevLayer).has32BOutput()) {
                // find all layers directly connected to the outputs of the previous layer
                const auto allUsingPrev = CNNNetGetAllNextLayersSkipCertain(prevLayer, -1,
                    [&](CNNLayerPtr nextLayer) -> bool {
                        for (const auto& input : nextLayer->insData) {
                            for (const auto& output : prevLayer->outData) {
                                if (areEqualDatas(input.lock(), output) &&
                                    areEqualDatas(l->insData[0].lock(), output) &&
                                    (LayerInfo(nextLayer).isEltwiseSum() || nextLayer == l)) {
                                    return false;
                                }
                            }
                        }
                        return true;
                    });
                if (allUsingPrev.size() > 1) {
                    // weights of MAX_VAL_2B_WEIGHT enforce a 1.0 scale factor,
                    // so that the scores stay accurate
                    insertDiagonalLayerBetween(prevLayer, l, getPassManager(), MAX_VAL_2B_WEIGHT);
                }
                continue;
            }
        }
    }
}

void ReorderMaxPoolPass::run() {
    // detecting following pattern
    // conv->relu->maxpooling
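
In effect, ForbidActivationFusingPass asks one question per activation with a 32-bit producer: does anything other than this activation also read the producer's raw sums? If so, fusing would hide those sums, so a diagonal layer is inserted. A self-contained sketch of that check with toy structs follows; ToyLayer and needsDiagonalLayer are hypothetical stand-ins for the CNNLayer graph, not the plugin's types.

#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for a layer in the graph (not the plugin's CNNLayer).
struct ToyLayer {
    std::string name;
    bool isActivation;
    bool has32BitOutput;
    std::vector<const ToyLayer*> consumers;   // layers reading this layer's output
};

// Mirrors the pass's core decision: if the 32-bit producer feeds anything besides
// the activation that would normally be fused with it, the raw sums must stay
// visible, so a diagonal layer has to be inserted before the activation.
bool needsDiagonalLayer(const ToyLayer& producer, const ToyLayer& activation) {
    if (!producer.has32BitOutput || !activation.isActivation) {
        return false;
    }
    return producer.consumers.size() > 1;      // more users than just the activation
}

int main() {
    ToyLayer relu{"relu", true, false, {}};
    ToyLayer eltwise{"eltwise_sum", false, false, {}};
    ToyLayer affine{"affine", false, true, {&relu, &eltwise}};   // sums used twice

    std::cout << std::boolalpha
              << needsDiagonalLayer(affine, relu) << '\n';       // true -> insert diagonal layer
}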


@@ -123,6 +123,14 @@ DECL_PASS(ReorderMaxPool);
 */
DECL_PASS(HandleMultipleActivationsForTheLayer);
/**
 * @brief GNA does not provide intermediate results (sums) when a layer is fused with its activation.
 * When layers other than the activation also use those sums as inputs, a diagonal layer
 * is inserted before the activation to forbid the fusing and keep the sums exposed.
 * This case is exercised by the multiple_activations_onGNA_INT16 test.
 */
DECL_PASS(ForbidActivationFusing);
/**
 * @brief copy layer insertion required in cases where input layer does not have output memory
 */