[FIX][OV][GNA] Fix GNA plugin crashes when running conformance tests (#13988)

* [GNA] added dims size validation to avoid segfault in orientation helper
* [GNA] fixed matmul transformation test to not access data from empty container
* [OV] fixed pass Tile to TileIE and added assertion to ngraph to CNN conversion.
This commit is contained in:
Marcin Kusmierski
2022-11-23 11:15:47 +01:00
committed by GitHub
parent 2bd2d266f1
commit 3f1f798911
4 changed files with 30 additions and 6 deletions

View File

@@ -2076,10 +2076,12 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
auto params = graph->get_parameters();
for ( const auto &param : params ) {
const std::string input_name = param->get_friendly_name();
auto &thisInputData = *thisInputDataMap[input_name];
resultInputDataMap[input_name]->setPrecision(thisInputData.getPrecision());
resultInputDataMap[input_name]->setLayout(thisInputData.getLayout());
resultInputDataMap[input_name]->getPreProcess() = thisInputData.getPreProcess();
auto thisInputDataItr = thisInputDataMap.find(input_name);
IE_ASSERT(thisInputDataItr != thisInputDataMap.end())
<< "Internal issue with model handling. Improper input name: " << input_name;
resultInputDataMap[input_name]->setPrecision(thisInputDataItr->second->getPrecision());
resultInputDataMap[input_name]->setLayout(thisInputDataItr->second->getLayout());
resultInputDataMap[input_name]->getPreProcess() = thisInputDataItr->second->getPreProcess();
}
}

View File

@@ -81,8 +81,11 @@ ngraph::pass::ConvertTileToLegacyMatcher::ConvertTileToLegacyMatcher() {
++tiles_it;
}
// TODO: If last_node points to input layer, its name will be changed to the tile layer name. There will be
// issue with conversion from ngraph::Function to CNNNetwork.
last_node.get_node_shared_ptr()->set_friendly_name(tile->get_friendly_name());
ngraph::copy_runtime_info(tile, new_ops);
ngraph::replace_node(tile, {last_node});
return true;
};

View File

@@ -22,6 +22,13 @@ void updateModelInputOrientationWithoutConvolution(const InferenceEngine::CNNLay
return;
}
auto dims = input->dims;
if (dims.empty()) {
// If input is scalar there is no sense to update orientation.
return;
}
auto rowsNum = dims[0];
auto doesntHaveGnaMapping = [=](InferenceEngine::CNNLayerPtr l) {
auto dnnLayer = components.findComponent(l);
return dnnLayer == nullptr;
@@ -33,8 +40,6 @@ void updateModelInputOrientationWithoutConvolution(const InferenceEngine::CNNLay
return;
}
auto dims = input->dims;
auto rowsNum = dims[0];
auto columnProduct = std::accumulate(std::next(std::begin(dims)), std::end(dims), 1, std::multiplies<int>());
// does not make sense to check if further if any of sizes is equal to 1

View File

@@ -80,6 +80,20 @@ void InsertTranspose(std::shared_ptr<ngraph::Node> prev_node, const std::string&
// Checks whether a Reshape node performs a meaningful transformation.
// Returns false only when the reshape can be treated as a no-op:
//  - scalar -> scalar (nothing to reshape), or
//  - both sides ranked and the leading dimension is unchanged.
bool VerifyReshape(const ngraph::Output<ngraph::Node>& reshape_out) {
    const auto reshape_node = reshape_out.get_node_shared_ptr();
    const auto input_shape = reshape_node->get_input_shape(0);
    const auto output_shape = reshape_node->get_output_shape(0);

    const bool input_is_scalar = input_shape.empty();
    const bool output_is_scalar = output_shape.empty();

    // Scalar on both ends: no reshape is required.
    if (input_is_scalar && output_is_scalar)
        return false;
    // Exactly one end is scalar: the reshape changes rank, so it is required.
    // (Equivalent to the || check after the both-scalar case is excluded.)
    if (input_is_scalar != output_is_scalar)
        return true;
    // Both ends have dimensions: the reshape matters only if the first
    // (row) dimension differs.
    return input_shape.front() != output_shape.front();
}