Gna padding tests (#6541)

* [GNA] Use built in ngraph padding calculation

* [GNA] Use less channels for tests

* [GNA] Rename tests file

* [GNA] Add ngraph reference tests and some minor fixes in the transformation itself

* [GNA] Add invalid ngraph reference tests

* [GNA] Align brace style

* [GNA] Remove unnecessary condition
This commit is contained in:
Szymon Irzabek 2021-07-08 10:49:07 +02:00 committed by GitHub
parent baef88c8fb
commit afe60b3263
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 462 additions and 94 deletions

View File

@ -29,19 +29,12 @@ struct ConvData {
size_t input_height;
size_t input_width;
size_t input_channel_count;
size_t filter_height;
size_t filter_width;
size_t filter_count;
size_t filter_dilation_width;
size_t filter_dilation_height;
size_t filter_stride_width;
size_t filter_stride_height;
size_t pads_begin_width;
size_t pads_begin_height;
size_t pads_end_width;
size_t pads_end_height;
ngraph::op::PadType padding_type;
ngraph::Shape output_shape;
ngraph::element::Type element_type;
};
@ -55,27 +48,18 @@ static bool VerifyAndGetConvParams(std::shared_ptr<ngraph::opset7::Convolution>
return false;
}
conv_data.output_shape = conv->get_output_shape(0);
conv_data.padding_type = conv->get_auto_pad();
conv_data.input_channel_count = conv->input_value(0).get_shape()[1];
conv_data.input_height = conv->input_value(0).get_shape()[2];
conv_data.input_width = conv->input_value(0).get_shape()[3];
conv_data.filter_count = conv->input_value(1).get_shape()[0];
conv_data.filter_height = conv->input_value(1).get_shape()[2];
conv_data.filter_width = conv->input_value(1).get_shape()[3];
conv_data.filter_dilation_height = conv->get_dilations()[0];
conv_data.filter_dilation_width = conv->get_dilations()[1];
conv_data.filter_stride_height = conv->get_strides()[0];
conv_data.filter_stride_width = conv->get_strides()[1];
conv_data.pads_begin_height = conv->get_pads_begin()[0];
conv_data.pads_begin_width = conv->get_pads_begin()[1];
conv_data.pads_end_height = conv->get_pads_end()[0];
conv_data.pads_end_width = conv->get_pads_end()[1];
conv_data.element_type = conv->get_element_type();
IE_ASSERT(conv_data.filter_count == conv_data.output_shape[1]);
return true;
return conv_data.pads_begin_height || conv_data.pads_end_height || conv_data.pads_begin_width || conv_data.pads_end_width;
}
static bool TransposeOrderMatches(std::shared_ptr<ngraph::opset7::Transpose> transpose, std::vector<size_t> order) {
@ -117,75 +101,9 @@ static bool VerifyMaxPool(std::shared_ptr<ngraph::opset7::MaxPool> max_pool) {
auto pool_kernel = max_pool->get_kernel();
// Check if MaxPool vertical stride == pool size
// (TODO: remove when 50386 and 50379 are fixed and also verify pool_kernel[0] > 8 limitation below, gna_limitations can be used then)
// Check if padding is VALID
return (max_pool->get_auto_pad() == ngraph::op::PadType::VALID &&
pool_kernel.size() == 2 && pool_strides.size() == 2 &&
pool_kernel[0] == pool_strides[0] && pool_kernel[0] <= 8);
}
// Computes the total padding (begin + end combined) that must be added to the
// input so that a convolution with the given filter/stride/dilation yields
// exactly output_size elements. Returns 0 when the input is already large enough.
// Working with the "required input extent" keeps everything in integer math
// and avoids fractional intermediate results.
static size_t GetRequiredInputPadding(size_t input_size, size_t filter_size, size_t stride_size, size_t dilation_size, size_t output_size) {
    const size_t dilated_filter_extent = (filter_size - 1) * dilation_size + 1;
    const size_t required_input_size = (output_size - 1) * stride_size + dilated_filter_extent;
    if (required_input_size <= input_size)
        return 0;
    return required_input_size - input_size;
}
// Standard convolution output-size formula:
//   out = (in + total_padding - dilated_filter_extent) / stride + 1
// where dilated_filter_extent = (filter - 1) * dilation + 1.
static size_t CalculateOutputSize(size_t input_size, size_t filter_size, size_t stride_size, size_t dilation_size, size_t padding_size) {
    const size_t dilated_filter_extent = (filter_size - 1) * dilation_size + 1;
    const size_t padded_input_size = input_size + padding_size;
    return (padded_input_size - dilated_filter_extent) / stride_size + 1;
}
static bool CalculatePadding(ConvData& conv_data) {
size_t output_height{ 0 };
size_t output_width{ 0 };
switch (conv_data.padding_type) {
case ngraph::op::PadType::EXPLICIT:
// all paddings already set
break;
case ngraph::op::PadType::VALID:
conv_data.pads_begin_height = 0;
conv_data.pads_begin_width = 0;
conv_data.pads_end_height = 0;
conv_data.pads_end_width = 0;
break;
case ngraph::op::PadType::SAME_LOWER:
case ngraph::op::PadType::SAME_UPPER:
{
output_height = conv_data.output_shape[2];
output_width = conv_data.output_shape[3];
size_t pads_width = GetRequiredInputPadding(conv_data.input_width, conv_data.filter_width,
conv_data.filter_stride_width, conv_data.filter_dilation_width, output_width);
size_t pads_height = GetRequiredInputPadding(conv_data.input_height, conv_data.filter_height,
conv_data.filter_stride_height, conv_data.filter_dilation_height, output_height);
conv_data.pads_begin_width = conv_data.pads_end_width = pads_width / 2;
conv_data.pads_begin_height = conv_data.pads_end_height = pads_height / 2;
if (conv_data.padding_type == ngraph::op::PadType::SAME_LOWER) {
conv_data.pads_begin_width += (pads_width % 2);
conv_data.pads_begin_height += (pads_height % 2);
} else {
conv_data.pads_end_width += (pads_width % 2);
conv_data.pads_end_height += (pads_height % 2);
}
break;
}
default:
break;
}
output_width = CalculateOutputSize(conv_data.input_width, conv_data.filter_width, conv_data.filter_stride_width,
conv_data.filter_dilation_width, conv_data.pads_begin_width + conv_data.pads_end_width);
output_height = CalculateOutputSize(conv_data.input_height, conv_data.filter_height, conv_data.filter_stride_height,
conv_data.filter_dilation_height, conv_data.pads_begin_height + conv_data.pads_end_height);
IE_ASSERT(output_width == conv_data.output_shape[3]);
IE_ASSERT(output_height == conv_data.output_shape[2]);
// Check if any calculated padding is non-zero, otherwise there is no need to decompose such convolution
return conv_data.pads_begin_height || conv_data.pads_end_height || conv_data.pads_begin_width || conv_data.pads_end_width;
pool_kernel.size() == 2 && pool_strides.size() == 2);
}
static std::shared_ptr<ngraph::opset7::StridedSlice> FlatCrop(ngraph::Output<ngraph::Node> input, size_t offset, size_t size) {
@ -227,7 +145,7 @@ static std::shared_ptr<ngraph::Node> CreatePaddedNet(std::shared_ptr<ngraph::ops
ngraph::opset7::Constant::create(ngraph::element::i64, ngraph::Shape{ 2 },
ngraph::Shape{ 1ull, shape_size(leading_transpose->input_value(0).get_shape()) }), false);
// zero padding
// Constant with zero padding
auto const_holding_padding = std::make_shared<ngraph::opset7::Constant>(conv_data.element_type, ngraph::Shape{ 1, biggest_padding }, 0);
copy_runtime_info(conv, const_holding_padding);
@ -342,9 +260,6 @@ static bool Convert(std::shared_ptr<ngraph::Node> leading_transpose,
if (max_pool && !VerifyMaxPool(std::dynamic_pointer_cast<ngraph::opset7::MaxPool>(max_pool)))
return false;
if (!CalculatePadding(conv_data))
return false;
GeneratePadding(std::dynamic_pointer_cast<ngraph::opset7::Transpose>(leading_transpose),
std::dynamic_pointer_cast<ngraph::opset7::Convolution>(conv), conv_data);

View File

@ -247,10 +247,10 @@ const std::vector<std::map<std::string, std::string>> configs2D = {
};
const std::vector<op::PadType> padTypes = {
op::PadType::VALID,
op::PadType::EXPLICIT,
op::PadType::SAME_LOWER,
op::PadType::SAME_UPPER,
op::PadType::VALID
op::PadType::SAME_UPPER
};
const std::vector<modelType> models = {
@ -277,14 +277,14 @@ const std::vector<std::vector<size_t >> maxpool1DPools = { {1, 2} };
const std::vector<std::vector<size_t >> maxpool1DStrides = { {1, 1} };
const std::vector<std::vector<size_t>> input2DNHWC = { {1, 16, 16, 32} };
const std::vector<std::vector<size_t >> kernels2D = { {2, 2}, {4, 1}, {1, 3}};
const std::vector<std::vector<size_t >> kernels2D = { {2, 2}, {4, 1}, {1, 3} };
const std::vector<std::vector<size_t >> strides2D = { {1, 1}, {1, 2}, {2, 1}, {2, 2} };
const std::vector<std::vector<ptrdiff_t>> padBegins2D = { {1, 2} };
const std::vector<std::vector<ptrdiff_t>> padEnds2D = { {3, 1} };
const std::vector<std::vector<size_t >> dilations2D = { {1, 1} };
const std::vector<size_t> numOutChannels2D = { 32 };
const std::vector<std::vector<size_t >> biases2D = { {1, 32, 1, 1} };
const std::vector<std::vector<size_t >> transpBiases2D = { {1, 1, 1, 32} };
const std::vector<size_t> numOutChannels2D = { 8 };
const std::vector<std::vector<size_t >> biases2D = { {1, 8, 1, 1} };
const std::vector<std::vector<size_t >> transpBiases2D = { {1, 1, 1, 8} };
const std::vector<std::vector<size_t >> maxpool2DPools = { {2, 2} };
const std::vector<std::vector<size_t >> maxpool2DStrides = { {2, 1} };

View File

@ -0,0 +1,453 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <gtest/gtest.h>
#include <tuple>
#include "transformations/convert_padded2valid_conv.hpp"
#include "common_test_utils/ngraph_test_utils.hpp"
#include <ngraph/function.hpp>
#include <ngraph/opsets/opset7.hpp>
#include <ngraph/pass/manager.hpp>
#include <transformations/init_node_info.hpp>
namespace testing {
namespace {
// Topology variants used to parametrize the tests below; each value describes
// the chain of operations built by createFunction().
enum class modelType {
    TranspConvTransp = 0, /* Transpose(NHWC->NCHW) => Conv => Transpose(NCHW->NHWC) */
    TranspConvBcastAddTransp, /* Transpose(NHWC->NCHW) => Conv => Broadcasted Add (Bias) => Transpose(NCHW->NHWC) */
    TranspConvBcastAddMaxPoolTransp, /* Transpose(NHWC->NCHW) => Conv => Broadcasted Add (Bias) => MaxPooling => Transpose(NCHW->NHWC) (2D Max Pool case) */
    TranspConvBcastAddActTransp, /* Transpose(NHWC->NCHW) => Conv => Broadcasted Add (Bias) => Activation Function => Transpose(NCHW->NHWC) */
    TranspConvBcastAddMaxPoolActTransp, /* Transpose(NHWC->NCHW) => Conv => Broadcasted Add (Bias) => MaxPool => Activation Function => Transpose(NCHW->NHWC) */
    TranspConvTranspBcastAdd, /* Transpose(NHWC->NCHW) => Conv => Transpose(NCHW->NHWC) => Bias */
    TranspConvTranspBcastAddAct /* Transpose(NHWC->NCHW) => Conv => Transpose(NCHW->NHWC) => Bias => Activation Function */
};
// Convolution parameters extracted from a generated model; used by the
// reference builders to compute the explicitly padded input layout.
struct ConvData {
    size_t input_height;          // input spatial height (NCHW dim 2)
    size_t input_width;           // input spatial width (NCHW dim 3)
    size_t input_channel_count;   // input channels (NCHW dim 1)
    size_t pads_begin_width;      // explicit left padding
    size_t pads_begin_height;     // explicit top padding
    size_t pads_end_width;        // explicit right padding
    size_t pads_end_height;       // explicit bottom padding
};
// Extracts the input dimensions and the explicit padding values of the given
// convolution into the ConvData helper structure.
void GetConvParams(std::shared_ptr<ngraph::opset7::Convolution> conv, ConvData& conv_data) {
    const auto& input_shape = conv->input_value(0).get_shape();
    const auto& pads_begin = conv->get_pads_begin();
    const auto& pads_end = conv->get_pads_end();

    conv_data.input_channel_count = input_shape[1];
    conv_data.input_height = input_shape[2];
    conv_data.input_width = input_shape[3];

    conv_data.pads_begin_height = pads_begin[0];
    conv_data.pads_begin_width = pads_begin[1];
    conv_data.pads_end_height = pads_end[0];
    conv_data.pads_end_width = pads_end[1];
}
// Builds the operation chain for the requested topology variant on top of
// input_node and returns the final Result node.
//
// The common prefix is always Transpose(NHWC->NCHW) => Convolution; the
// switch below appends the variant-specific tail (bias add, max pooling,
// activation function, output transpose) exactly as documented in modelType.
// When conv_data is non-null it is filled with the convolution parameters.
std::shared_ptr<ngraph::opset7::Result> createFunction(const modelType& model,
    const ngraph::Output<ngraph::Node>& input_node,
    const ngraph::Shape& filters_shape,
    const ngraph::Strides& conv_stride,
    const ngraph::CoordinateDiff& pads_begin,
    const ngraph::CoordinateDiff& pads_end,
    const ngraph::Strides& conv_dilation,
    const ngraph::Shape& bias_shape,
    const ngraph::Strides& maxpool_stride,
    const ngraph::Shape& maxpool_shape,
    const ngraph::op::PadType& pad_type,
    ConvData* conv_data) {
    // NHWC -> NCHW
    auto transpose_in_order = std::make_shared<ngraph::opset7::Constant>(ngraph::element::i64, ngraph::Shape{4}, ngraph::Shape{0, 3, 1, 2});
    auto transpose_in = std::make_shared<ngraph::opset7::Transpose>(input_node, transpose_in_order);
    // 4 output channels; input channel count taken from the NHWC input shape
    auto filters = std::make_shared<ngraph::opset7::Constant>(ngraph::element::i64,
        ngraph::Shape{4, input_node.get_shape()[3], filters_shape[0], filters_shape[1]});
    auto conv = std::make_shared<ngraph::opset7::Convolution>(transpose_in, filters, conv_stride, pads_begin, pads_end, conv_dilation, pad_type);
    if (conv_data)
        GetConvParams(conv, *conv_data);
    // NCHW -> NHWC
    auto transpose_out_order = std::make_shared<ngraph::opset7::Constant>(ngraph::element::i64, ngraph::Shape{4}, ngraph::Shape{0, 2, 3, 1});
    auto bias_const = std::make_shared<ngraph::opset7::Constant>(ngraph::element::i64, bias_shape);
    // Default tail (TranspConvTransp): output transpose only
    ngraph::Output<ngraph::Node> last_op = std::make_shared<ngraph::opset7::Transpose>(conv, transpose_out_order);

    switch (model) {
    case modelType::TranspConvBcastAddTransp:
    {
        auto bcast_add = std::make_shared<ngraph::opset7::Add>(conv, bias_const);
        last_op = std::make_shared<ngraph::opset7::Transpose>(bcast_add, transpose_out_order);
    }
    break;

    case modelType::TranspConvBcastAddMaxPoolTransp:
    {
        auto bcast_add = std::make_shared<ngraph::opset7::Add>(conv, bias_const);
        auto maxpool = std::make_shared<ngraph::opset7::MaxPool>(bcast_add, maxpool_stride, ngraph::Shape{0, 0}, ngraph::Shape{0, 0}, maxpool_shape,
            ngraph::op::RoundingType::FLOOR, ngraph::op::PadType::VALID);
        // Fix: this variant has no activation function (see modelType docs);
        // a stray Relu was previously appended after the transpose.
        last_op = std::make_shared<ngraph::opset7::Transpose>(maxpool, transpose_out_order);
    }
    break;

    case modelType::TranspConvBcastAddActTransp:
    {
        auto bcast_add = std::make_shared<ngraph::opset7::Add>(conv, bias_const);
        auto activation = std::make_shared<ngraph::opset7::Relu>(bcast_add);
        last_op = std::make_shared<ngraph::opset7::Transpose>(activation, transpose_out_order);
    }
    break;

    case modelType::TranspConvBcastAddMaxPoolActTransp:
    {
        auto bcast_add = std::make_shared<ngraph::opset7::Add>(conv, bias_const);
        auto maxpool = std::make_shared<ngraph::opset7::MaxPool>(bcast_add, maxpool_stride, ngraph::Shape{0, 0}, ngraph::Shape{0, 0}, maxpool_shape,
            ngraph::op::RoundingType::FLOOR, ngraph::op::PadType::VALID);
        auto activation = std::make_shared<ngraph::opset7::Relu>(maxpool);
        last_op = std::make_shared<ngraph::opset7::Transpose>(activation, transpose_out_order);
    }
    break;

    case modelType::TranspConvTranspBcastAdd:
    {
        // Bias applied after the output transpose (NHWC-shaped bias)
        last_op = std::make_shared<ngraph::opset7::Add>(last_op, bias_const);
    }
    break;

    case modelType::TranspConvTranspBcastAddAct:
    {
        auto bcast_add = std::make_shared<ngraph::opset7::Add>(last_op, bias_const);
        last_op = std::make_shared<ngraph::opset7::Relu>(bcast_add);
    }
    break;

    case modelType::TranspConvTransp:
    default:
        break;
    }

    return std::make_shared<ngraph::opset7::Result>(last_op);
}
// Creates a complete ngraph::Function for the requested topology variant and
// fills conv_data with the parameters of the generated convolution.
std::shared_ptr<ngraph::Function> get_initial_function(const modelType& model,
    const ngraph::PartialShape& input_shape,
    const ngraph::Shape& filters_shape,
    const ngraph::Strides& conv_stride,
    const ngraph::CoordinateDiff& pads_begin,
    const ngraph::CoordinateDiff& pads_end,
    const ngraph::Strides& conv_dilation,
    const ngraph::Shape& bias_shape,
    const ngraph::Strides& maxpool_stride,
    const ngraph::Shape& maxpool_shape,
    const ngraph::op::PadType& pad_type,
    ConvData& conv_data) {
    auto input_params = std::make_shared<ngraph::opset7::Parameter>(ngraph::element::i64, input_shape);
    auto result = createFunction(model, input_params, filters_shape, conv_stride, pads_begin, pads_end,
        conv_dilation, bias_shape, maxpool_stride, maxpool_shape, pad_type, &conv_data);
    return std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, ngraph::ParameterVector{input_params});
}
// ---------------------------------------------------------------------------------------------------------------------
// Fixture for graphs on which the padded-to-valid conversion must NOT apply
// (e.g. unsupported batch size or pooling parameters). The parameter tuple
// mirrors the argument list of get_initial_function().
class ConvertPadded2ValidConvTestInvalidFixture : public CommonTestUtils::TestsCommon,
    public ::testing::WithParamInterface<std::tuple<modelType, ngraph::PartialShape, ngraph::Shape, ngraph::Strides,
        ngraph::CoordinateDiff, ngraph::CoordinateDiff,
        ngraph::Strides, ngraph::Shape,
        ngraph::Strides, ngraph::Shape,
        ngraph::op::PadType>> {
public:
    void SetUp() override;
public:
    // Graph the transformation runs on, and the graph it is compared against
    std::shared_ptr<ngraph::Function> function, reference_function;
    modelType model;
};
// Builds the test graph and the reference graph from the test parameters.
// Both graphs are built identically on purpose: for invalid cases the
// transformation is expected to leave the function unchanged.
void ConvertPadded2ValidConvTestInvalidFixture::SetUp() {
    ngraph::PartialShape input_shape;
    ngraph::Shape filters_shape, bias_shape, maxpool_shape;
    ngraph::Strides conv_stride, conv_dilation, maxpool_stride;
    ngraph::CoordinateDiff pads_begin, pads_end;
    ngraph::op::PadType pad_type;
    ConvData conv_data;
    // Unpack order must match the WithParamInterface tuple declaration
    std::tie(model, input_shape, filters_shape, conv_stride, pads_begin, pads_end, conv_dilation,
        bias_shape, maxpool_stride, maxpool_shape, pad_type) = this->GetParam();
    function = get_initial_function(model, input_shape, filters_shape, conv_stride, pads_begin, pads_end, conv_dilation,
        bias_shape, maxpool_stride, maxpool_shape, pad_type, conv_data);
    reference_function = get_initial_function(model, input_shape, filters_shape, conv_stride, pads_begin, pads_end, conv_dilation,
        bias_shape, maxpool_stride, maxpool_shape, pad_type, conv_data);
}
// ---------------------------------------------------------------------------------------------------------------------
// Fixture for graphs on which the padded-to-valid conversion IS expected to
// apply; the reference graph is built with the padding made explicit via
// get_reference(). The parameter tuple mirrors get_initial_function().
class ConvertPadded2ValidConvTestFixture: public CommonTestUtils::TestsCommon,
    public ::testing::WithParamInterface<std::tuple<modelType, ngraph::PartialShape, ngraph::Shape, ngraph::Strides,
        ngraph::CoordinateDiff, ngraph::CoordinateDiff,
        ngraph::Strides, ngraph::Shape,
        ngraph::Strides, ngraph::Shape,
        ngraph::op::PadType>> {
public:
    void SetUp() override;
    // Builds the expected post-transformation graph: input padded explicitly,
    // convolution switched to zero/EXPLICIT padding.
    std::shared_ptr<ngraph::Function> get_reference(const modelType& model,
        const ngraph::PartialShape& input_shape,
        const ngraph::Shape& filters_shape,
        const ngraph::Strides& conv_stride,
        const ngraph::CoordinateDiff& pads_begin,
        const ngraph::CoordinateDiff& pads_end,
        const ngraph::Strides& conv_dilation,
        const ngraph::Shape& bias_shape,
        const ngraph::Strides& maxpool_stride,
        const ngraph::Shape& maxpool_shape,
        const ngraph::op::PadType& pad_type,
        const ConvData& conv_data);
public:
    // Graph the transformation runs on, and the expected result graph
    std::shared_ptr<ngraph::Function> function, reference_function;
    modelType model;
};
// Builds the graph under test and, via get_reference(), the graph expected
// after the padded-to-valid transformation has run.
void ConvertPadded2ValidConvTestFixture::SetUp() {
    ngraph::PartialShape input_shape;
    ngraph::Shape filters_shape, bias_shape, maxpool_shape;
    ngraph::Strides conv_stride, conv_dilation, maxpool_stride;
    ngraph::CoordinateDiff pads_begin, pads_end;
    ngraph::op::PadType pad_type;
    ConvData conv_data;
    // Unpack order must match the WithParamInterface tuple declaration
    std::tie(model, input_shape, filters_shape, conv_stride, pads_begin, pads_end, conv_dilation,
        bias_shape, maxpool_stride, maxpool_shape, pad_type) = this->GetParam();
    function = get_initial_function(model, input_shape, filters_shape, conv_stride, pads_begin, pads_end, conv_dilation,
        bias_shape, maxpool_stride, maxpool_shape, pad_type, conv_data);
    reference_function = get_reference(model, input_shape, filters_shape, conv_stride, pads_begin, pads_end, conv_dilation,
        bias_shape, maxpool_stride, maxpool_shape, pad_type, conv_data);
}
// Crops `size` elements starting at `offset` from the second dimension of a
// flattened (1 x N) input, using StridedSlice. The begin/end masks keep the
// first (batch) dimension untouched.
std::shared_ptr<ngraph::opset7::StridedSlice> FlatCrop(ngraph::Output<ngraph::Node> input, size_t offset, size_t size) {
    return std::make_shared<ngraph::opset7::StridedSlice>(
        input, // data
        ngraph::opset7::Constant::create(ngraph::element::i64, ngraph::Shape{2}, {(size_t)0, offset}), // begin slice index
        ngraph::opset7::Constant::create(ngraph::element::i64, ngraph::Shape{2}, {(size_t)0, offset + size}), // end slice index
        ngraph::opset7::Constant::create(ngraph::element::i64, ngraph::Shape{2}, {(size_t)1, (size_t)1}), // strides
        std::vector<int64_t>{1, 0}, // begin mask
        std::vector<int64_t>{1, 0}); // end mask
}
// Appends a zero-padding chunk of `size` elements to the concat input list.
// The full-size padding constant is reused directly when it matches exactly;
// otherwise a crop of the constant is appended instead.
void InsertPadding(ngraph::OutputVector& input_rows_to_concat, size_t size,
    const std::shared_ptr<ngraph::opset7::Constant> padding_const, size_t biggest_padding) {
    if (size != biggest_padding) {
        input_rows_to_concat.push_back(FlatCrop(padding_const, 0, size));
    } else {
        input_rows_to_concat.push_back(padding_const);
    }
}
// Builds a flattened (1 x N) copy of input_node with explicit zero padding
// inserted around every row, mirroring what the GNA padded-to-valid
// convolution transformation is expected to produce.
// Returns nullptr when conv_data carries no padding at all.
std::shared_ptr<ngraph::Node> CreatePaddedNet(const ngraph::Output<ngraph::Node>& input_node,
    const ConvData& conv_data) {
    // Padding sizes expressed in flattened (channel-interleaved) element counts
    size_t flat_left_padding = conv_data.input_channel_count * conv_data.pads_begin_width;
    size_t flat_right_padding = conv_data.input_channel_count * conv_data.pads_end_width;
    size_t padded_row_size = flat_left_padding + conv_data.input_channel_count * conv_data.input_width + flat_right_padding;
    size_t flat_top_padding = padded_row_size * conv_data.pads_begin_height;
    size_t flat_bottom_padding = padded_row_size * conv_data.pads_end_height;
    // The single padding constant must be large enough for every padding use
    size_t biggest_padding = std::max(std::max(flat_left_padding, flat_right_padding), std::max(flat_top_padding, flat_bottom_padding));

    if (conv_data.input_height > 1 && (flat_top_padding > 1 || flat_bottom_padding > 1)) {
        biggest_padding = biggest_padding > padded_row_size ? biggest_padding : padded_row_size;
    }

    // No padding anywhere => nothing to build
    if (!biggest_padding)
        return nullptr;

    // Flatten the input to a single row (1 x total-element-count)
    auto flat_input = std::make_shared<ngraph::opset7::Reshape>(input_node,
        ngraph::opset7::Constant::create(ngraph::element::i64, ngraph::Shape{2},
            ngraph::Shape{1ull, shape_size(input_node.get_shape())}), false);

    // Constant with zero padding
    auto const_holding_padding = std::make_shared<ngraph::opset7::Constant>(ngraph::element::i64, ngraph::Shape{1, biggest_padding}, 0);

    std::shared_ptr<ngraph::Node> original_row = flat_input;
    ngraph::OutputVector input_rows_to_concat;

    // Add top padding
    for (size_t p = 0; p < conv_data.pads_begin_height; p++) {
        InsertPadding(input_rows_to_concat, padded_row_size, const_holding_padding, biggest_padding);
    }

    if (flat_left_padding || flat_right_padding) {
        // Pad every row of the input plane if necessary
        for (size_t h = 0; h < conv_data.input_height; h++) {
            // left padding      input       right padding
            //      |              |              |
            //      +--------------+--------------+
            //                     |
            //                  concat
            if (conv_data.input_height > 1)
                original_row = FlatCrop(flat_input, h * conv_data.input_width * conv_data.input_channel_count,
                    conv_data.input_width * conv_data.input_channel_count);
            ngraph::OutputVector single_row_concat_inputs;
            if (flat_left_padding) {
                InsertPadding(single_row_concat_inputs, flat_left_padding, const_holding_padding, biggest_padding);
            }
            single_row_concat_inputs.push_back(original_row);
            if (flat_right_padding) {
                InsertPadding(single_row_concat_inputs, flat_right_padding, const_holding_padding, biggest_padding);
            }
            auto padded_row_concat = std::make_shared<ngraph::opset7::Concat>(single_row_concat_inputs, 1);
            input_rows_to_concat.push_back(padded_row_concat);
        }
    } else {
        // No horizontal padding: rows can be concatenated unchanged
        input_rows_to_concat.push_back(original_row);
    }

    // Bottom padding
    for (size_t p = 0; p < conv_data.pads_end_height; p++) {
        InsertPadding(input_rows_to_concat, padded_row_size, const_holding_padding, biggest_padding);
    }

    auto padded_input_plane = std::make_shared<ngraph::opset7::Concat>(input_rows_to_concat, 1);
    return padded_input_plane;
}
// Builds the graph expected after the transformation: the input is padded
// explicitly with zero constants (CreatePaddedNet) and the convolution is
// created with zero padding / EXPLICIT pad type. Falls back to the original
// topology when no padding is required.
std::shared_ptr<ngraph::Function> ConvertPadded2ValidConvTestFixture::get_reference(const modelType& model,
    const ngraph::PartialShape& input_shape,
    const ngraph::Shape& filters_shape,
    const ngraph::Strides& conv_stride,
    const ngraph::CoordinateDiff& pads_begin,
    const ngraph::CoordinateDiff& pads_end,
    const ngraph::Strides& conv_dilation,
    const ngraph::Shape& bias_shape,
    const ngraph::Strides& maxpool_stride,
    const ngraph::Shape& maxpool_shape,
    const ngraph::op::PadType& pad_type,
    const ConvData& conv_data) {
    auto inputParams = std::make_shared<ngraph::opset7::Parameter>(ngraph::element::i64, input_shape);

    // Add padding where necessary
    //
    //   padding
    //   padding
    //   ... row ...
    //   ... row ...
    //   ...........
    //   ... row ...
    //   padding
    //   padding
    auto padded_input_plane = CreatePaddedNet(inputParams, conv_data);

    std::shared_ptr<ngraph::opset7::Result> result;
    if (padded_input_plane) {
        // Reshape the flat padded row back to NHWC with padded spatial dims
        auto shape_const = std::make_shared<ngraph::opset7::Constant>(ngraph::element::i64, ngraph::Shape{4},
            ngraph::Shape{static_cast<size_t>(1),
                conv_data.pads_begin_height + conv_data.input_height + conv_data.pads_end_height,
                conv_data.pads_begin_width + conv_data.input_width + conv_data.pads_end_width,
                conv_data.input_channel_count});
        auto padded_input_plane_reshaped = std::make_shared<ngraph::opset7::Reshape>(padded_input_plane, shape_const, false);
        // Padding is now explicit in the input, so the conv uses zero padding
        result = createFunction(model, padded_input_plane_reshaped, filters_shape, conv_stride,
            ngraph::CoordinateDiff{0, 0}, ngraph::CoordinateDiff{0, 0}, conv_dilation, bias_shape,
            maxpool_stride, maxpool_shape, ngraph::op::PadType::EXPLICIT, nullptr);
    } else {
        // Valid padding
        result = createFunction(model, inputParams, filters_shape, conv_stride, pads_begin, pads_end, conv_dilation, bias_shape,
            maxpool_stride, maxpool_shape, pad_type, nullptr);
    }

    return std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, ngraph::ParameterVector{inputParams});
}
// ---------------------------------------------------------------------------------------------------------------------
// Runs the GNA transformation matching the given topology variant on
// `function` and compares the result against `reference_function`,
// including node attributes.
void execute_test(const modelType& model, std::shared_ptr<ngraph::Function> function, std::shared_ptr<ngraph::Function> reference_function) {
    ngraph::pass::Manager manager;
    manager.register_pass<ngraph::pass::InitNodeInfo>();

    // Each topology variant is handled by a dedicated transformation pass
    switch (model) {
    default:
    case modelType::TranspConvTransp:
        manager.register_pass<GNAPluginNS::ConvertPadded2ValidConv>();
        break;
    case modelType::TranspConvBcastAddTransp:
        manager.register_pass<GNAPluginNS::ConvertPaddedWithBias2ValidConv>();
        break;
    case modelType::TranspConvBcastAddMaxPoolTransp:
        manager.register_pass<GNAPluginNS::ConvertPaddedWithBiasMaxPool2ValidConv>();
        break;
    case modelType::TranspConvBcastAddActTransp:
        manager.register_pass<GNAPluginNS::ConvertPaddedWithBiasAF2ValidConv>();
        break;
    case modelType::TranspConvBcastAddMaxPoolActTransp:
        manager.register_pass<GNAPluginNS::ConvertPaddedWithBiasMaxPoolAF2ValidConv>();
        break;
    case modelType::TranspConvTranspBcastAdd:
        manager.register_pass<GNAPluginNS::ConvertPaddedTransposedWithBias2ValidConv>();
        break;
    case modelType::TranspConvTranspBcastAddAct:
        manager.register_pass<GNAPluginNS::ConvertPaddedTransposedWithBiasAF2ValidConv>();
        break;
    }

    manager.run_passes(function);
    const FunctionsComparator func_comparator = FunctionsComparator::with_default().enable(FunctionsComparator::ATTRIBUTES);
    const FunctionsComparator::Result result = func_comparator(function, reference_function);
    ASSERT_TRUE(result.valid);
}
// Valid cases: the transformation is expected to rewrite the graph so it
// matches the explicitly padded reference built by get_reference().
TEST_P(ConvertPadded2ValidConvTestFixture, CompareFunctions) {
    execute_test(model, function, reference_function);
}

// Tuple layout: model, input shape (NHWC), filter HxW, conv stride,
// pads begin, pads end, dilation, bias shape, maxpool stride, maxpool kernel,
// auto-pad type.
INSTANTIATE_TEST_SUITE_P(ConvertPadded2ValidConvTestSuite, ConvertPadded2ValidConvTestFixture,
    ::testing::Values(
        std::make_tuple(modelType::TranspConvTransp, ngraph::PartialShape{1, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::EXPLICIT),
        std::make_tuple(modelType::TranspConvBcastAddTransp, ngraph::PartialShape{1, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::EXPLICIT),
        std::make_tuple(modelType::TranspConvBcastAddMaxPoolTransp, ngraph::PartialShape{1, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::EXPLICIT),
        std::make_tuple(modelType::TranspConvBcastAddActTransp, ngraph::PartialShape{1, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::SAME_LOWER),
        std::make_tuple(modelType::TranspConvBcastAddMaxPoolActTransp, ngraph::PartialShape{1, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::SAME_UPPER),
        std::make_tuple(modelType::TranspConvTranspBcastAdd, ngraph::PartialShape{1, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 1, 1, 4}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::EXPLICIT),
        std::make_tuple(modelType::TranspConvTranspBcastAddAct, ngraph::PartialShape{1, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 1, 1, 4}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::EXPLICIT)));
// Invalid cases: the transformation must leave the graph untouched, so the
// function is compared against an identically built reference.
TEST_P(ConvertPadded2ValidConvTestInvalidFixture, CompareFunctions) {
    execute_test(model, function, reference_function);
}

// Invalid inputs: batch size > 1, oversized maxpool kernels, etc.
// Tuple layout is the same as in the valid suite above.
INSTANTIATE_TEST_SUITE_P(ConvertPadded2ValidConvInvalidTestSuite, ConvertPadded2ValidConvTestInvalidFixture,
    ::testing::Values(
        std::make_tuple(modelType::TranspConvTransp, ngraph::PartialShape{2, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::SAME_UPPER),
        std::make_tuple(modelType::TranspConvBcastAddTransp, ngraph::PartialShape{2, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::EXPLICIT),
        std::make_tuple(modelType::TranspConvBcastAddMaxPoolTransp, ngraph::PartialShape{2, 16, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{5, 1}, ngraph::op::PadType::EXPLICIT),
        std::make_tuple(modelType::TranspConvBcastAddActTransp, ngraph::PartialShape{2, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::SAME_LOWER),
        std::make_tuple(modelType::TranspConvBcastAddMaxPoolActTransp, ngraph::PartialShape{2, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 5}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 4, 1, 1}, ngraph::Strides{1, 1}, ngraph::Shape{1, 4}, ngraph::op::PadType::SAME_UPPER),
        std::make_tuple(modelType::TranspConvTranspBcastAdd, ngraph::PartialShape{2, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 1, 1, 4}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::EXPLICIT),
        std::make_tuple(modelType::TranspConvTranspBcastAddAct, ngraph::PartialShape{2, 1, 16, 8}, ngraph::Shape{1, 2}, ngraph::Strides{1, 1},
            ngraph::CoordinateDiff{0, 2}, ngraph::CoordinateDiff{0, 3}, ngraph::Strides{1, 1},
            ngraph::Shape{1, 1, 1, 4}, ngraph::Strides{1, 1}, ngraph::Shape{1, 2}, ngraph::op::PadType::EXPLICIT)));
} // namespace
} // namespace testing