From 772465da1ebd72f95298bf876e649e5057f4329d Mon Sep 17 00:00:00 2001
From: Szymon Durawa
Date: Tue, 15 Jun 2021 06:08:10 +0200
Subject: [PATCH] Add output shape and output padding for Convolution Backprop SLTs. (#5576)

* Create output shape for Convolution Backprop SLTs.

* Add output_padding attribute to SLT scope.

* Introduce SLT for Serialization.

* Introduce new test layer class ConvolutionBackpropLayerTest which contains output_padding attribute and output_shape input. Old one is deprecated, but cannot be removed due to kmb plugin dependency.

* Add ConvolutionBackpropDataLayerTest into TEST_P.

* ConvolutionBackpropDataLayerTest left as legacy class used by kmb_plugin.

* Remove redundant variables.

* Switch to new API for gpu SLTs.

* Remove legacy API.

* Introduce legacy API to match dependency for KMB and ARM plugins.

* Create test cases for output_padding attribute.

* Fixing smoke_Deconv tests.
---
 .../single_layer/convolution_backprop.cpp     |  58 ++++++
 .../convolution_backprop_data.cpp             | 168 ++++++++++++++++--
 .../convolution_backprop_data.cpp             |  58 ++++--
 .../convolution_backprop_data.cpp             | 136 ++++++++++++--
 .../convolution_backprop.hpp                  |  15 ++
 .../convolution_backprop_data.hpp             |   1 +
 .../single_layer/convolution_backprop.hpp     |  49 +++++
 .../convolution_backprop_data.hpp             |   2 +
 .../src/single_layer/convolution_backprop.cpp |  71 ++++++++
 .../convolution_backprop_data.cpp             |   4 +-
 .../include/ngraph_functions/builders.hpp     |  17 ++
 .../src/convolution_backprop_data.cpp         |  45 ++++-
 12 files changed, 579 insertions(+), 45 deletions(-)
 create mode 100644 inference-engine/tests/functional/inference_engine/serialization/single_layer/convolution_backprop.cpp
 create mode 100644 inference-engine/tests/functional/plugin/shared/include/single_layer_tests/convolution_backprop.hpp
 create mode 100644 inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/convolution_backprop.hpp
 create mode 100644 inference-engine/tests/functional/shared_test_classes/src/single_layer/convolution_backprop.cpp

diff --git a/inference-engine/tests/functional/inference_engine/serialization/single_layer/convolution_backprop.cpp b/inference-engine/tests/functional/inference_engine/serialization/single_layer/convolution_backprop.cpp
new file mode 100644
index 00000000000..2f9383ad914
--- /dev/null
+++ b/inference-engine/tests/functional/inference_engine/serialization/single_layer/convolution_backprop.cpp
@@ -0,0 +1,58 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <vector>
+
+#include "shared_test_classes/single_layer/convolution_backprop.hpp"
+
+using namespace LayerTestsDefinitions;
+
+namespace {
+
+TEST_P(ConvolutionBackpropLayerTest, Serialize) {
+    Serialize();
+}
+
+const std::vector<InferenceEngine::Precision> precisions = {
+    InferenceEngine::Precision::FP64, InferenceEngine::Precision::FP32,
+    InferenceEngine::Precision::FP16, InferenceEngine::Precision::BF16,
+    InferenceEngine::Precision::I8, InferenceEngine::Precision::I16,
+    InferenceEngine::Precision::I32, InferenceEngine::Precision::I64,
+    InferenceEngine::Precision::U8, InferenceEngine::Precision::U16,
+    InferenceEngine::Precision::U32, InferenceEngine::Precision::U64,
+};
+const std::vector<std::vector<size_t>> kernels = {{3, 3}};
+const std::vector<std::vector<size_t>> strides = {{1, 1}};
+const std::vector<std::vector<ptrdiff_t>> padBegins = {{0, 0}};
+const std::vector<std::vector<ptrdiff_t>> padEnds = {{0, 0}};
+const std::vector<std::vector<size_t>> dilations = {{1, 1}};
+const std::vector<std::vector<ptrdiff_t>> outPadding = {{}, {1, 1}};
+const std::vector<size_t> numOutChannels = {8, 16};
+const std::vector<ngraph::op::PadType> pad_types = {
ngraph::op::PadType::EXPLICIT, ngraph::op::PadType::VALID, + ngraph::op::PadType::SAME_LOWER, ngraph::op::PadType::SAME_UPPER}; +const auto inputShapes = std::vector({1, 16, 20, 20}); +const std::vector> emptyOutputShape = {{}}; + +const auto convolutionBackpropData2DParams = ::testing::Combine( + ::testing::ValuesIn(kernels), ::testing::ValuesIn(strides), + ::testing::ValuesIn(padBegins), ::testing::ValuesIn(padEnds), + ::testing::ValuesIn(dilations), ::testing::ValuesIn(numOutChannels), + ::testing::ValuesIn(pad_types), ::testing::ValuesIn(outPadding)); + +INSTANTIATE_TEST_CASE_P( + smoke_convolutionBackpropData2D_Serialization, ConvolutionBackpropLayerTest, + ::testing::Combine( + convolutionBackpropData2DParams, + ::testing::ValuesIn(precisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(inputShapes), + ::testing::ValuesIn(emptyOutputShape), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); + +} // namespace diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/convolution_backprop_data.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/convolution_backprop_data.cpp index a8b4c01497f..1a5f3885c93 100644 --- a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/convolution_backprop_data.cpp +++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/convolution_backprop_data.cpp @@ -4,7 +4,7 @@ #include -#include "single_layer_tests/convolution_backprop_data.hpp" +#include "single_layer_tests/convolution_backprop.hpp" #include "common_test_utils/test_constants.hpp" using namespace LayerTestsDefinitions; @@ -17,6 +17,8 @@ const std::vector netPrecisions = { }; const std::vector numOutChannels = {1, 5, 16}; +const std::vector> emptyOutputShape = {{}}; +const std::vector> emptyOutputPadding = {{}}; /* ============= 2D ConvolutionBackpropData ============= */ const std::vector> inputShapes2D = {{1, 3, 30, 30}, @@ -35,7 +37,8 @@ const auto conv2DParams_ExplicitPadding = ::testing::Combine( ::testing::ValuesIn(padEnds2D), ::testing::ValuesIn(dilations2D), ::testing::ValuesIn(numOutChannels), - ::testing::Values(ngraph::op::PadType::EXPLICIT) + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(emptyOutputPadding) ); const auto conv2DParams_AutoPadValid = ::testing::Combine( ::testing::ValuesIn(kernels2D), @@ -44,10 +47,11 @@ const auto conv2DParams_AutoPadValid = ::testing::Combine( ::testing::Values(std::vector({0, 0})), ::testing::ValuesIn(dilations2D), ::testing::ValuesIn(numOutChannels), - ::testing::Values(ngraph::op::PadType::VALID) + ::testing::Values(ngraph::op::PadType::VALID), + ::testing::ValuesIn(emptyOutputPadding) ); -INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_ExplicitPadding, ConvolutionBackpropDataLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_ExplicitPadding, ConvolutionBackpropLayerTest, ::testing::Combine( conv2DParams_ExplicitPadding, ::testing::ValuesIn(netPrecisions), @@ -56,10 +60,11 @@ INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_ExplicitPadding, Convolu ::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(InferenceEngine::Layout::ANY), ::testing::ValuesIn(inputShapes2D), + 
::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), - ConvolutionBackpropDataLayerTest::getTestCaseName); + ConvolutionBackpropLayerTest::getTestCaseName); -INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_AutoPadValid, ConvolutionBackpropDataLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_AutoPadValid, ConvolutionBackpropLayerTest, ::testing::Combine( conv2DParams_AutoPadValid, ::testing::ValuesIn(netPrecisions), @@ -68,8 +73,75 @@ INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_AutoPadValid, Convolutio ::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(InferenceEngine::Layout::ANY), ::testing::ValuesIn(inputShapes2D), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), - ConvolutionBackpropDataLayerTest::getTestCaseName); + ConvolutionBackpropLayerTest::getTestCaseName); + +const std::vector> inputShape2D = {{1, 3, 9, 12}}; +const std::vector> outputShapes2D = {{6, 6}, {4, 9}}; + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_OutputShapeDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv2DParams_AutoPadValid, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShape2D), + ::testing::ValuesIn(outputShapes2D), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); + +const std::vector> outputPadding2D = {{1, 1}, {2, 2}}; +const std::vector> testStrides2D = {{3, 3}}; + +const auto conv2DParams_ExplicitPadding_output_padding = ::testing::Combine( + ::testing::ValuesIn(kernels2D), + ::testing::ValuesIn(testStrides2D), + ::testing::ValuesIn(padBegins2D), + ::testing::ValuesIn(padEnds2D), + ::testing::ValuesIn(dilations2D), + ::testing::ValuesIn(numOutChannels), + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(outputPadding2D) +); +const auto conv2DParams_AutoPadValid_output_padding = ::testing::Combine( + ::testing::ValuesIn(kernels2D), + ::testing::ValuesIn(testStrides2D), + ::testing::Values(std::vector({0, 0})), + ::testing::Values(std::vector({0, 0})), + ::testing::ValuesIn(dilations2D), + ::testing::ValuesIn(numOutChannels), + ::testing::Values(ngraph::op::PadType::VALID), + ::testing::ValuesIn(outputPadding2D) +); + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_ExplicitPadding_OutputPaddingDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv2DParams_AutoPadValid_output_padding, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShapes2D), + ::testing::ValuesIn(emptyOutputShape), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_AutoPadding_OutputPaddingDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv2DParams_ExplicitPadding_output_padding, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), 
+ ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShapes2D), + ::testing::ValuesIn(emptyOutputShape), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); /* ============= 3D ConvolutionBackpropData ============= */ const std::vector> inputShapes3D = {{1, 3, 10, 10, 10}, @@ -88,7 +160,8 @@ const auto conv3DParams_ExplicitPadding = ::testing::Combine( ::testing::ValuesIn(padEnds3D), ::testing::ValuesIn(dilations3D), ::testing::ValuesIn(numOutChannels), - ::testing::Values(ngraph::op::PadType::EXPLICIT) + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(emptyOutputPadding) ); const auto conv3DParams_AutoPadValid = ::testing::Combine( ::testing::ValuesIn(kernels3D), @@ -97,10 +170,11 @@ const auto conv3DParams_AutoPadValid = ::testing::Combine( ::testing::Values(std::vector({0, 0, 0})), ::testing::ValuesIn(dilations3D), ::testing::ValuesIn(numOutChannels), - ::testing::Values(ngraph::op::PadType::VALID) + ::testing::Values(ngraph::op::PadType::VALID), + ::testing::ValuesIn(emptyOutputPadding) ); -INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_ExplicitPadding, ConvolutionBackpropDataLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_ExplicitPadding, ConvolutionBackpropLayerTest, ::testing::Combine( conv3DParams_ExplicitPadding, ::testing::ValuesIn(netPrecisions), @@ -109,10 +183,11 @@ INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_ExplicitPadding, Convolu ::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(InferenceEngine::Layout::ANY), ::testing::ValuesIn(inputShapes3D), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), - ConvolutionBackpropDataLayerTest::getTestCaseName); + ConvolutionBackpropLayerTest::getTestCaseName); -INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_AutoPadValid, ConvolutionBackpropDataLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_AutoPadValid, ConvolutionBackpropLayerTest, ::testing::Combine( conv3DParams_AutoPadValid, ::testing::ValuesIn(netPrecisions), @@ -121,7 +196,74 @@ INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_AutoPadValid, Convolutio ::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(InferenceEngine::Layout::ANY), ::testing::ValuesIn(inputShapes3D), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), - ConvolutionBackpropDataLayerTest::getTestCaseName); + ConvolutionBackpropLayerTest::getTestCaseName); + +const std::vector> inputShape3D = {{1, 3, 10, 10, 10}}; +const std::vector> outputShapes3D = {{8, 8, 8}, {10, 10, 10}}; + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_OutputShapeDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv3DParams_AutoPadValid, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShape3D), + ::testing::ValuesIn(outputShapes3D), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); + +const std::vector> outputPadding3D = {{1, 1, 1}, {2, 2, 2}}; +const std::vector> testStrides3D = {{3, 3, 3}}; + +const auto conv3DParams_ExplicitPadding_output_padding = ::testing::Combine( + ::testing::ValuesIn(kernels3D), + ::testing::ValuesIn(testStrides3D), + 
::testing::ValuesIn(padBegins3D), + ::testing::ValuesIn(padEnds3D), + ::testing::ValuesIn(dilations3D), + ::testing::ValuesIn(numOutChannels), + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(outputPadding3D) +); +const auto conv3DParams_AutoPadValid_output_padding = ::testing::Combine( + ::testing::ValuesIn(kernels3D), + ::testing::ValuesIn(testStrides3D), + ::testing::Values(std::vector({0, 0, 0})), + ::testing::Values(std::vector({0, 0, 0})), + ::testing::ValuesIn(dilations3D), + ::testing::ValuesIn(numOutChannels), + ::testing::Values(ngraph::op::PadType::VALID), + ::testing::ValuesIn(outputPadding3D) +); + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_ExplicitPadding_OutputPaddingDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv3DParams_AutoPadValid_output_padding, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShapes3D), + ::testing::ValuesIn(emptyOutputShape), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_AutoPadding_OutputPaddingDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv3DParams_ExplicitPadding_output_padding, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShapes3D), + ::testing::ValuesIn(emptyOutputShape), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); } // namespace diff --git a/inference-engine/tests/functional/plugin/cpu/single_layer_tests/convolution_backprop_data.cpp b/inference-engine/tests/functional/plugin/cpu/single_layer_tests/convolution_backprop_data.cpp index 5fdcbef5747..f1d144f666e 100755 --- a/inference-engine/tests/functional/plugin/cpu/single_layer_tests/convolution_backprop_data.cpp +++ b/inference-engine/tests/functional/plugin/cpu/single_layer_tests/convolution_backprop_data.cpp @@ -8,18 +8,18 @@ #include "shared_test_classes/base/layer_test_utils.hpp" #include "ngraph_functions/utils/ngraph_helpers.hpp" #include "ngraph_functions/builders.hpp" -#include +#include using namespace InferenceEngine; using namespace CPUTestUtils; namespace CPULayerTestsDefinitions { -using LayerTestsDefinitions::convBackpropDataSpecificParams; -using LayerTestsDefinitions::convBackpropDataLayerTestParamsSet; +using LayerTestsDefinitions::convBackpropSpecificParams; +using LayerTestsDefinitions::convBackpropLayerTestParamsSet; typedef std::tuple< - convBackpropDataLayerTestParamsSet, + convBackpropLayerTestParamsSet, CPUSpecificParams, fusingSpecificParams, std::map > deconvLayerCPUTestParamsSet; @@ -28,14 +28,14 @@ class DeconvolutionLayerCPUTest : public testing::WithParamInterface obj) { - convBackpropDataLayerTestParamsSet basicParamsSet; + convBackpropLayerTestParamsSet basicParamsSet; CPUSpecificParams cpuParams; fusingSpecificParams fusingParams; std::map additionalConfig; std::tie(basicParamsSet, cpuParams, fusingParams, additionalConfig) = obj.param; std::ostringstream result; - result << 
LayerTestsDefinitions::ConvolutionBackpropDataLayerTest::getTestCaseName(testing::TestParamInfo( + result << LayerTestsDefinitions::ConvolutionBackpropLayerTest::getTestCaseName(testing::TestParamInfo( basicParamsSet, 0)); result << CPUTestsBase::getTestCaseName(cpuParams); @@ -52,7 +52,7 @@ public: } protected: void SetUp() override { - convBackpropDataLayerTestParamsSet basicParamsSet; + convBackpropLayerTestParamsSet basicParamsSet; CPUSpecificParams cpuParams; fusingSpecificParams fusingParams; std::map additionalConfig; @@ -63,10 +63,11 @@ protected: std::tie(inFmts, outFmts, priority, selectedType) = cpuParams; std::tie(postOpMgrPtr, fusedOps) = fusingParams; - convBackpropDataSpecificParams convParams; + convBackpropSpecificParams convParams; std::vector inputShape; + std::vector outputShape; auto netPrecision = InferenceEngine::Precision::UNSPECIFIED; - std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetDevice) = basicParamsSet; + std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, outputShape, targetDevice) = basicParamsSet; if (inPrc == Precision::UNSPECIFIED) { selectedType += std::string("_") + Precision(Precision::FP32).name(); @@ -76,16 +77,22 @@ protected: ngraph::op::PadType padType; InferenceEngine::SizeVector kernel, stride, dilation; - std::vector padBegin, padEnd; + std::vector padBegin, padEnd, outPadding; size_t convOutChannels; - std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType) = convParams; + std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType, outPadding) = convParams; auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision); auto inputParams = ngraph::builder::makeParams(ngraph::element::f32, { inputShape }); auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes(inputParams)); auto deconvolutionNode = ngraph::builder::makeConvolutionBackpropData(paramOuts.front(), ngPrc, kernel, stride, padBegin, - padEnd, dilation, padType, convOutChannels); + padEnd, dilation, padType, convOutChannels, false, outPadding); + + if (!outputShape.empty()) { + auto outShape = ngraph::opset3::Constant::create(ngraph::element::i64, {outputShape.size()}, outputShape); + deconvolutionNode = ngraph::builder::makeConvolutionBackpropData(paramOuts.front(), outShape, ngPrc, kernel, stride, padBegin, + padEnd, dilation, padType, convOutChannels); + } function = makeNgraphFunction(ngPrc, inputParams, deconvolutionNode, "convolutionBackpropData"); } @@ -108,6 +115,8 @@ const std::vector fusingParamsSet{ const std::map cpuEmptyPluginConfig; const std::map cpuBF16PluginConfig = { { PluginConfigParams::KEY_ENFORCE_BF16, PluginConfigParams::YES } }; +const std::vector emptyOutputShape = { {} }; +const std::vector> emptyOutputPadding = { {} }; /* ============= Deconvolution params (planar layout) ============= */ const SizeVector numOutChannels_Planar = { 6 }; @@ -139,7 +148,8 @@ const auto convParams_ExplicitPadding_Planar_2D = ::testing::Combine( ::testing::ValuesIn(padEnds2d), ::testing::ValuesIn(dilations2d), ::testing::ValuesIn(numOutChannels_Planar), - ::testing::Values(ngraph::op::PadType::EXPLICIT) + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(emptyOutputPadding) ); INSTANTIATE_TEST_CASE_P(smoke_Deconv_2D_Planar_FP32, DeconvolutionLayerCPUTest, @@ -152,6 +162,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_2D_Planar_FP32, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), 
::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 12, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_gemm_2D})), ::testing::ValuesIn(fusingParamsSet), @@ -168,6 +179,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_2D_Planar_BF16, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), ::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 12, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_gemm_2D})), ::testing::ValuesIn(fusingParamsSet), @@ -182,7 +194,8 @@ const auto convParams_ExplicitPadding_Planar_3D = ::testing::Combine( ::testing::ValuesIn(padEnds3d), ::testing::ValuesIn(dilations3d), ::testing::ValuesIn(numOutChannels_Planar), - ::testing::Values(ngraph::op::PadType::EXPLICIT) + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(emptyOutputPadding) ); INSTANTIATE_TEST_CASE_P(smoke_Deconv_3D_Planar_FP32, DeconvolutionLayerCPUTest, @@ -195,6 +208,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_3D_Planar_FP32, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), ::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 12, 7, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_gemm_3D})), ::testing::ValuesIn(fusingParamsSet), @@ -211,6 +225,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_3D_Planar_BF16, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), ::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 12, 7, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_gemm_3D})), ::testing::ValuesIn(fusingParamsSet), @@ -225,7 +240,8 @@ const auto convParams_ExplicitPadding_Blocked_2D = ::testing::Combine( ::testing::ValuesIn(padEnds2d), ::testing::ValuesIn(dilations2d), ::testing::ValuesIn(numOutChannels_Blocked), - ::testing::Values(ngraph::op::PadType::EXPLICIT) + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(emptyOutputPadding) ); INSTANTIATE_TEST_CASE_P(smoke_Deconv_2D_Blocked_FP32, DeconvolutionLayerCPUTest, @@ -238,6 +254,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_2D_Blocked_FP32, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), ::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 67, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D})), ::testing::ValuesIn(fusingParamsSet), @@ -254,6 +271,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_2D_Blocked_BF16, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), ::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 67, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D})), ::testing::ValuesIn(fusingParamsSet), @@ -268,7 +286,8 @@ const auto convParams_ExplicitPadding_Blocked_3D = ::testing::Combine( ::testing::ValuesIn(padEnds3d), ::testing::ValuesIn(dilations3d), ::testing::ValuesIn(numOutChannels_Blocked), - ::testing::Values(ngraph::op::PadType::EXPLICIT) + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(emptyOutputPadding) ); INSTANTIATE_TEST_CASE_P(smoke_Deconv_3D_Blocked_FP32, 
DeconvolutionLayerCPUTest, @@ -281,6 +300,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_3D_Blocked_FP32, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), ::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 67, 7, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_3D})), ::testing::ValuesIn(fusingParamsSet), @@ -297,6 +317,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_3D_Blocked_BF16, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), ::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 67, 7, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_3D})), ::testing::ValuesIn(fusingParamsSet), @@ -312,7 +333,8 @@ const auto convParams_ExplicitPadding_1x1_2D = ::testing::Combine( ::testing::Values(std::vector({0, 0})), ::testing::Values(SizeVector({1, 1})), ::testing::ValuesIn(numOutChannels_Blocked), - ::testing::Values(ngraph::op::PadType::EXPLICIT) + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(emptyOutputPadding) ); INSTANTIATE_TEST_CASE_P(smoke_Deconv_2D_1x1_FP32, DeconvolutionLayerCPUTest, @@ -325,6 +347,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_2D_1x1_FP32, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), ::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 67, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D_1x1})), ::testing::ValuesIn(fusingParamsSet), @@ -341,6 +364,7 @@ INSTANTIATE_TEST_CASE_P(smoke_Deconv_2D_1x1_BF16, DeconvolutionLayerCPUTest, ::testing::Values(Layout::ANY), ::testing::Values(Layout::ANY), ::testing::Values(std::vector({ 2, 67, 7, 7 })), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_CPU)), ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D_1x1})), ::testing::ValuesIn(fusingParamsSet), diff --git a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/convolution_backprop_data.cpp b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/convolution_backprop_data.cpp index f18346cde9f..f404d932bc8 100644 --- a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/convolution_backprop_data.cpp +++ b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/convolution_backprop_data.cpp @@ -4,7 +4,7 @@ #include -#include "single_layer_tests/convolution_backprop_data.hpp" +#include "single_layer_tests/convolution_backprop.hpp" #include "common_test_utils/test_constants.hpp" using namespace LayerTestsDefinitions; @@ -17,6 +17,8 @@ const std::vector netPrecisions = { }; const std::vector numOutChannels = {1, 5, 16}; +const std::vector> emptyOutputShape = {{}}; +const std::vector> emptyOutputPadding = {{}}; /* ============= 2D ConvolutionBackpropData ============= */ const std::vector netPrecisions2D = { @@ -40,7 +42,8 @@ const auto conv2DParams_ExplicitPadding = ::testing::Combine( ::testing::ValuesIn(padEnds2D), ::testing::ValuesIn(dilations2D), ::testing::ValuesIn(numOutChannels), - ::testing::Values(ngraph::op::PadType::EXPLICIT) + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(emptyOutputPadding) ); const auto conv2DParams_AutoPadValid = ::testing::Combine( 
::testing::ValuesIn(kernels2D), @@ -49,10 +52,11 @@ const auto conv2DParams_AutoPadValid = ::testing::Combine( ::testing::Values(std::vector({0, 0})), ::testing::ValuesIn(dilations2D), ::testing::ValuesIn(numOutChannels), - ::testing::Values(ngraph::op::PadType::VALID) + ::testing::Values(ngraph::op::PadType::VALID), + ::testing::ValuesIn(emptyOutputPadding) ); -INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_ExplicitPadding, ConvolutionBackpropDataLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_ExplicitPadding, ConvolutionBackpropLayerTest, ::testing::Combine( conv2DParams_ExplicitPadding, ::testing::ValuesIn(netPrecisions2D), @@ -61,10 +65,11 @@ INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_ExplicitPadding, Convolu ::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(InferenceEngine::Layout::ANY), ::testing::ValuesIn(inputShapes2D), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_GPU)), - ConvolutionBackpropDataLayerTest::getTestCaseName); + ConvolutionBackpropLayerTest::getTestCaseName); -INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_AutoPadValid, ConvolutionBackpropDataLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_AutoPadValid, ConvolutionBackpropLayerTest, ::testing::Combine( conv2DParams_AutoPadValid, ::testing::ValuesIn(netPrecisions2D), @@ -73,8 +78,59 @@ INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_AutoPadValid, Convolutio ::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(InferenceEngine::Layout::ANY), ::testing::ValuesIn(inputShapes2D), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_GPU)), - ConvolutionBackpropDataLayerTest::getTestCaseName); + ConvolutionBackpropLayerTest::getTestCaseName); + +const std::vector> outputPadding2D = {{1, 1}, {2, 2}}; +const std::vector> testStrides2D = {{3, 3}}; + +const auto conv2DParams_ExplicitPadding_output_padding = ::testing::Combine( + ::testing::ValuesIn(kernels2D), + ::testing::ValuesIn(testStrides2D), + ::testing::ValuesIn(padBegins2D), + ::testing::ValuesIn(padEnds2D), + ::testing::ValuesIn(dilations2D), + ::testing::ValuesIn(numOutChannels), + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(outputPadding2D) +); +const auto conv2DParams_AutoPadValid_output_padding = ::testing::Combine( + ::testing::ValuesIn(kernels2D), + ::testing::ValuesIn(testStrides2D), + ::testing::Values(std::vector({0, 0})), + ::testing::Values(std::vector({0, 0})), + ::testing::ValuesIn(dilations2D), + ::testing::ValuesIn(numOutChannels), + ::testing::Values(ngraph::op::PadType::VALID), + ::testing::ValuesIn(outputPadding2D) +); + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_ExplicitPadding_OutputPaddingDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv2DParams_AutoPadValid_output_padding, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShapes2D), + ::testing::ValuesIn(emptyOutputShape), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData2D_AutoPadding_OutputPaddingDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv2DParams_ExplicitPadding_output_padding, + 
::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShapes2D), + ::testing::ValuesIn(emptyOutputShape), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); /* ============= 3D ConvolutionBackpropData ============= */ const std::vector netPrecisions3D = { @@ -96,7 +152,8 @@ const auto conv3DParams_ExplicitPadding = ::testing::Combine( ::testing::ValuesIn(padEnds3D), ::testing::ValuesIn(dilations3D), ::testing::ValuesIn(numOutChannels), - ::testing::Values(ngraph::op::PadType::EXPLICIT) + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(emptyOutputPadding) ); const auto conv3DParams_AutoPadValid = ::testing::Combine( ::testing::ValuesIn(kernels3D), @@ -105,10 +162,11 @@ const auto conv3DParams_AutoPadValid = ::testing::Combine( ::testing::Values(std::vector({0, 0, 0})), ::testing::ValuesIn(dilations3D), ::testing::ValuesIn(numOutChannels), - ::testing::Values(ngraph::op::PadType::VALID) + ::testing::Values(ngraph::op::PadType::VALID), + ::testing::ValuesIn(emptyOutputPadding) ); -INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_ExplicitPadding, ConvolutionBackpropDataLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_ExplicitPadding, ConvolutionBackpropLayerTest, ::testing::Combine( conv3DParams_ExplicitPadding, ::testing::ValuesIn(netPrecisions3D), @@ -117,10 +175,11 @@ INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_ExplicitPadding, Convolu ::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(InferenceEngine::Layout::ANY), ::testing::ValuesIn(inputShapes3D), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_GPU)), - ConvolutionBackpropDataLayerTest::getTestCaseName); + ConvolutionBackpropLayerTest::getTestCaseName); -INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_AutoPadValid, ConvolutionBackpropDataLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_AutoPadValid, ConvolutionBackpropLayerTest, ::testing::Combine( conv3DParams_AutoPadValid, ::testing::ValuesIn(netPrecisions3D), @@ -129,7 +188,58 @@ INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_AutoPadValid, Convolutio ::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(InferenceEngine::Layout::ANY), ::testing::ValuesIn(inputShapes3D), + ::testing::ValuesIn(emptyOutputShape), ::testing::Values(CommonTestUtils::DEVICE_GPU)), - ConvolutionBackpropDataLayerTest::getTestCaseName); + ConvolutionBackpropLayerTest::getTestCaseName); + +const std::vector> outputPadding3D = {{1, 1, 1}, {2, 2, 2}}; +const std::vector> testStrides3D = {{3, 3, 3}}; + +const auto conv3DParams_ExplicitPadding_output_padding = ::testing::Combine( + ::testing::ValuesIn(kernels3D), + ::testing::ValuesIn(testStrides3D), + ::testing::ValuesIn(padBegins3D), + ::testing::ValuesIn(padEnds3D), + ::testing::ValuesIn(dilations3D), + ::testing::ValuesIn(numOutChannels), + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::ValuesIn(outputPadding3D) +); +const auto conv3DParams_AutoPadValid_output_padding = ::testing::Combine( + ::testing::ValuesIn(kernels3D), + ::testing::ValuesIn(testStrides3D), + ::testing::Values(std::vector({0, 0, 0})), + ::testing::Values(std::vector({0, 0, 0})), + ::testing::ValuesIn(dilations3D), + 
::testing::ValuesIn(numOutChannels), + ::testing::Values(ngraph::op::PadType::VALID), + ::testing::ValuesIn(outputPadding3D) +); + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_ExplicitPadding_OutputPaddingDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv3DParams_AutoPadValid_output_padding, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShapes3D), + ::testing::ValuesIn(emptyOutputShape), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); + +INSTANTIATE_TEST_CASE_P(smoke_ConvolutionBackpropData3D_AutoPadding_OutputPaddingDefined, ConvolutionBackpropLayerTest, + ::testing::Combine( + conv3DParams_ExplicitPadding_output_padding, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::ValuesIn(inputShapes3D), + ::testing::ValuesIn(emptyOutputShape), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + ConvolutionBackpropLayerTest::getTestCaseName); } // namespace diff --git a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/convolution_backprop.hpp b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/convolution_backprop.hpp new file mode 100644 index 00000000000..45563d86a34 --- /dev/null +++ b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/convolution_backprop.hpp @@ -0,0 +1,15 @@ +// Copyright (C) 2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "shared_test_classes/single_layer/convolution_backprop.hpp" + +namespace LayerTestsDefinitions { + +TEST_P(ConvolutionBackpropLayerTest, CompareWithRefs) { + Run(); +} + +} diff --git a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/convolution_backprop_data.hpp b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/convolution_backprop_data.hpp index 67fb0c56efd..3b2947db121 100644 --- a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/convolution_backprop_data.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/convolution_backprop_data.hpp @@ -2,6 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // +// DEPRECATED, can't be removed currently due to arm and kmb-plugin dependency (#55568) #pragma once #include "shared_test_classes/single_layer/convolution_backprop_data.hpp" diff --git a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/convolution_backprop.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/convolution_backprop.hpp new file mode 100644 index 00000000000..794782396da --- /dev/null +++ b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/convolution_backprop.hpp @@ -0,0 +1,49 @@ +// Copyright (C) 2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include +#include +#include + +#include "shared_test_classes/base/layer_test_utils.hpp" +#include "ngraph_functions/builders.hpp" +#include 
"ngraph_functions/utils/ngraph_helpers.hpp" + +namespace LayerTestsDefinitions { + +typedef std::tuple< + InferenceEngine::SizeVector, // Kernel size + InferenceEngine::SizeVector, // Strides + std::vector, // Pad begin + std::vector, // Pad end + InferenceEngine::SizeVector, // Dilation + size_t, // Num out channels + ngraph::op::PadType, // Padding type + std::vector // Output padding +> convBackpropSpecificParams; +typedef std::tuple< + convBackpropSpecificParams, + InferenceEngine::Precision, // Net precision + InferenceEngine::Precision, // Input precision + InferenceEngine::Precision, // Output precision + InferenceEngine::Layout, // Input layout + InferenceEngine::Layout, // Output layout + InferenceEngine::SizeVector, // Input shapes + InferenceEngine::SizeVector, // Output shapes + LayerTestsUtils::TargetDevice // Device name +> convBackpropLayerTestParamsSet; + +class ConvolutionBackpropLayerTest : public testing::WithParamInterface, + virtual public LayerTestsUtils::LayerTestsCommon { +public: + static std::string getTestCaseName(testing::TestParamInfo obj); + +protected: + void SetUp() override; +}; + +} // namespace LayerTestsDefinitions diff --git a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/convolution_backprop_data.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/convolution_backprop_data.hpp index ecfd6e4f1f7..9aeb9a1a2be 100644 --- a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/convolution_backprop_data.hpp +++ b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/convolution_backprop_data.hpp @@ -2,6 +2,8 @@ // SPDX-License-Identifier: Apache-2.0 // +// DEPRECATED, can't be removed currently due to arm and kmb-plugin dependency (#55568) + #pragma once #include diff --git a/inference-engine/tests/functional/shared_test_classes/src/single_layer/convolution_backprop.cpp b/inference-engine/tests/functional/shared_test_classes/src/single_layer/convolution_backprop.cpp new file mode 100644 index 00000000000..55aae5e0a21 --- /dev/null +++ b/inference-engine/tests/functional/shared_test_classes/src/single_layer/convolution_backprop.cpp @@ -0,0 +1,71 @@ +// Copyright (C) 2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "shared_test_classes/single_layer/convolution_backprop.hpp" + +namespace LayerTestsDefinitions { + +std::string ConvolutionBackpropLayerTest::getTestCaseName(testing::TestParamInfo obj) { + convBackpropSpecificParams convBackpropDataParams; + InferenceEngine::Precision netPrecision; + InferenceEngine::Precision inPrc, outPrc; + InferenceEngine::Layout inLayout, outLayout; + InferenceEngine::SizeVector inputShapes; + InferenceEngine::SizeVector outputShapes; + std::string targetDevice; + std::tie(convBackpropDataParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShapes, outputShapes, targetDevice) = obj.param; + ngraph::op::PadType padType; + InferenceEngine::SizeVector kernel, stride, dilation; + std::vector padBegin, padEnd, outPadding; + size_t convOutChannels; + std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType, outPadding) = convBackpropDataParams; + + std::ostringstream result; + result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_"; + result << "OS=" << CommonTestUtils::vec2str(outputShapes) << "_"; + result << "K" << CommonTestUtils::vec2str(kernel) << "_"; + result << "S" << 
CommonTestUtils::vec2str(stride) << "_"; + result << "PB" << CommonTestUtils::vec2str(padBegin) << "_"; + result << "PE" << CommonTestUtils::vec2str(padEnd) << "_"; + result << "D=" << CommonTestUtils::vec2str(dilation) << "_"; + result << "OP=" << CommonTestUtils::vec2str(outPadding) << "_"; + result << "O=" << convOutChannels << "_"; + result << "AP=" << padType << "_"; + result << "netPRC=" << netPrecision.name() << "_"; + result << "inPRC=" << inPrc.name() << "_"; + result << "outPRC=" << outPrc.name() << "_"; + result << "inL=" << inLayout << "_"; + result << "outL=" << outLayout << "_"; + result << "trgDev=" << targetDevice; + return result.str(); +} + +void ConvolutionBackpropLayerTest::SetUp() { + convBackpropSpecificParams convBackpropDataParams; + std::vector inputShape; + std::vector outputShape; + auto netPrecision = InferenceEngine::Precision::UNSPECIFIED; + std::tie(convBackpropDataParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, outputShape, targetDevice) = this->GetParam(); + ngraph::op::PadType padType; + InferenceEngine::SizeVector kernel, stride, dilation; + std::vector padBegin, padEnd, outPadding; + size_t convOutChannels; + std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType, outPadding) = convBackpropDataParams; + auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision); + auto params = ngraph::builder::makeParams(ngPrc, {inputShape}); + auto paramOuts = ngraph::helpers::convert2OutputVector( + ngraph::helpers::castOps2Nodes(params)); + auto convBackpropData = std::dynamic_pointer_cast( + ngraph::builder::makeConvolutionBackpropData(paramOuts[0], ngPrc, kernel, stride, padBegin, + padEnd, dilation, padType, convOutChannels, false, outPadding)); + if (!outputShape.empty()) { + auto outShape = ngraph::opset3::Constant::create(ngraph::element::i64, {outputShape.size()}, outputShape); + convBackpropData = std::dynamic_pointer_cast( + ngraph::builder::makeConvolutionBackpropData(paramOuts[0], outShape, ngPrc, kernel, stride, padBegin, + padEnd, dilation, padType, convOutChannels)); + } + ngraph::ResultVector results{std::make_shared(convBackpropData)}; + function = std::make_shared(results, params, "convolutionBackpropData"); +} +} // namespace LayerTestsDefinitions diff --git a/inference-engine/tests/functional/shared_test_classes/src/single_layer/convolution_backprop_data.cpp b/inference-engine/tests/functional/shared_test_classes/src/single_layer/convolution_backprop_data.cpp index c6730a3aaec..f2656a3c2ab 100644 --- a/inference-engine/tests/functional/shared_test_classes/src/single_layer/convolution_backprop_data.cpp +++ b/inference-engine/tests/functional/shared_test_classes/src/single_layer/convolution_backprop_data.cpp @@ -2,6 +2,8 @@ // SPDX-License-Identifier: Apache-2.0 // +// DEPRECATED, can't be removed currently due to arm and kmb-plugin dependency (#55568) + #include "shared_test_classes/single_layer/convolution_backprop_data.hpp" namespace LayerTestsDefinitions { @@ -54,7 +56,7 @@ void ConvolutionBackpropDataLayerTest::SetUp() { ngraph::helpers::castOps2Nodes(params)); auto convBackpropData = std::dynamic_pointer_cast( ngraph::builder::makeConvolutionBackpropData(paramOuts[0], ngPrc, kernel, stride, padBegin, - padEnd, dilation, padType, convOutChannels)); + padEnd, dilation, padType, convOutChannels)); ngraph::ResultVector results{std::make_shared(convBackpropData)}; function = std::make_shared(results, params, "convolutionBackpropData"); } diff --git 
a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp index 802535430cd..1643f31f761 100644 --- a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp +++ b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp @@ -125,6 +125,7 @@ std::shared_ptr makeConvolutionBackpropData(const ngraph::Output &outputPadding = {}, const std::vector &filterWeights = {}, const std::vector &biasesWeights = {}); @@ -137,6 +138,22 @@ std::shared_ptr makeConvolutionBackpropData(const ngraph::Output &dilations, const op::PadType &autoPad, bool addBiases = false, + const std::vector &outputPadding = {}, + const std::vector &biasesWeights = {}); + +std::shared_ptr makeConvolutionBackpropData(const ngraph::Output &in, + const ngraph::Output &outputShape, + const element::Type &type, + const std::vector &filterSize, + const std::vector &strides, + const std::vector &padsBegin, + const std::vector &padsEnd, + const std::vector &dilations, + const op::PadType &autoPad, + size_t numOutChannels, + bool addBiases = false, + const std::vector &outputPadding = {}, + const std::vector &filterWeights = {}, const std::vector &biasesWeights = {}); std::shared_ptr makeCTCGreedyDecoder( diff --git a/inference-engine/tests/ngraph_helpers/ngraph_functions/src/convolution_backprop_data.cpp b/inference-engine/tests/ngraph_helpers/ngraph_functions/src/convolution_backprop_data.cpp index 0edf339ce7f..91d6c0fc085 100644 --- a/inference-engine/tests/ngraph_helpers/ngraph_functions/src/convolution_backprop_data.cpp +++ b/inference-engine/tests/ngraph_helpers/ngraph_functions/src/convolution_backprop_data.cpp @@ -20,6 +20,7 @@ std::shared_ptr makeConvolutionBackpropData(const ngraph::Output &in const op::PadType &autoPad, size_t numOutChannels, bool addBiases, + const std::vector &outputPadding, const std::vector &filterWeights, const std::vector &biasesWeights) { bool randomFilterWeights = filterWeights.empty(); @@ -28,7 +29,7 @@ std::shared_ptr makeConvolutionBackpropData(const ngraph::Output &in filterWeightsShape.insert(filterWeightsShape.end(), filterSize.begin(), filterSize.end()); auto filterWeightsNode = makeConstant(type, filterWeightsShape, filterWeights, randomFilterWeights); - return makeConvolutionBackpropData(in, filterWeightsNode, type, strides, padsBegin, padsEnd, dilations, autoPad, addBiases, biasesWeights); + return makeConvolutionBackpropData(in, filterWeightsNode, type, strides, padsBegin, padsEnd, dilations, autoPad, addBiases, outputPadding, biasesWeights); } std::shared_ptr makeConvolutionBackpropData(const ngraph::Output &in, @@ -40,9 +41,51 @@ std::shared_ptr makeConvolutionBackpropData(const ngraph::Output &in const std::vector &dilations, const op::PadType &autoPad, bool addBiases, + const std::vector &outputPadding, const std::vector &biasesWeights) { auto deconv = std::make_shared(in, weights, strides, padsBegin, padsEnd, dilations, autoPad); + if (!outputPadding.empty()) { + deconv = std::make_shared(in, weights, strides, padsBegin, padsEnd, dilations, autoPad, outputPadding); + } + + if (addBiases) { + bool randomBiases = biasesWeights.empty(); + auto biasesWeightsNode = makeConstant(type, {}, biasesWeights, randomBiases); + auto add = std::make_shared(deconv, biasesWeightsNode); + return add; + } else { + return deconv; + } +} + +std::shared_ptr makeConvolutionBackpropData(const ngraph::Output 
&in, + const ngraph::Output &outputShape, + const element::Type &type, + const std::vector &filterSize, + const std::vector &strides, + const std::vector &padsBegin, + const std::vector &padsEnd, + const std::vector &dilations, + const op::PadType &autoPad, + size_t numOutChannels, + bool addBiases, + const std::vector &outputPadding, + const std::vector &filterWeights, + const std::vector &biasesWeights) { + bool randomFilterWeights = filterWeights.empty(); + auto shape = in.get_shape(); + std::vector filterWeightsShape = {shape[1], numOutChannels}; + filterWeightsShape.insert(filterWeightsShape.end(), filterSize.begin(), filterSize.end()); + auto filterWeightsNode = makeConstant(type, filterWeightsShape, filterWeights, randomFilterWeights); + + auto deconv = std::make_shared(in, filterWeightsNode, outputShape, strides, padsBegin, padsEnd, dilations, autoPad); + + if (!outputPadding.empty()) { + deconv = std::make_shared(in, filterWeightsNode, outputShape, strides, padsBegin, + padsEnd, dilations, autoPad, outputPadding); + } + if (addBiases) { bool randomBiases = biasesWeights.empty(); auto biasesWeightsNode = makeConstant(type, {}, biasesWeights, randomBiases);
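
Usage sketch (illustration only, not part of the patch): the snippet below shows how the two builder forms introduced above might be driven — the extended overload that accepts an outputPadding argument and the new overload that takes an explicit spatial outputShape constant. The helper name and the concrete kernel/stride/shape values are hypothetical; the makeConvolutionBackpropData signatures follow the declarations this patch adds to ngraph_functions/builders.hpp.

// Hypothetical illustration of the builder overloads added by this patch.
#include <cstddef>
#include <memory>
#include <vector>

#include "ngraph_functions/builders.hpp"

std::vector<std::shared_ptr<ngraph::Node>> buildDeconvExamples(const ngraph::Output<ngraph::Node>& in) {
    const std::vector<size_t> kernel{3, 3}, stride{3, 3}, dilation{1, 1};
    const std::vector<ptrdiff_t> padBegin{0, 0}, padEnd{0, 0}, outputPadding{1, 1};
    const size_t outChannels = 16;

    // Form 1: the spatial output size is inferred. With VALID auto-padding each
    // spatial dim becomes stride * (in - 1) + dilation * (kernel - 1) + 1 + output_padding.
    auto inferred = ngraph::builder::makeConvolutionBackpropData(
        in, ngraph::element::f32, kernel, stride, padBegin, padEnd, dilation,
        ngraph::op::PadType::VALID, outChannels, false, outputPadding);

    // Form 2: the spatial output size is supplied explicitly as an i64 constant input,
    // mirroring the outputShape branch used in ConvolutionBackpropLayerTest::SetUp.
    auto outShape = ngraph::opset3::Constant::create(ngraph::element::i64, {2},
                                                     std::vector<int64_t>{10, 10});
    auto explicitShape = ngraph::builder::makeConvolutionBackpropData(
        in, outShape, ngraph::element::f32, kernel, stride, padBegin, padEnd, dilation,
        ngraph::op::PadType::VALID, outChannels);

    return {inferred, explicitShape};
}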