Add output_padding and output_shape into GroupConvolutionBackprop SLTs. (#6302)

* Add output_padding and output_shape into GroupConvolutionBackprop SLTs.

* Add legacy API for arm-plugin dependency.

* Replaced templated input, output and filter with one template.

* Apply correct format.

* Change to use INSTANTIATE_TEST_SUITE_P macro.
This commit is contained in:
Szymon Durawa 2021-06-29 14:34:29 +02:00 committed by GitHub
parent f6a0195e18
commit 055a70bdf9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 458 additions and 66 deletions

View File

@ -10,7 +10,7 @@ using namespace LayerTestsDefinitions;
namespace {
TEST_P(GroupConvBackpropDataLayerTest, Serialize) {
TEST_P(GroupConvBackpropLayerTest, Serialize) {
Serialize();
}
@ -26,6 +26,7 @@ const std::vector<std::vector<size_t>> kernels = {{3, 3}};
const std::vector<std::vector<size_t>> strides = {{1, 1}};
const std::vector<std::vector<ptrdiff_t>> padBegins = {{0, 0}};
const std::vector<std::vector<ptrdiff_t>> padEnds = {{0, 0}};
const std::vector<std::vector<ptrdiff_t>> outputPadding = {{}, {1, 1}};
const std::vector<std::vector<size_t>> dilations = {{1, 1}};
const std::vector<size_t> numOutChannels = {8, 16};
const std::vector<size_t> numGroups = {2, 8};
@ -33,15 +34,17 @@ const std::vector<ngraph::op::PadType> pad_types = {
ngraph::op::PadType::EXPLICIT, ngraph::op::PadType::VALID,
ngraph::op::PadType::SAME_LOWER, ngraph::op::PadType::SAME_UPPER};
const auto inputShapes = std::vector<size_t>({1, 16, 30, 30});
const std::vector<std::vector<size_t >> emptyOutputShape = {{}};
const auto groupConvBackpropData2DParams = ::testing::Combine(
::testing::ValuesIn(kernels), ::testing::ValuesIn(strides),
::testing::ValuesIn(padBegins), ::testing::ValuesIn(padEnds),
::testing::ValuesIn(dilations), ::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups), ::testing::ValuesIn(pad_types));
::testing::ValuesIn(numGroups), ::testing::ValuesIn(pad_types),
::testing::ValuesIn(outputPadding));
INSTANTIATE_TEST_SUITE_P(
smoke_GroupConvBackpropData2D_Serialization, GroupConvBackpropDataLayerTest,
smoke_GroupConvBackpropData2D_Serialization, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData2DParams,
::testing::ValuesIn(precisions),
@ -50,7 +53,8 @@ INSTANTIATE_TEST_SUITE_P(
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(inputShapes),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
} // namespace

View File

@ -18,6 +18,8 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
const std::vector<size_t> numOutChannels = {16, 32};
const std::vector<size_t> numGroups = {2, 8, 16};
const std::vector<std::vector<size_t >> emptyOutputShape = {{}};
const std::vector<std::vector<ptrdiff_t >> emptyOutputPadding = {{}};
/* ============= 1D GroupConvolution ============= */
const std::vector<std::vector<size_t >> inputShapes1D = {{1, 16, 32}};
@ -36,7 +38,8 @@ const auto groupConvBackpropData1DParams_ExplicitPadding = ::testing::Combine(
::testing::ValuesIn(dilations1D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
const auto groupConvBackpropData1DParams_AutoPadValid = ::testing::Combine(
@ -47,10 +50,11 @@ const auto groupConvBackpropData1DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(dilations1D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::VALID)
::testing::Values(ngraph::op::PadType::VALID),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData1D_ExplicitPadding, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData1D_ExplicitPadding, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData1DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
@ -59,10 +63,11 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData1D_ExplicitPadding, GroupCon
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes1D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData1D_AutoPadValid, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData1D_AutoPadValid, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData1DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
@ -71,8 +76,9 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData1D_AutoPadValid, GroupConvBa
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes1D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
/* ============= 2D GroupConvolution ============= */
const std::vector<std::vector<size_t >> inputShapes2D = {{1, 16, 10, 10},
@ -91,7 +97,8 @@ const auto groupConvBackpropData2DParams_ExplicitPadding = ::testing::Combine(
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
const auto groupConvBackpropData2DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(kernels2D),
@ -101,10 +108,11 @@ const auto groupConvBackpropData2DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::VALID)
::testing::Values(ngraph::op::PadType::VALID),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData2D_ExplicitPadding, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData2D_ExplicitPadding, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData2DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
@ -113,10 +121,11 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData2D_ExplicitPadding, GroupCon
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes2D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData2D_AutoPadValid, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData2D_AutoPadValid, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData2DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
@ -125,8 +134,77 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData2D_AutoPadValid, GroupConvBa
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes2D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
// Explicit-output-shape cases: the deconvolution's spatial output dims are
// supplied via outputShapes2D (fed to the op as a shape constant) instead of
// being inferred from input/stride/padding.
const std::vector<std::vector<size_t >> inputShape2D = {{1, 16, 9, 12}};
const std::vector<std::vector<size_t >> outputShapes2D = {{6, 6}, {4, 9}};
// Reuses the AutoPadValid param set, so output padding stays empty here —
// only the explicitly defined output shape is exercised.
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData2D_OutputShapeDefined, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData2DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShape2D),
::testing::ValuesIn(outputShapes2D),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropLayerTest::getTestCaseName);
// Non-empty output_padding cases (2D): pads appended to the deconvolution
// output. testStrides2D uses stride 3 — presumably so the padding values
// stay below the stride, as the op requires; TODO confirm against the spec.
const std::vector<std::vector<ptrdiff_t>> outputPadding2D = {{1, 1}, {2, 2}};
const std::vector<std::vector<size_t >> testStrides2D = {{3, 3}};
// NOTE(review): these are named "conv2DParams_*" although the file tests
// GROUP convolution backprop — consider renaming for consistency with the
// groupConvBackpropData2DParams_* sets above.
const auto conv2DParams_ExplicitPadding_output_padding = ::testing::Combine(
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(testStrides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(outputPadding2D)
);
const auto conv2DParams_AutoPadValid_output_padding = ::testing::Combine(
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(testStrides2D),
::testing::Values(std::vector<ptrdiff_t>({0, 0})),
::testing::Values(std::vector<ptrdiff_t>({0, 0})),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::VALID),
::testing::ValuesIn(outputPadding2D)
);
// ExplicitPadding + output_padding suite. Fixed: this suite previously
// instantiated conv2DParams_AutoPadValid_output_padding (swapped with the
// AutoPadding suite below); it now uses the EXPLICIT-padding param set so the
// suite name matches the parameters it runs.
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData2D_ExplicitPadding_OutputPaddingDefined, GroupConvBackpropLayerTest,
::testing::Combine(
conv2DParams_ExplicitPadding_output_padding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes2D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropLayerTest::getTestCaseName);
// AutoPadding + output_padding suite. Fixed: this suite previously
// instantiated conv2DParams_ExplicitPadding_output_padding (swapped with the
// ExplicitPadding suite above); it now uses the VALID/auto-pad param set so
// the suite name matches the parameters it runs.
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData2D_AutoPadding_OutputPaddingDefined, GroupConvBackpropLayerTest,
::testing::Combine(
conv2DParams_AutoPadValid_output_padding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes2D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropLayerTest::getTestCaseName);
/* ============= 3D GroupConvolution ============= */
const std::vector<std::vector<size_t >> inputShapes3D = {{1, 16, 5, 5, 5},
@ -145,7 +223,8 @@ const auto groupConvBackpropData3DParams_ExplicitPadding = ::testing::Combine(
::testing::ValuesIn(dilations3D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
const auto groupConvBackpropData3DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(kernels3D),
@ -155,10 +234,11 @@ const auto groupConvBackpropData3DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(dilations3D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::VALID)
::testing::Values(ngraph::op::PadType::VALID),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData3D_ExplicitPadding, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData3D_ExplicitPadding, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData3DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
@ -167,10 +247,11 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData3D_ExplicitPadding, GroupCon
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes3D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData3D_AutoPadValid, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData3D_AutoPadValid, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData3DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
@ -179,7 +260,76 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData3D_AutoPadValid, GroupConvBa
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes3D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
// Explicit-output-shape cases (3D): spatial output dims come from
// outputShapes3D instead of being inferred from input/stride/padding.
const std::vector<std::vector<size_t >> inputShape3D = {{1, 16, 10, 10, 10}};
const std::vector<std::vector<size_t >> outputShapes3D = {{8, 8, 8}, {10, 10, 10}};
// Reuses the AutoPadValid param set, so output padding stays empty here —
// only the explicitly defined output shape is exercised.
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData3D_OutputShapeDefined, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData3DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShape3D),
::testing::ValuesIn(outputShapes3D),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropLayerTest::getTestCaseName);
// Non-empty output_padding cases (3D): pads appended to the deconvolution
// output. testStrides3D uses stride 3 — presumably so the padding values
// stay below the stride, as the op requires; TODO confirm against the spec.
const std::vector<std::vector<ptrdiff_t>> outputPadding3D = {{1, 1, 1}, {2, 2, 2}};
const std::vector<std::vector<size_t >> testStrides3D = {{3, 3, 3}};
// NOTE(review): named "conv3DParams_*" although the file tests GROUP
// convolution backprop — consider renaming for consistency.
const auto conv3DParams_ExplicitPadding_output_padding = ::testing::Combine(
::testing::ValuesIn(kernels3D),
::testing::ValuesIn(testStrides3D),
::testing::ValuesIn(padBegins3D),
::testing::ValuesIn(padEnds3D),
::testing::ValuesIn(dilations3D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(outputPadding3D)
);
const auto conv3DParams_AutoPadValid_output_padding = ::testing::Combine(
::testing::ValuesIn(kernels3D),
::testing::ValuesIn(testStrides3D),
::testing::Values(std::vector<ptrdiff_t>({0, 0, 0})),
::testing::Values(std::vector<ptrdiff_t>({0, 0, 0})),
::testing::ValuesIn(dilations3D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::VALID),
::testing::ValuesIn(outputPadding3D)
);
// ExplicitPadding + output_padding suite (3D). Fixed: this suite previously
// instantiated conv3DParams_AutoPadValid_output_padding (swapped with the
// AutoPadding suite below); it now uses the EXPLICIT-padding param set so the
// suite name matches the parameters it runs.
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData3D_ExplicitPadding_OutputPaddingDefined, GroupConvBackpropLayerTest,
::testing::Combine(
conv3DParams_ExplicitPadding_output_padding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes3D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropLayerTest::getTestCaseName);
// AutoPadding + output_padding suite (3D). Fixed: this suite previously
// instantiated conv3DParams_ExplicitPadding_output_padding (swapped with the
// ExplicitPadding suite above); it now uses the VALID/auto-pad param set so
// the suite name matches the parameters it runs.
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData3D_AutoPadding_OutputPaddingDefined, GroupConvBackpropLayerTest,
::testing::Combine(
conv3DParams_AutoPadValid_output_padding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes3D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
GroupConvBackpropLayerTest::getTestCaseName);
} // namespace

View File

@ -12,8 +12,8 @@ using namespace CPUTestUtils;
namespace CPULayerTestsDefinitions {
using groupConvBackpropDataLayerTestParamsSet = LayerTestsDefinitions::groupConvBackpropDataLayerTestParamsSet;
using groupConvBackpropDataSpecificParams = LayerTestsDefinitions::groupConvBackpropDataSpecificParams;
using groupConvBackpropDataLayerTestParamsSet = LayerTestsDefinitions::groupConvBackpropLayerTestParamsSet;
using groupConvBackpropDataSpecificParams = LayerTestsDefinitions::groupConvBackpropSpecificParams;
typedef std::tuple<
groupConvBackpropDataLayerTestParamsSet,
@ -32,7 +32,7 @@ public:
std::tie(basicParamsSet, cpuParams, fusingParams, additionalConfig) = obj.param;
std::ostringstream result;
result << LayerTestsDefinitions::GroupConvBackpropDataLayerTest::getTestCaseName(testing::TestParamInfo<groupConvBackpropDataLayerTestParamsSet>(
result << LayerTestsDefinitions::GroupConvBackpropLayerTest::getTestCaseName(testing::TestParamInfo<groupConvBackpropDataLayerTestParamsSet>(
basicParamsSet, 0));
result << CPUTestsBase::getTestCaseName(cpuParams);
@ -62,9 +62,9 @@ protected:
std::tie(postOpMgrPtr, fusedOps) = fusingParams;
groupConvBackpropDataSpecificParams groupConvParams;
std::vector<size_t> inputShape;
std::vector<size_t> inputShape, outputShape;
auto netPrecision = InferenceEngine::Precision::UNSPECIFIED;
std::tie(groupConvParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetDevice) = basicParamsSet;
std::tie(groupConvParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, outputShape, targetDevice) = basicParamsSet;
if (inPrc == Precision::UNSPECIFIED) {
selectedType += std::string("_") + Precision(Precision::FP32).name();
@ -74,17 +74,25 @@ protected:
ngraph::op::PadType padType;
InferenceEngine::SizeVector kernel, stride, dilation;
std::vector<ptrdiff_t> padBegin, padEnd;
std::vector<ptrdiff_t> padBegin, padEnd, outputPadding;
size_t convOutChannels, numGroups;
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, numGroups, padType) = groupConvParams;
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, numGroups, padType, outputPadding) = groupConvParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {inputShape});
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto groupConv = std::dynamic_pointer_cast<ngraph::opset1::GroupConvolutionBackpropData>(
ngraph::builder::makeGroupConvolutionBackpropData(paramOuts[0], ngPrc, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels, numGroups));
std::shared_ptr<ngraph::op::v1::GroupConvolutionBackpropData> groupConv;
if (!outputShape.empty()) {
auto outShape = ngraph::opset3::Constant::create(ngraph::element::i64, {outputShape.size()}, outputShape);
groupConv = std::dynamic_pointer_cast<ngraph::opset1::GroupConvolutionBackpropData>(
ngraph::builder::makeGroupConvolutionBackpropData(paramOuts[0], outShape, ngPrc, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels, numGroups, false, outputPadding));
} else {
groupConv = std::dynamic_pointer_cast<ngraph::opset1::GroupConvolutionBackpropData>(
ngraph::builder::makeGroupConvolutionBackpropData(paramOuts[0], ngPrc, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels, numGroups, false, outputPadding));
}
function = makeNgraphFunction(ngPrc, params, groupConv, "groupConvolutionBackpropData");
}
};
@ -128,6 +136,9 @@ std::vector<fusingSpecificParams> fusingParamsSet {
const std::map<std::string, std::string> cpuEmptyPluginConfig;
const std::map<std::string, std::string> cpuBF16PluginConfig = { { PluginConfigParams::KEY_ENFORCE_BF16, PluginConfigParams::YES } };
const std::vector<std::vector<size_t >> emptyOutputShape = {{}};
const std::vector<std::vector<ptrdiff_t>> emptyOutputPadding = {{}};
/* ============= GroupConvolution params (planar layout) ============= */
const SizeVector numOutChannels_Planar = {6};
const SizeVector numGroups_Planar = {2, 3};
@ -166,7 +177,8 @@ const auto groupConvParams_ExplicitPadding_Planar_2D = ::testing::Combine(
::testing::ValuesIn(dilations2d),
::testing::ValuesIn(numOutChannels_Planar),
::testing::ValuesIn(numGroups_Planar),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_2D_Planar_FP32, GroupDeconvolutionLayerCPUTest,
@ -179,6 +191,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_2D_Planar_FP32, GroupDeconvolutionLay
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_gemm_2D})),
::testing::ValuesIn(fusingParamsSet),
@ -195,6 +208,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_2D_Planar_BF16, GroupDeconvolutionLay
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_gemm_2D})),
::testing::ValuesIn(fusingParamsSet),
@ -210,7 +224,8 @@ const auto groupConvParams_ExplicitPadding_Planar_3D = ::testing::Combine(
::testing::ValuesIn(dilations3d),
::testing::ValuesIn(numOutChannels_Planar),
::testing::ValuesIn(numGroups_Planar),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_3D_Planar_FP32, GroupDeconvolutionLayerCPUTest,
@ -223,6 +238,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_3D_Planar_FP32, GroupDeconvolutionLay
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_gemm_3D})),
::testing::ValuesIn(fusingParamsSet),
@ -239,6 +255,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_3D_Planar_BF16, GroupDeconvolutionLay
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_gemm_3D})),
::testing::ValuesIn(fusingParamsSet),
@ -254,7 +271,8 @@ const auto groupConvParams_ExplicitPadding_Blocked_2D = ::testing::Combine(
::testing::ValuesIn(dilations2d),
::testing::ValuesIn(numOutChannels_Blocked),
::testing::ValuesIn(numGroups_Blocked),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_2D_Blocked_FP32, GroupDeconvolutionLayerCPUTest,
@ -267,6 +285,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_2D_Blocked_FP32, GroupDeconvolutionLa
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 64, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D})),
::testing::ValuesIn(fusingParamsSet),
@ -283,6 +302,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_2D_Blocked_BF16, GroupDeconvolutionLa
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 64, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D})),
::testing::ValuesIn(fusingParamsSet),
@ -298,7 +318,8 @@ const auto groupConvParams_ExplicitPadding_Blocked_3D = ::testing::Combine(
::testing::ValuesIn(dilations3d),
::testing::ValuesIn(numOutChannels_Blocked),
::testing::ValuesIn(numGroups_Blocked),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_3D_Blocked_FP32, GroupDeconvolutionLayerCPUTest,
@ -311,6 +332,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_3D_Blocked_FP32, GroupDeconvolutionLa
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 64, 7, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_3D})),
::testing::ValuesIn(fusingParamsSet),
@ -327,6 +349,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_3D_Blocked_BF16, GroupDeconvolutionLa
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 64, 7, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_3D})),
::testing::ValuesIn(fusingParamsSet),
@ -342,7 +365,8 @@ const auto groupConvParams_ExplicitPadding_DW_2D = ::testing::Combine(
::testing::ValuesIn(dilations2d),
::testing::ValuesIn(numOutChannels_DW),
::testing::ValuesIn(numGroups_DW),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_2D_DW_FP32, GroupDeconvolutionLayerCPUTest,
@ -355,6 +379,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_2D_DW_FP32, GroupDeconvolutionLayerCP
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 32, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_dw_2D})),
::testing::ValuesIn(fusingParamsSet),
@ -371,6 +396,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupDeconv_2D_DW_BF16, GroupDeconvolutionLayerCP
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 32, 7, 7 })),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_dw_2D})),
::testing::ValuesIn(fusingParamsSet),

View File

@ -15,6 +15,9 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32
};
const std::vector<std::vector<size_t >> emptyOutputShape = {{}};
const std::vector<std::vector<ptrdiff_t>> emptyOutputPadding = {{}};
const std::vector<size_t> numOutChannels = {16, 32};
const std::vector<size_t> numGroups = {2, 8, 16};
@ -35,7 +38,8 @@ const auto groupConvBackpropData2DParams_ExplicitPadding = ::testing::Combine(
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
const auto groupConvBackpropData2DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(kernels2D),
@ -45,10 +49,11 @@ const auto groupConvBackpropData2DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::VALID)
::testing::Values(ngraph::op::PadType::VALID),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData2D_ExplicitPadding, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData2D_ExplicitPadding, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData2DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
@ -57,10 +62,11 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData2D_ExplicitPadding, GroupCon
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes2D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData2D_AutoPadValid, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData2D_AutoPadValid, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData2DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
@ -69,8 +75,9 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData2D_AutoPadValid, GroupConvBa
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes2D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
/* ============= 3D GroupConvolution ============= */
const std::vector<std::vector<size_t >> inputShapes3D = {{1, 16, 5, 5, 5},
@ -89,7 +96,8 @@ const auto groupConvBackpropData3DParams_ExplicitPadding = ::testing::Combine(
::testing::ValuesIn(dilations3D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::Values(ngraph::op::PadType::EXPLICIT),
::testing::ValuesIn(emptyOutputPadding)
);
const auto groupConvBackpropData3DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(kernels3D),
@ -99,10 +107,11 @@ const auto groupConvBackpropData3DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(dilations3D),
::testing::ValuesIn(numOutChannels),
::testing::ValuesIn(numGroups),
::testing::Values(ngraph::op::PadType::VALID)
::testing::Values(ngraph::op::PadType::VALID),
::testing::ValuesIn(emptyOutputPadding)
);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData3D_ExplicitPadding, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData3D_ExplicitPadding, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData3DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
@ -111,10 +120,11 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData3D_ExplicitPadding, GroupCon
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes3D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData3D_AutoPadValid, GroupConvBackpropDataLayerTest,
INSTANTIATE_TEST_CASE_P(smoke_GroupConvBackpropData3D_AutoPadValid, GroupConvBackpropLayerTest,
::testing::Combine(
groupConvBackpropData3DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
@ -123,7 +133,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvBackpropData3D_AutoPadValid, GroupConvBa
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapes3D),
::testing::ValuesIn(emptyOutputShape),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
GroupConvBackpropDataLayerTest::getTestCaseName);
GroupConvBackpropLayerTest::getTestCaseName);
} // namespace

View File

@ -8,8 +8,13 @@
namespace LayerTestsDefinitions {
// DEPRECATED, remove this old API when KMB (#58495) and ARM (#58496) plugins are migrated to new API
TEST_P(GroupConvBackpropDataLayerTest, CompareWithRefs) {
Run();
}
} // namespace LayerTestsDefinitions
TEST_P(GroupConvBackpropLayerTest, CompareWithRefs) {
Run();
}
} // namespace LayerTestsDefinitions

View File

@ -15,6 +15,7 @@
namespace LayerTestsDefinitions {
// DEPRECATED, remove this old API when KMB (#58495) and ARM (#58496) plugins are migrated to new API
using groupConvBackpropDataSpecificParams = std::tuple<
InferenceEngine::SizeVector, // kernels
InferenceEngine::SizeVector, // strides
@ -43,4 +44,34 @@ protected:
void SetUp() override;
};
} // namespace LayerTestsDefinitions
// Operation-specific parameters of GroupConvolutionBackpropData
// (everything except precisions/layouts/shapes/device).
using groupConvBackpropSpecificParams = std::tuple<
    InferenceEngine::SizeVector,    // kernels
    InferenceEngine::SizeVector,    // strides
    std::vector<ptrdiff_t>,         // pad begins
    std::vector<ptrdiff_t>,         // pad ends
    InferenceEngine::SizeVector,    // dilations
    size_t,                         // num output channels
    size_t,                         // num groups
    ngraph::op::PadType,            // padding type
    std::vector<ptrdiff_t>>;        // output padding (empty vector = not set)
// Full parameter set of a single test instance.
using groupConvBackpropLayerTestParamsSet = std::tuple<
    groupConvBackpropSpecificParams,
    InferenceEngine::Precision,     // Network precision
    InferenceEngine::Precision,     // Input precision
    InferenceEngine::Precision,     // Output precision
    InferenceEngine::Layout,        // Input layout
    InferenceEngine::Layout,        // Output layout
    InferenceEngine::SizeVector,    // Input shape
    InferenceEngine::SizeVector,    // Output shape (empty vector = inferred)
    LayerTestsUtils::TargetDevice>; // Device name
// Shared-layer test for GroupConvolutionBackpropData supporting explicit
// output shape and output padding (replaces GroupConvBackpropDataLayerTest).
class GroupConvBackpropLayerTest : public testing::WithParamInterface<groupConvBackpropLayerTestParamsSet>,
    virtual public LayerTestsUtils::LayerTestsCommon {
public:
    // Builds a human-readable test name from the parameter tuple.
    static std::string getTestCaseName(testing::TestParamInfo<groupConvBackpropLayerTestParamsSet> obj);
protected:
    void SetUp() override;
};
} // namespace LayerTestsDefinitions

View File

@ -6,6 +6,8 @@
namespace LayerTestsDefinitions {
// DEPRECATED, remove this old API when KMB (#58495) and ARM (#58496) plugins are migrated to new API
std::string GroupConvBackpropDataLayerTest::getTestCaseName(testing::TestParamInfo<groupConvBackpropDataLayerTestParamsSet> obj) {
groupConvBackpropDataSpecificParams groupConvBackpropDataParams;
InferenceEngine::Precision netPrecision;
@ -59,4 +61,68 @@ void GroupConvBackpropDataLayerTest::SetUp() {
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(groupConvBackpropData)};
function = std::make_shared<ngraph::Function>(results, params, "GroupConvolutionBackpropData");
}
// Builds the printable test-case name from the full parameter tuple.
// The emitted tokens (IS/OS/K/S/PB/PE/D/OP/O/G/AP/...) must stay stable,
// since CI filters reference these generated names.
std::string GroupConvBackpropLayerTest::getTestCaseName(testing::TestParamInfo<groupConvBackpropLayerTestParamsSet> obj) {
    groupConvBackpropSpecificParams convSpecificParams;
    InferenceEngine::Precision netPrc;
    InferenceEngine::Precision inputPrc, outputPrc;
    InferenceEngine::Layout inputLayout, outputLayout;
    InferenceEngine::SizeVector inShape, outShape;
    std::string device;
    std::tie(convSpecificParams, netPrc, inputPrc, outputPrc, inputLayout, outputLayout, inShape, outShape, device) = obj.param;

    // Unpack the operation-specific sub-tuple.
    InferenceEngine::SizeVector kernels, strides, dilations;
    std::vector<ptrdiff_t> padsBegin, padsEnd, outPad;
    size_t outChannels, groups;
    ngraph::op::PadType paddingType;
    std::tie(kernels, strides, padsBegin, padsEnd, dilations, outChannels, groups, paddingType, outPad) = convSpecificParams;

    std::ostringstream name;
    name << "IS=" << CommonTestUtils::vec2str(inShape) << "_"
         << "OS=" << CommonTestUtils::vec2str(outShape) << "_"
         << "K" << CommonTestUtils::vec2str(kernels) << "_"
         << "S" << CommonTestUtils::vec2str(strides) << "_"
         << "PB" << CommonTestUtils::vec2str(padsBegin) << "_"
         << "PE" << CommonTestUtils::vec2str(padsEnd) << "_"
         << "D=" << CommonTestUtils::vec2str(dilations) << "_"
         << "OP=" << CommonTestUtils::vec2str(outPad) << "_"
         << "O=" << outChannels << "_"
         << "G=" << groups << "_"
         << "AP=" << paddingType << "_"
         << "netPRC=" << netPrc.name() << "_"
         << "inPRC=" << inputPrc.name() << "_"
         << "outPRC=" << outputPrc.name() << "_"
         << "inL=" << inputLayout << "_"
         << "outL=" << outputLayout << "_"
         << "trgDev=" << device;
    return name.str();
}
void GroupConvBackpropLayerTest::SetUp() {
groupConvBackpropSpecificParams groupConvBackpropDataParams;
std::vector<size_t> inputShape, outputShape;
auto netPrecision = InferenceEngine::Precision::UNSPECIFIED;
std::tie(groupConvBackpropDataParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, outputShape, targetDevice) = this->GetParam();
ngraph::op::PadType padType;
InferenceEngine::SizeVector kernel, stride, dilation;
std::vector<ptrdiff_t> padBegin, padEnd, outPadding;
size_t convOutChannels, numGroups;
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, numGroups, padType, outPadding) = groupConvBackpropDataParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {inputShape});
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::shared_ptr<ngraph::op::v1::GroupConvolutionBackpropData> groupConvBackpropData;
if (!outputShape.empty()) {
auto outShape = ngraph::opset3::Constant::create(ngraph::element::i64, {outputShape.size()}, outputShape);
groupConvBackpropData = std::dynamic_pointer_cast<ngraph::opset1::GroupConvolutionBackpropData>(
ngraph::builder::makeGroupConvolutionBackpropData(paramOuts[0], outShape, ngPrc, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels, numGroups, false, outPadding));
} else {
groupConvBackpropData = std::dynamic_pointer_cast<ngraph::opset1::GroupConvolutionBackpropData>(
ngraph::builder::makeGroupConvolutionBackpropData(paramOuts[0], ngPrc, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels, numGroups, false, outPadding));
}
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(groupConvBackpropData)};
function = std::make_shared<ngraph::Function>(results, params, "GroupConvolutionBackpropData");
}
} // namespace LayerTestsDefinitions

View File

@ -196,6 +196,7 @@ std::shared_ptr<ngraph::Node> makeGroupConvolutionBackpropData(const ngraph::Out
size_t numOutChannels,
size_t numGroups,
bool addBiases = false,
const std::vector<ptrdiff_t> &outputPadding = {},
const std::vector<float> &filterWeights = {},
const std::vector<float> &biasesWeights = {});
@ -208,6 +209,23 @@ std::shared_ptr<ngraph::Node> makeGroupConvolutionBackpropData(const ngraph::Out
const std::vector<size_t> &dilations,
const op::PadType &autoPad,
bool addBiases = false,
const std::vector<ptrdiff_t> &outputPadding = {},
const std::vector<float> &biasesWeights = {});
// Overload taking an explicit output-shape node: builds a
// GroupConvolutionBackpropData with randomly generated filter weights (unless
// filterWeights is provided), an explicit spatial output shape, and optional
// output padding / bias add.
std::shared_ptr<ngraph::Node> makeGroupConvolutionBackpropData(const ngraph::Output<Node> &in,
                                                               const ngraph::Output<Node> &outputShape,
                                                               const element::Type &type,
                                                               const std::vector<size_t> &filterSize,
                                                               const std::vector<size_t> &strides,
                                                               const std::vector<ptrdiff_t> &padsBegin,
                                                               const std::vector<ptrdiff_t> &padsEnd,
                                                               const std::vector<size_t> &dilations,
                                                               const op::PadType &autoPad,
                                                               size_t numOutChannels,
                                                               size_t numGroups,
                                                               bool addBiases = false,
                                                               const std::vector<ptrdiff_t> &outputPadding = {},
                                                               const std::vector<float> &filterWeights = {},
                                                               const std::vector<float> &biasesWeights = {});
std::shared_ptr<ngraph::Node> makeBinaryConvolution(const ngraph::Output<Node> &in,

View File

@ -21,6 +21,7 @@ std::shared_ptr<Node> makeGroupConvolutionBackpropData(const ngraph::Output<Node
size_t numOutChannels,
size_t numGroups,
bool addBiases,
const std::vector<ptrdiff_t> &outputPadding,
const std::vector<float> &filterWeights,
const std::vector<float> &biasesWeights) {
bool randomFilterWeights = filterWeights.empty();
@ -34,7 +35,8 @@ std::shared_ptr<Node> makeGroupConvolutionBackpropData(const ngraph::Output<Node
filterWeightsShape.insert(filterWeightsShape.end(), filterSize.begin(), filterSize.end());
auto filterWeightsNode = makeConstant(type, filterWeightsShape, filterWeights, randomFilterWeights);
return makeGroupConvolutionBackpropData(in, filterWeightsNode, type, strides, padsBegin, padsEnd, dilations, autoPad, addBiases, biasesWeights);
return makeGroupConvolutionBackpropData(in, filterWeightsNode, type, strides, padsBegin, padsEnd, dilations, autoPad, addBiases,
outputPadding, biasesWeights);
}
std::shared_ptr<Node> makeGroupConvolutionBackpropData(const ngraph::Output<Node> &in,
@ -46,8 +48,56 @@ std::shared_ptr<Node> makeGroupConvolutionBackpropData(const ngraph::Output<Node
const std::vector<size_t> &dilations,
const op::PadType &autoPad,
bool addBiases,
const std::vector<ptrdiff_t> &outputPadding,
const std::vector<float> &biasesWeights) {
auto deconv = std::make_shared<opset1::GroupConvolutionBackpropData>(in, weights, strides, padsBegin, padsEnd, dilations, autoPad);
if (!outputPadding.empty()) {
deconv = std::make_shared<opset1::GroupConvolutionBackpropData>(in, weights, strides, padsBegin, padsEnd, dilations, autoPad, outputPadding);
}
if (addBiases) {
bool randomBiases = biasesWeights.empty();
auto biasesWeightsNode = makeConstant(type, {}, biasesWeights, randomBiases);
auto add = std::make_shared<ngraph::opset1::Add>(deconv, biasesWeightsNode);
return add;
} else {
return deconv;
}
}
std::shared_ptr<Node> makeGroupConvolutionBackpropData(const ngraph::Output<Node> &in,
const ngraph::Output<Node> &outputShape,
const element::Type &type,
const std::vector<size_t> &filterSize,
const std::vector<size_t> &strides,
const std::vector<ptrdiff_t> &padsBegin,
const std::vector<ptrdiff_t> &padsEnd,
const std::vector<size_t> &dilations,
const op::PadType &autoPad,
size_t numOutChannels,
size_t numGroups,
bool addBiases,
const std::vector<ptrdiff_t> &outputPadding,
const std::vector<float> &filterWeights,
const std::vector<float> &biasesWeights) {
bool randomFilterWeights = filterWeights.empty();
auto shape = in.get_shape();
std::vector<size_t> filterWeightsShape = {shape[1], numOutChannels};
if (filterWeightsShape[0] % numGroups || filterWeightsShape[1] % numGroups)
throw std::runtime_error("incorrect shape for GroupConvolutionBackpropData");
filterWeightsShape[0] /= numGroups;
filterWeightsShape[1] /= numGroups;
filterWeightsShape.insert(filterWeightsShape.begin(), numGroups);
filterWeightsShape.insert(filterWeightsShape.end(), filterSize.begin(), filterSize.end());
auto filterWeightsNode = makeConstant(type, filterWeightsShape, filterWeights, randomFilterWeights);
auto deconv = std::make_shared<opset1::GroupConvolutionBackpropData>(in, filterWeightsNode, outputShape, strides, padsBegin, padsEnd, dilations, autoPad);
if (!outputPadding.empty()) {
deconv = std::make_shared<opset1::GroupConvolutionBackpropData>(in, filterWeightsNode, outputShape, strides, padsBegin,
padsEnd, dilations, autoPad, outputPadding);
}
if (addBiases) {
bool randomBiases = biasesWeights.empty();
auto biasesWeightsNode = makeConstant(type, {}, biasesWeights, randomBiases);

View File

@ -134,25 +134,23 @@ namespace ngraph
pads_end);
}
template <typename INPUT,
typename FILTER,
typename OUTPUT,
typename ACCU = typename widen<OUTPUT>::type>
void group_convolution_backprop_data(const INPUT* in,
const FILTER* f,
OUTPUT* out,
template <typename T>
void group_convolution_backprop_data(const T* in,
const T* f,
T* out,
const Shape& in_shape,
const Shape& filter_shape,
const Shape& out_shape,
const Strides& strides,
const Strides& dilation,
const CoordinateDiff& pads_begin,
const CoordinateDiff& pads_end)
const CoordinateDiff& pads_end,
const CoordinateDiff& output_padding)
{
const size_t group_count = filter_shape[filter_group_axis];
const INPUT* group_batch = in;
const T* group_batch = in;
const Shape group_batch_shape = [&]() {
Shape new_shape{in_shape};
new_shape[in_batch_axis] = 1;
@ -161,14 +159,14 @@ namespace ngraph
}();
const size_t group_batch_size = shape_size(group_batch_shape);
const FILTER* group_filter = f;
const T* group_filter = f;
const Shape group_filter_shape = [&]() {
Shape new_shape{++filter_shape.begin(), filter_shape.end()};
return new_shape;
}();
const size_t group_filter_size = shape_size(group_filter_shape);
OUTPUT* group_out = out;
T* group_out = out;
const Shape group_out_shape = [&]() {
Shape new_shape{out_shape};
new_shape[out_batch_axis] = 1;
@ -178,7 +176,6 @@ namespace ngraph
const size_t group_out_size = shape_size(group_out_shape);
Strides in_dilation(in_shape.size(), 1);
const ngraph::CoordinateDiff output_padding(in_shape.size() - 2, 0);
for (size_t batch_idx = 0; batch_idx < in_shape[in_batch_axis]; ++batch_idx)
{
group_filter = f;
@ -202,6 +199,40 @@ namespace ngraph
}
}
}
// DEPRECATED, can't be removed currently due to arm-plugin dependency
// Legacy overload without output_padding: forwards to the new implementation
// with a zero output padding for every spatial dimension (in_shape is
// rank-2-plus-spatial, hence size() - 2 entries).
// NOTE(review): ACCUMULATION is unused in this body — presumably retained
// only for signature compatibility with the old API; confirm before removal.
template <typename OUTPUT,
          typename FILTER,
          typename INPUT,
          typename ACCUMULATION = typename widen<INPUT>::type>
NGRAPH_DEPRECATED(
    "group_convolution_backprop_data function without output_paddings is deprecated, "
    "use the one with output_padding.")
void group_convolution_backprop_data(const INPUT* in,
                                     const FILTER* f,
                                     OUTPUT* out,
                                     const Shape& in_shape,
                                     const Shape& filter_shape,
                                     const Shape& out_shape,
                                     const Strides& strides,
                                     const Strides& dilation,
                                     const CoordinateDiff& pads_begin,
                                     const CoordinateDiff& pads_end)
{
    // Zero padding per spatial axis reproduces the old behavior exactly.
    const ngraph::CoordinateDiff output_padding(in_shape.size() - 2, 0);

    group_convolution_backprop_data(in,
                                    f,
                                    out,
                                    in_shape,
                                    filter_shape,
                                    out_shape,
                                    strides,
                                    dilation,
                                    pads_begin,
                                    pads_end,
                                    output_padding);
}
} // namespace reference
} // namespace runtime

View File

@ -334,7 +334,8 @@ namespace
op->get_strides(),
op->get_dilations(),
op->get_pads_begin(),
op->get_pads_end());
op->get_pads_end(),
op->get_output_padding());
return true;
}