diff --git a/docs/ops/pooling/MaxPool_1.md b/docs/ops/pooling/MaxPool_1.md index e730b7892ca..6c54d387913 100644 --- a/docs/ops/pooling/MaxPool_1.md +++ b/docs/ops/pooling/MaxPool_1.md @@ -52,15 +52,16 @@ * *floor* * **Type**: string * **Default value**: *floor* + * **Required**: *no* * *auto_pad* * **Description**: *auto_pad* how the padding is calculated. Possible values: - * None (not specified): use explicit padding values. + * *explicit*: use explicit padding values. * *same_upper (same_lower)* the input is padded to match the output size. In case of odd padding value an extra padding is added at the end (at the beginning). * *valid* - do not use padding. * **Type**: string - * **Default value**: None + * **Default value**: *explicit* * **Required**: *no* * **Note**: *pads_begin* and *pads_end* attributes are ignored when *auto_pad* is specified. @@ -70,9 +71,9 @@ **Mathematical Formulation** -\f[ -output_{j} = MAX\{ x_{0}, ... x_{i}\} -\f] + \f[ + output_{j} = max(x_{0}, ..., x_{i}) + \f] **Example** diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/pooling.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/pooling.cpp index 87a0f1f4528..98bb97e8487 100644 --- a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/pooling.cpp +++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/pooling.cpp @@ -25,12 +25,18 @@ const std::vector netPrecisions = { const std::vector> kernels = {{3, 3}, {3, 5}}; +const std::vector> kernel3D = {{2, 2, 2}}; + const std::vector> strides = {{1, 1}, {1, 2}}; +const std::vector> strides3D = {{1, 1, 1}, + {2, 2, 2}}; const std::vector> padBegins = {{0, 0}, {0, 2}}; +const std::vector> padBegins3D = {{0, 0, 0}}; const std::vector> padEnds = {{0, 0}, {0, 2}}; +const std::vector> padEnds3D = {{0, 0, 0}}; const std::vector roundingTypes = {ngraph::op::RoundingType::CEIL, 
ngraph::op::RoundingType::FLOOR}; ////* ========== Max Polling ========== */ @@ -46,7 +52,7 @@ const auto maxPool_ExplicitPad_FloorRounding_Params = ::testing::Combine( ::testing::Values(false) // placeholder value - exclude pad not applicable for max pooling ); -INSTANTIATE_TEST_CASE_P(smoke_MaxPool_ExplicitPad_FloorRpunding, PoolingLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_MaxPool_ExplicitPad_FloorRounding, PoolingLayerTest, ::testing::Combine( maxPool_ExplicitPad_FloorRounding_Params, ::testing::ValuesIn(netPrecisions), @@ -58,6 +64,126 @@ INSTANTIATE_TEST_CASE_P(smoke_MaxPool_ExplicitPad_FloorRpunding, PoolingLayerTes ::testing::Values(CommonTestUtils::DEVICE_CPU)), PoolingLayerTest::getTestCaseName); +/* +========== Same Upper Pad Floor Rounding ========== */ +const auto maxPool_SameUpperPad_FloorRounding_Params = ::testing::Combine( + ::testing::Values(ngraph::helpers::PoolingTypes::MAX), + ::testing::ValuesIn(kernels), + ::testing::ValuesIn(strides), + ::testing::ValuesIn(padBegins), + ::testing::ValuesIn(padEnds), + ::testing::Values(ngraph::op::RoundingType::FLOOR), + ::testing::Values(ngraph::op::PadType::SAME_UPPER), + ::testing::Values(false) // placeholder value - exclude pad not applicable for max pooling +); + +INSTANTIATE_TEST_CASE_P(smoke_MaxPool_SameUpperPad_FloorRounding, PoolingLayerTest, + ::testing::Combine( + maxPool_SameUpperPad_FloorRounding_Params, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(std::vector({1, 3, 30, 30})), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + PoolingLayerTest::getTestCaseName); + +/* +========== Same Lower Pad Floor Rounding ========== */ +const auto maxPool_SameLowerPad_FloorRounding_Params = ::testing::Combine( + ::testing::Values(ngraph::helpers::PoolingTypes::MAX), + 
::testing::ValuesIn(kernels), + ::testing::ValuesIn(strides), + ::testing::ValuesIn(padBegins), + ::testing::ValuesIn(padEnds), + ::testing::Values(ngraph::op::RoundingType::FLOOR), + ::testing::Values(ngraph::op::PadType::SAME_LOWER), + ::testing::Values(false) // placeholder value - exclude pad not applicable for max pooling +); + +INSTANTIATE_TEST_CASE_P(smoke_MaxPool_SameLowerPad_FloorRounding, PoolingLayerTest, + ::testing::Combine( + maxPool_SameLowerPad_FloorRounding_Params, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(std::vector({1, 3, 30, 30})), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + PoolingLayerTest::getTestCaseName); + +/* ========== Explicit Pad Floor Rounding 5D input========== */ +const auto maxPool_ExplicitPad_FloorRounding_5Dinput_Params = ::testing::Combine( + ::testing::Values(ngraph::helpers::PoolingTypes::MAX), + ::testing::ValuesIn(kernel3D), + ::testing::ValuesIn(strides3D), + ::testing::ValuesIn(padBegins3D), + ::testing::ValuesIn(padEnds3D), + ::testing::Values(ngraph::op::RoundingType::FLOOR), + ::testing::Values(ngraph::op::PadType::EXPLICIT), + ::testing::Values(false) // placeholder value - exclude pad not applicable for max pooling +); + +INSTANTIATE_TEST_CASE_P(smoke_MaxPool_ExplicitPad_FloorRounding_5Dinput, PoolingLayerTest, + ::testing::Combine( + maxPool_ExplicitPad_FloorRounding_5Dinput_Params, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(std::vector({32, 32, 2, 2, 2})), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + 
PoolingLayerTest::getTestCaseName); + +/* ========== Same Upper Pad Floor Rounding 5D input========== */ +const auto maxPool_SameUpperPad_FloorRounding_5Dinput_Params = ::testing::Combine( + ::testing::Values(ngraph::helpers::PoolingTypes::MAX), + ::testing::ValuesIn(kernel3D), + ::testing::ValuesIn(strides3D), + ::testing::ValuesIn(padBegins3D), + ::testing::ValuesIn(padEnds3D), + ::testing::Values(ngraph::op::RoundingType::FLOOR), + ::testing::Values(ngraph::op::PadType::SAME_UPPER), + ::testing::Values(false) // placeholder value - exclude pad not applicable for max pooling +); + +INSTANTIATE_TEST_CASE_P(smoke_MaxPool_SameUpperPad_FloorRounding_5Dinput, PoolingLayerTest, + ::testing::Combine( + maxPool_SameUpperPad_FloorRounding_5Dinput_Params, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(std::vector({32, 32, 2, 2, 2})), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + PoolingLayerTest::getTestCaseName); + +/* ========== Same Lower Pad Ceil Rounding 5D input========== */ +const auto maxPool_SameLowerPad_CeilRounding_5Dinput_Params = ::testing::Combine( + ::testing::Values(ngraph::helpers::PoolingTypes::MAX), + ::testing::ValuesIn(kernel3D), + ::testing::ValuesIn(strides3D), + ::testing::ValuesIn(padBegins3D), + ::testing::ValuesIn(padEnds3D), + ::testing::Values(ngraph::op::RoundingType::CEIL), + ::testing::Values(ngraph::op::PadType::SAME_LOWER), + ::testing::Values(false) // placeholder value - exclude pad not applicable for max pooling +); + +INSTANTIATE_TEST_CASE_P(smoke_MaxPool_SameLowerPad_CeilRounding_5Dinput, PoolingLayerTest, + ::testing::Combine( + maxPool_SameLowerPad_CeilRounding_5Dinput_Params, + ::testing::ValuesIn(netPrecisions), + ::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + 
::testing::Values(InferenceEngine::Precision::UNSPECIFIED), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(InferenceEngine::Layout::ANY), + ::testing::Values(std::vector({32, 32, 2, 2, 2})), + ::testing::Values(CommonTestUtils::DEVICE_CPU)), + PoolingLayerTest::getTestCaseName); + /* ========== Explicit Pad Ceil Rounding ========== */ const auto maxPool_ExplicitPad_CeilRounding_Params = ::testing::Combine( ::testing::Values(ngraph::helpers::PoolingTypes::MAX), @@ -70,7 +196,7 @@ const auto maxPool_ExplicitPad_CeilRounding_Params = ::testing::Combine( ::testing::Values(false) // placeholder value - exclude pad not applicable for max pooling ); -INSTANTIATE_TEST_CASE_P(smoke_MaxPool_ExplicitPad_CeilRpunding, PoolingLayerTest, +INSTANTIATE_TEST_CASE_P(smoke_MaxPool_ExplicitPad_CeilRounding, PoolingLayerTest, ::testing::Combine( maxPool_ExplicitPad_CeilRounding_Params, ::testing::ValuesIn(netPrecisions), diff --git a/ngraph/core/include/ngraph/op/max_pool.hpp b/ngraph/core/include/ngraph/op/max_pool.hpp index 51fd44ad275..ebb624fd266 100644 --- a/ngraph/core/include/ngraph/op/max_pool.hpp +++ b/ngraph/core/include/ngraph/op/max_pool.hpp @@ -49,24 +49,8 @@ namespace ngraph const Shape& pads_begin, const Shape& pads_end, const Shape& kernel, - op::RoundingType rounding_mode, - const PadType& auto_pad); - - /// \brief Constructs a batched max pooling operation. - /// - /// \param arg The node producing the input data batch tensor. - /// \param strides The strides. - /// \param pads_begin The beginning of padding shape. - /// \param pads_end The end of padding shape. - /// \param kernel The kernel shape. - /// \param rounding_mode Whether to use ceiling or floor rounding type while - /// computing output shape. 
- MaxPool(const Output& arg, - const Strides& strides, - const Shape& pads_begin, - const Shape& pads_end, - const Shape& kernel, - op::RoundingType rounding_mode); + op::RoundingType rounding_mode = op::RoundingType::FLOOR, + const PadType& auto_pad = op::PadType::EXPLICIT); bool visit_attributes(AttributeVisitor& visitor) override; size_t get_version() const override { return 1; } @@ -108,7 +92,7 @@ namespace ngraph Shape m_pads_begin; Shape m_pads_end; PadType m_auto_pad; - op::RoundingType m_rounding_type{op::RoundingType::FLOOR}; + op::RoundingType m_rounding_type; private: bool update_auto_padding(const PartialShape& in_shape, diff --git a/ngraph/core/include/ngraph/op/util/attr_types.hpp b/ngraph/core/include/ngraph/op/util/attr_types.hpp index 5456247bb83..48117bafe0d 100644 --- a/ngraph/core/include/ngraph/op/util/attr_types.hpp +++ b/ngraph/core/include/ngraph/op/util/attr_types.hpp @@ -66,7 +66,9 @@ namespace ngraph /// Floor(num_dims/2) at the beginning and /// Ceil(num_dims/2) at the end /// VALID - No padding - /// + /// AUTO - Deprecated. User should not use it in the future + /// NOTSET - Deprecated. 
User should not use it in the future + enum class PadType { EXPLICIT = 0, diff --git a/ngraph/core/src/op/max_pool.cpp b/ngraph/core/src/op/max_pool.cpp index ba7c0b29c5b..c1b34159f9c 100644 --- a/ngraph/core/src/op/max_pool.cpp +++ b/ngraph/core/src/op/max_pool.cpp @@ -68,16 +68,6 @@ op::v1::MaxPool::MaxPool(const Output& arg, constructor_validate_and_infer_types(); } -op::v1::MaxPool::MaxPool(const Output& arg, - const Strides& strides, - const Shape& pads_begin, - const Shape& pads_end, - const Shape& kernel, - op::RoundingType rounding_type) - : v1::MaxPool(arg, strides, pads_begin, pads_end, kernel, rounding_type, op::PadType::EXPLICIT) -{ -} - bool ngraph::op::v1::MaxPool::visit_attributes(AttributeVisitor& visitor) { visitor.on_attribute("strides", m_strides); diff --git a/ngraph/test/CMakeLists.txt b/ngraph/test/CMakeLists.txt index 38bc076f815..336f9f86f16 100644 --- a/ngraph/test/CMakeLists.txt +++ b/ngraph/test/CMakeLists.txt @@ -303,6 +303,7 @@ set(MULTI_TEST_SRC backend/lrn.in.cpp backend/matmul.in.cpp backend/maximum.in.cpp + backend/max_pool.in.cpp backend/minimum.in.cpp backend/multiple_backends.in.cpp backend/multiple_result.in.cpp diff --git a/ngraph/test/backend/max_pool.in.cpp b/ngraph/test/backend/max_pool.in.cpp new file mode 100644 index 00000000000..e310c06b32d --- /dev/null +++ b/ngraph/test/backend/max_pool.in.cpp @@ -0,0 +1,187 @@ +//***************************************************************************** +// Copyright 2017-2020 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +//***************************************************************************** + +// clang-format off +#ifdef ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS +#define DEFAULT_FLOAT_TOLERANCE_BITS ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS +#endif + +#ifdef ${BACKEND_NAME}_DOUBLE_TOLERANCE_BITS +#define DEFAULT_DOUBLE_TOLERANCE_BITS ${BACKEND_NAME}_DOUBLE_TOLERANCE_BITS +#endif +// clang-format on + +#include "gtest/gtest.h" +#include "ngraph/ngraph.hpp" +#include "util/engine/test_engines.hpp" +#include "util/test_case.hpp" +#include "util/test_control.hpp" + +using namespace std; +using namespace ngraph; + +static string s_manifest = "${MANIFEST}"; +using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME}); + +NGRAPH_TEST(${BACKEND_NAME}, max_pool_2d_floor) +{ + Shape in_shape{1, 1, 3, 3}; + Shape out_shape{1, 1, 2, 2}; + const Strides& strides{1, 1}; + const Shape& pads_begin{0, 0}; + const Shape& pads_end{0, 0}; + const Shape& kernel{2, 2}; + const op::RoundingType rounding_type = op::RoundingType::FLOOR; + const op::PadType pad_type = op::PadType::NOTSET; + + auto A = make_shared(element::f32, in_shape); + auto maxPool = make_shared( + A, strides, pads_begin, pads_end, kernel, rounding_type, pad_type); + auto f = make_shared(maxPool, ParameterVector{A}); + + std::vector a{1, 2, 3, 4, 5, 6, 7, 8, 9}; + std::vector result{5, 6, 8, 9}; + + auto test_case = test::TestCase(f); + test_case.add_input({a}); + test_case.add_expected_output(out_shape, result); + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, max_pool_2d_ceil) +{ + Shape in_shape{1, 1, 4, 4}; + Shape out_shape{1, 1, 2, 2}; + const Strides& strides{1, 1}; + const Shape& pads_begin{0, 0}; + const Shape& pads_end{0, 0}; + const Shape& kernel{3, 3}; + const op::RoundingType rounding_type = op::RoundingType::CEIL; + const op::PadType pad_type = op::PadType::NOTSET; + + auto A = make_shared(element::f32, 
in_shape); + auto maxPool = make_shared( + A, strides, pads_begin, pads_end, kernel, rounding_type, pad_type); + auto f = make_shared(maxPool, ParameterVector{A}); + + std::vector a{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + std::vector result{11, 12, 15, 16}; + + auto test_case = test::TestCase(f); + test_case.add_input({a}); + test_case.add_expected_output(out_shape, result); + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, max_pool_2d_pad) +{ + Shape in_shape{1, 1, 2, 2}; + Shape out_shape{1, 1, 3, 3}; + const Strides& strides{1, 1}; + const Shape& pads_begin{1, 1}; + const Shape& pads_end{1, 1}; + const Shape& kernel{2, 2}; + const op::RoundingType rounding_type = op::RoundingType::CEIL; + const op::PadType pad_type = op::PadType::NOTSET; + + auto A = make_shared(element::f32, in_shape); + auto maxPool = make_shared( + A, strides, pads_begin, pads_end, kernel, rounding_type, pad_type); + auto f = make_shared(maxPool, ParameterVector{A}); + + std::vector a{1, 2, 3, 4}; + std::vector result{1, 2, 2, 3, 4, 4, 3, 4, 4}; + + auto test_case = test::TestCase(f); + test_case.add_input({a}); + test_case.add_expected_output(out_shape, result); + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, max_pool_2d_same_upper) +{ + Shape in_shape{1, 1, 3, 3}; + Shape out_shape{1, 1, 3, 3}; + const Strides& strides{1, 1}; + const Shape& pads_begin{0, 0}; + const Shape& pads_end{0, 0}; + const Shape& kernel{2, 2}; + const op::RoundingType rounding_type = op::RoundingType::CEIL; + const op::PadType pad_type = op::PadType::SAME_UPPER; + + auto A = make_shared(element::f32, in_shape); + auto maxPool = make_shared( + A, strides, pads_begin, pads_end, kernel, rounding_type, pad_type); + auto f = make_shared(maxPool, ParameterVector{A}); + + std::vector a{1, 2, 3, 4, 5, 6, 7, 8, 9}; + std::vector result{5, 6, 6, 8, 9, 9, 8, 9, 9}; + + auto test_case = test::TestCase(f); + test_case.add_input({a}); + test_case.add_expected_output(out_shape, result); + 
test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, max_pool_3d) +{ + Shape in_shape{1, 1, 2, 2, 2}; + Shape out_shape{1, 1, 2, 2, 1}; + const Strides& strides{1, 1, 1}; + const Shape& pads_begin{0, 0, 0}; + const Shape& pads_end{0, 0, 0}; + const Shape& kernel{1, 1, 2}; + const op::RoundingType rounding_type = op::RoundingType::CEIL; + const op::PadType pad_type = op::PadType::VALID; + + auto A = make_shared(element::f32, in_shape); + auto maxPool = make_shared( + A, strides, pads_begin, pads_end, kernel, rounding_type, pad_type); + auto f = make_shared(maxPool, ParameterVector{A}); + + std::vector a{1, 2, 3, 4, 5, 6, 7, 8}; + std::vector result{2, 4, 6, 8}; + + auto test_case = test::TestCase(f); + test_case.add_input({a}); + test_case.add_expected_output(out_shape, result); + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, max_pool_2d_same_lower) +{ + Shape in_shape{1, 1, 3, 3}; + Shape out_shape{1, 1, 3, 3}; + const Strides& strides{1, 1}; + const Shape& pads_begin{0, 0}; + const Shape& pads_end{0, 0}; + const Shape& kernel{2, 2}; + const op::RoundingType rounding_type = op::RoundingType::CEIL; + const op::PadType pad_type = op::PadType::SAME_LOWER; + + auto A = make_shared(element::f32, in_shape); + auto maxPool = make_shared( + A, strides, pads_begin, pads_end, kernel, rounding_type, pad_type); + auto f = make_shared(maxPool, ParameterVector{A}); + + std::vector a{1, 2, 3, 4, 5, 6, 7, 8, 9}; + std::vector result{1, 2, 3, 4, 5, 6, 7, 8, 9}; + + auto test_case = test::TestCase(f); + test_case.add_input({a}); + test_case.add_expected_output(out_shape, result); + test_case.run(); +} diff --git a/ngraph/test/type_prop/max_pool.cpp b/ngraph/test/type_prop/max_pool.cpp index e274e733f1a..fb9c59403f3 100644 --- a/ngraph/test/type_prop/max_pool.cpp +++ b/ngraph/test/type_prop/max_pool.cpp @@ -99,3 +99,18 @@ TEST(type_prop, max_pool_auto_padding_spatial_dims_dynamic) ASSERT_EQ(mp->get_pads_begin(), (Shape{})); ASSERT_EQ(mp->get_pads_end(), (Shape{})); } + 
+TEST(type_prop, max_pool_default_values) +{ + const PartialShape arg_shape{1, 3, 32, 32}; + const Strides strides{1, 1}; + const Shape pads_begin{0, 0}; + const Shape pads_end{0, 0}; + const Shape kernel_shape{2, 2}; + + auto arg = make_shared(element::f32, arg_shape); + auto mp = make_shared(arg, strides, pads_begin, pads_end, kernel_shape); + + ASSERT_EQ(mp->get_rounding_type(), op::RoundingType::FLOOR); + ASSERT_EQ(mp->get_auto_pad(), op::PadType::EXPLICIT); +}