From deeb0577f2a0dea3fada3c599f262d88d9b571a1 Mon Sep 17 00:00:00 2001 From: Gabriele Galiero Casay Date: Mon, 31 Aug 2020 18:26:36 +0200 Subject: [PATCH] Dynamic shape support for builder collapse (#1971) --- .../core/include/ngraph/builder/reshape.hpp | 15 +++ ngraph/core/src/builder/matmul_factory.cpp | 17 +--- ngraph/core/src/builder/reshape.cpp | 50 ++++++++++ .../backend/builder_reduce_ops_opset1.in.cpp | 91 +++++++++++++++++++ ngraph/test/runtime/ie/unit_test.manifest | 7 ++ 5 files changed, 165 insertions(+), 15 deletions(-) diff --git a/ngraph/core/include/ngraph/builder/reshape.hpp b/ngraph/core/include/ngraph/builder/reshape.hpp index 1c974feb4ca..5af06e7e5d2 100644 --- a/ngraph/core/include/ngraph/builder/reshape.hpp +++ b/ngraph/core/include/ngraph/builder/reshape.hpp @@ -86,6 +86,21 @@ namespace ngraph NGRAPH_API std::shared_ptr squeeze(const Output& value, std::vector axes = {0}); + + /// \brief Collapse specified axes into single one. + /// + /// \note Collapsed axes create a continuous range starting from outermost axis. + /// + /// \param[in] value The value to be reshaped. + /// \param[in] start_axis The start axis index. + /// \param[in] end_axis The end axis (inclusive) index. + /// + /// \return The node with collapsed specified axes. 
+ /// + NGRAPH_API + std::shared_ptr collapse(const Output& value, + const std::size_t start_axis, + const std::size_t end_axis); } } // namespace builder } // namespace ngraph diff --git a/ngraph/core/src/builder/matmul_factory.cpp b/ngraph/core/src/builder/matmul_factory.cpp index effe790c201..a34f34c7c33 100644 --- a/ngraph/core/src/builder/matmul_factory.cpp +++ b/ngraph/core/src/builder/matmul_factory.cpp @@ -73,19 +73,6 @@ Output builder::MatmulFactory::get_right() OutputVector builder::MatmulFactory::make_matmul_op() { - auto collapse = [](const Output& value, const size_t start_axis, const size_t end_axis) { - auto shape = value.get_shape(); - size_t collapsed_axis_size = accumulate(next(begin(shape), start_axis), - next(begin(shape), end_axis + 1), - 1UL, - multiplies()); - - Shape output_shape{collapsed_axis_size}; - output_shape.insert(end(output_shape), next(begin(shape), end_axis + 1), end(shape)); - return make_shared( - value, get_default_order(value.get_shape().size()), output_shape) - ->add_provenance_group_members_above({value}); - }; auto left = get_left(); auto right = get_right(); @@ -120,11 +107,11 @@ OutputVector builder::MatmulFactory::make_matmul_op() // This will make easier further dot product calculations. 
if (left_shape.size() > 3) { - left = collapse(left, 0, left_shape.size() - 3); + left = builder::opset1::collapse(left, 0, left_shape.size() - 3); } if (right_shape.size() > 3) { - right = collapse(right, 0, right_shape.size() - 3); + right = builder::opset1::collapse(right, 0, right_shape.size() - 3); } // Perform multiple small dot products diff --git a/ngraph/core/src/builder/reshape.cpp b/ngraph/core/src/builder/reshape.cpp index 99995e2a5fe..97837a51d25 100644 --- a/ngraph/core/src/builder/reshape.cpp +++ b/ngraph/core/src/builder/reshape.cpp @@ -202,3 +202,53 @@ shared_ptr builder::opset1::squeeze(const Output& value, vector builder::opset1::collapse(const Output& value, + const size_t start_axis, + const size_t end_axis) +{ + if (start_axis == end_axis) + { + return value.get_node_shared_ptr(); + } + + if (value.get_partial_shape().is_static()) + { + auto shape = value.get_shape(); + // Multiply all elements of shape from start_axis to end_axis inclusive + size_t collapsed_axis_size = accumulate(next(begin(shape), start_axis), + next(begin(shape), end_axis + 1), + 1UL, + multiplies()); + Shape output_shape{}; + output_shape.insert(begin(output_shape), begin(shape), next(begin(shape), start_axis)); + output_shape.insert(end(output_shape), collapsed_axis_size); + output_shape.insert(end(output_shape), next(begin(shape), end_axis + 1), end(shape)); + return builder::opset1::reshape(value, output_shape); + } + + const auto shape = make_shared(value); + const auto rank = make_shared(shape); + + // Split lengths used in VariadicSplit + const auto start_axis_node = ngraph::opset1::Constant::create(element::i64, {1}, {start_axis}); + const auto end_axis_node = ngraph::opset1::Constant::create(element::i64, {1}, {end_axis + 1}); + const auto collapsed_axis = + make_shared(end_axis_node, start_axis_node); + const auto post_axis = make_shared(rank, end_axis_node); + + const auto split_lengths = make_shared( + OutputVector{start_axis_node, collapsed_axis, post_axis},
0); + const auto split_axis = ngraph::opset1::Constant::create(element::i64, {}, {0}); + const auto split_node = + make_shared(shape, split_axis, split_lengths); + + const auto reduced_axis = ngraph::opset1::Constant::create(element::i64, {1}, {0}); + const auto collapsed_axis_size = + make_shared(split_node->output(1), reduced_axis, true); + + const auto collapsed_shape = make_shared( + OutputVector{split_node->output(0), collapsed_axis_size, split_node->output(2)}, 0); + + return make_shared(value, collapsed_shape, false); +} diff --git a/ngraph/test/backend/builder_reduce_ops_opset1.in.cpp b/ngraph/test/backend/builder_reduce_ops_opset1.in.cpp index d2f16a4716d..8dbfef14c12 100644 --- a/ngraph/test/backend/builder_reduce_ops_opset1.in.cpp +++ b/ngraph/test/backend/builder_reduce_ops_opset1.in.cpp @@ -81,3 +81,94 @@ NGRAPH_TEST(${BACKEND_NAME}, builder_opset1_mean_dynamic_2) test_case.run(); } + +NGRAPH_TEST(${BACKEND_NAME}, builder_opset1_collapse_5d_to_3d) +{ + Shape shape_input{1, 2, 3, 4, 5}; + Shape shape_r{1, 24, 5}; + + const auto elems_in_tensor = shape_size(shape_input); + + const auto A = make_shared(element::f32, shape_input); + const auto builder_collapse = builder::opset1::collapse(A, 1, shape_input.size() - 2); + const auto f = make_shared(builder_collapse, ParameterVector{A}); + + vector a(elems_in_tensor, 1); + vector b(elems_in_tensor, 1); + + auto test_case = test::TestCase(f); + + test_case.add_input(shape_input, {a}); + test_case.add_expected_output(shape_r, b); + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, builder_opset1_collapse_all_dims) +{ + Shape shape_input{1, 2, 3, 4, 5, 6}; + Shape shape_r{720}; + + const auto elems_in_tensor = shape_size(shape_input); + + const auto A = make_shared(element::f32, shape_input); + const auto builder_collapse = builder::opset1::collapse(A, 0, shape_input.size() - 1); + const auto f = make_shared(builder_collapse, ParameterVector{A}); + + vector a(elems_in_tensor, 1); + vector b(elems_in_tensor, 
1); + + auto test_case = test::TestCase(f); + + test_case.add_input(shape_input, {a}); + test_case.add_expected_output(shape_r, b); + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, builder_opset1_collapse_none) +{ + Shape shape_input{1, 2, 3, 4, 5, 6}; + + const auto elems_in_tensor = shape_size(shape_input); + + const auto A = make_shared(element::f32, shape_input); + const auto builder_collapse = builder::opset1::collapse(A, 2, shape_input.size() - 4); + const auto f = make_shared(builder_collapse, ParameterVector{A}); + + vector a(elems_in_tensor, 1); + vector b(elems_in_tensor, 1); + + auto test_case = test::TestCase(f); + + test_case.add_input(shape_input, {a}); + test_case.add_expected_output(shape_input, b); + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, builder_opset1_collapse_dyn_shape) +{ + PartialShape pshape_input{1, 2, 3, 4, 5, Dimension()}; + PartialShape pshape_output{1, 24, 5, Dimension()}; + + const auto A = make_shared(element::f32, pshape_input); + EXPECT_TRUE(A->get_output_partial_shape(0).same_scheme( + PartialShape{1, 2, 3, 4, 5, Dimension::dynamic()})); + const auto builder_collapse = builder::opset1::collapse(A, 1, 3); + const auto f = make_shared(builder_collapse, ParameterVector{A}); + + auto test_case = test::TestCase(f); + + const size_t NUM_DIMENSIONS_TO_TEST = 5; + for (size_t dim = 1; dim < NUM_DIMENSIONS_TO_TEST; dim++) + { + Shape shape_input{1, 2, 3, 4, 5, dim}; + Shape shape_output{1, 24, 5, dim}; + const auto elems_in_tensor = shape_size(shape_input); + + std::vector input_values(elems_in_tensor, 1); + std::vector expected_values(elems_in_tensor, 1); + + test_case.add_input(shape_input, {input_values}); + test_case.add_expected_output(shape_output, expected_values); + test_case.run(); + } +} diff --git a/ngraph/test/runtime/ie/unit_test.manifest b/ngraph/test/runtime/ie/unit_test.manifest index 54e6787d1ce..9662ea27120 100644 --- a/ngraph/test/runtime/ie/unit_test.manifest +++ 
b/ngraph/test/runtime/ie/unit_test.manifest @@ -79,6 +79,7 @@ bool_const_op onnx_model_tile onnx_model_tile_static onnx_model_softmax_0D +builder_opset1_collapse_none # nGraph function's output number 0 was not found in the CNNNetwork built from it. onnx_model_split_equal_parts_2d @@ -1078,6 +1079,9 @@ IE_CPU.atanh IE_CPU.asinh IE_CPU.acosh +# Unsupported collapse op with dynamic shape +IE_CPU.builder_opset1_collapse_dyn_shape + # Interpolate-1 in linear mode # 2.666666507720947266 is not close to 3 at index 1 IE_CPU.interpolate_down_scales_const_linear @@ -1434,6 +1438,9 @@ IE_GPU.matmul_2x3_3x3 IE_GPU.matmul_3x2_3x3_transpose IE_GPU.matmul_3x2_2x3_transpose +# Unsupported collapse op with dynamic shape +IE_GPU.builder_opset1_collapse_dyn_shape + IE_GPU.onnx_model_fake_quantize_const_inputs_infer IE_GPU.onnx_model_fake_quantize_nonconst_inputs_infer