From 78f95ddea432e41d83f76371469f62e80c5a3070 Mon Sep 17 00:00:00 2001
From: Pawel Raasz
Date: Fri, 2 Dec 2022 16:48:51 +0100
Subject: [PATCH] Fix reshape evaluate to use tensor input shape (#14307)

---
 src/core/src/op/reshape.cpp |  9 ++++-----
 src/core/tests/eval.cpp     | 35 +++++++++++++++++++++++++++++++++++
 2 files changed, 39 insertions(+), 5 deletions(-)

diff --git a/src/core/src/op/reshape.cpp b/src/core/src/op/reshape.cpp
index 3e0f1f359a6..725338fcb2b 100644
--- a/src/core/src/op/reshape.cpp
+++ b/src/core/src/op/reshape.cpp
@@ -8,6 +8,7 @@
 #include
 #include
 
+#include "compare.hpp"
 #include "itt.hpp"
 #include "ngraph/op/constant.hpp"
 #include "ngraph/runtime/opt_kernel/reshape.hpp"
@@ -417,18 +418,16 @@ void op::v1::Reshape::calculate_output_shape(vector<Dimension>& reshape_pattern,
     ov::PartialShape output_pshape(output_shape);
 
     if (input_pshape.is_static() && output_pshape.is_static()) {
-        size_t zero_dims = std::count_if(reshape_pattern.begin(), reshape_pattern.end(), [](Dimension dim) {
-            return dim.get_max_length() == 0 && dim.get_min_length() == 0;
-        });
+        size_t zero_dims = std::count_if(reshape_pattern.begin(), reshape_pattern.end(), cmp::Equal<Dimension>(0));
 
         bool backward_compatible_check = (zero_dims && get_special_zero()) || minus_one_idx != -1;
-        bool in_out_elements_equal = shape_size(get_input_shape(0)) == shape_size(output_pshape.to_shape());
+        bool in_out_elements_equal = shape_size(input_pshape.get_shape()) == shape_size(output_pshape.to_shape());
 
         NODE_VALIDATION_CHECK(this,
                               backward_compatible_check || in_out_elements_equal,
                               "Requested output shape ",
                               output_shape,
                               " is incompatible with input shape ",
-                              get_input_shape(0));
+                              input_pshape);
     }
 }
 
diff --git a/src/core/tests/eval.cpp b/src/core/tests/eval.cpp
index 80ef4c71ffe..611692daa4e 100644
--- a/src/core/tests/eval.cpp
+++ b/src/core/tests/eval.cpp
@@ -7,6 +7,7 @@
 #include
 #include
 
+#include "common_test_utils/test_assertions.hpp"
 #include "engines_util/execute_tools.hpp"
 #include "engines_util/test_case.hpp"
 #include "gmock/gmock.h"
@@ -70,6 +71,7 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 using namespace std;
 using namespace ngraph;
+using namespace testing;
 
 #define ASSERT_FLOAT_VECTORS_EQ(expected, result)                       \
     ASSERT_EQ(expected.size(), result.size()) << "Array sizes differ."; \
@@ -633,6 +635,39 @@ TEST(eval, evaluate_reshape_v1_pattern_int16) {
     ASSERT_EQ(computed_val, expected_val);
 }
 
+TEST(eval, evaluate_reshape_v1_data_dynamic_shape) {
+    constexpr auto exp_dtype = element::i32;
+
+    auto data = make_shared<op::Parameter>(exp_dtype, PartialShape::dynamic());
+    auto pattern = make_shared<op::Parameter>(element::i64, Shape{6});
+    auto dyn_reshape = make_shared<op::v1::Reshape>(data, pattern, true);
+    auto f = make_shared<Function>(OutputVector{dyn_reshape}, ParameterVector{data, pattern});
+    auto result_tensor = make_shared<HostTensor>();
+
+    ASSERT_TRUE(f->evaluate({result_tensor},
+                            {make_host_tensor<element::Type_t::i32>(Shape{2, 2, 2}, {0, 1, 2, 3, 4, 5, 6, 7}),
+                             make_host_tensor<element::Type_t::i64>(pattern->get_shape(), {2, 0, 1, -1, 1, 1})}));
+
+    EXPECT_EQ(result_tensor->get_element_type(), exp_dtype);
+    EXPECT_EQ(result_tensor->get_partial_shape(), PartialShape({2, 2, 1, 2, 1, 1}));
+    EXPECT_THAT(read_vector<int32_t>(result_tensor), ElementsAre(0, 1, 2, 3, 4, 5, 6, 7));
+}
+
+TEST(eval, evaluate_reshape_v1_not_backward_compatible_and_in_out_size_not_eq) {
+    constexpr auto exp_dtype = element::i32;
+    auto data = make_shared<op::Parameter>(exp_dtype, PartialShape::dynamic());
+    auto pattern = make_shared<op::Parameter>(element::i16, Shape{5});
+    auto dyn_reshape = make_shared<op::v1::Reshape>(data, pattern, true);
+    auto f = make_shared<Function>(OutputVector{dyn_reshape}, ParameterVector{data, pattern});
+    auto result_tensor = make_shared<HostTensor>();
+
+    OV_EXPECT_THROW(f->evaluate({result_tensor},
+                                {make_host_tensor<element::Type_t::i32>(Shape{2, 2, 2}, {0, 1, 2, 3, 4, 5, 6, 7}),
+                                 make_host_tensor<element::Type_t::i16>(pattern->get_shape(), {2, 1, 1, 1, 1})}),
+                    NodeValidationFailure,
+                    HasSubstr("Requested output shape [2,1,1,1,1] is incompatible with input shape [2,2,2]"));
+}
+
 TEST(eval, evaluate_convert) {
     auto p = make_shared<op::Parameter>(element::f32, PartialShape{-1, -1});
     auto convert =
make_shared<op::Convert>(p, element::i64);