From 7b45975af8afff011cf29d5813f26fbe17b2d097 Mon Sep 17 00:00:00 2001
From: Mateusz Tabaka
Date: Fri, 30 Oct 2020 05:37:15 +0100
Subject: [PATCH] Remove obsoleted v0::ReplaceSlice (#2887)

---
 .../core/include/ngraph/op/op_version_tbl.hpp |   1 -
 .../core/include/ngraph/op/replace_slice.hpp  | 122 ---
 ngraph/core/include/ngraph/ops.hpp            |   1 -
 .../runtime/reference/replace_slice.hpp       |  72 --
 ngraph/core/src/op/replace_slice.cpp          | 178 -----
 ngraph/test/CMakeLists.txt                    |   2 -
 ngraph/test/backend/replace_slice.in.cpp      | 259 -------
 ngraph/test/op_is.cpp                         |   9 -
 .../runtime/interpreter/int_executable.hpp    |  14 -
 ngraph/test/runtime/opset0_tbl.hpp            |   1 -
 ngraph/test/runtime/pass/dyn_elimination.cpp  |   1 -
 ngraph/test/type_prop/replace_slice.cpp       | 720 ------------------
 12 files changed, 1380 deletions(-)
 delete mode 100644 ngraph/core/include/ngraph/op/replace_slice.hpp
 delete mode 100644 ngraph/core/reference/include/ngraph/runtime/reference/replace_slice.hpp
 delete mode 100644 ngraph/core/src/op/replace_slice.cpp
 delete mode 100644 ngraph/test/backend/replace_slice.in.cpp
 delete mode 100644 ngraph/test/type_prop/replace_slice.cpp

diff --git a/ngraph/core/include/ngraph/op/op_version_tbl.hpp b/ngraph/core/include/ngraph/op/op_version_tbl.hpp
index d412cab13f9..288e4dd4a23 100644
--- a/ngraph/core/include/ngraph/op/op_version_tbl.hpp
+++ b/ngraph/core/include/ngraph/op/op_version_tbl.hpp
@@ -146,7 +146,6 @@ NGRAPH_OP(ReduceSum, ngraph::op::v1, 1)
 NGRAPH_OP(RegionYolo, ngraph::op::v0, 0)
 NGRAPH_OP(Relu, ngraph::op::v0, 0)
 NGRAPH_OP(ReorgYolo, ngraph::op::v0, 0)
-NGRAPH_OP(ReplaceSlice, ngraph::op::v0, 0)
 NGRAPH_OP(Reshape, ngraph::op::v0, 0)
 NGRAPH_OP(Reshape, ngraph::op::v1, 1)
 NGRAPH_OP(Result, ngraph::op::v0, 0)
diff --git a/ngraph/core/include/ngraph/op/replace_slice.hpp b/ngraph/core/include/ngraph/op/replace_slice.hpp
deleted file mode 100644
index aefa176f63b..00000000000
--- a/ngraph/core/include/ngraph/op/replace_slice.hpp
+++ /dev/null
@@ -1,122 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//*****************************************************************************
-
-#pragma once
-
-#include "ngraph/coordinate.hpp"
-#include "ngraph/op/op.hpp"
-#include "ngraph/strides.hpp"
-
-namespace ngraph
-{
-    namespace op
-    {
-        namespace v0
-        {
-            // clang-format off
-            /// \brief Takes two input tensors of identical rank, with the second tensor no larger than
-            ///        the first in any dimension, and returns a copy of the first input tensor with
-            ///        the specified slice overwritten by the second input tensor.
- /// - /// ## Parameters - /// - /// | | Description | - /// | -------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | - /// | `lower_bounds` | The (inclusive) lower-bound coordinates \f$l_i\f$ for the tensor slice to be overwritten. For example, a lower-bound of \f$(1,2)\f$ means to start the slice at row 1 and column 2. | - /// | `upper_bounds` | The (non-inclusive) upper-bound coordinates \f$u_i\f$ for the tensor slice to be overwritten. For example, an upper-bound of \f$(5,4)\f$ means to end the slice before row 4 and column 3. | - /// | `strides` | The strides \f$s_i\f$ for the tensor slice to be overwritten. For example, in the matrix case, strides of \f$(1,3)\f$ means to take every row, and every third column (starting at the lower bound). | - /// - /// ## Inputs - /// - /// | | Type | Description | - /// | ------ | ------------------------------------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------- | - /// | `arg0` | \f$E[d_1,\dots,d_n]~(n \geq 0)\f$ | A tensor of any shape and element type. | - /// | `arg1` | \f$E[d'_1,\dots,d'_n]\f$ where \f$(d'_i = \lceil(u_i - l_i)\, /\, s_i\rceil\f$ | A tensor of the same element type and rank as `arg0`, whose shape is determined by the lower and upper slice bounds and slice strides. | - /// - /// ## Output - /// - /// | Type | Description | - /// | ---------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | - /// | \f$E[d_1,\dots,d_n]\f$ | The tensor \f$T\f$ where \f$T[i_1,\dots,i_n] = \texttt{arg1}[j_1,\dots,j_n]\f$ if \f$j_1,\dots,j_n\f$ is in bounds for `arg1` and for all \f$m\f$, \f$i_m = l_m + j_m s_m\f$, otherwise \f$\texttt{arg0}[i_1,\dots,i_n]\f$. | - // clang-format on - class NGRAPH_DEPRECATED( - "This operation is deprecated and will be removed soon. Please do not use it.") - NGRAPH_API ReplaceSlice : public Op - { - NGRAPH_SUPPRESS_DEPRECATED_START - public: - static constexpr NodeTypeInfo type_info{"ReplaceSlice", 0}; - const NodeTypeInfo& get_type_info() const override { return type_info; } - ReplaceSlice() = default; - /// \brief Constructs a tensor slice replacement operation. - /// - /// \param arg0 The tensor to overwrite into. - /// \param arg1 The tensor to write into `arg0`. - /// \param lower_bounds The axiswise lower bounds of the slice (inclusive). - /// \param upper_bounds The axiswise upper bounds of the slice (exclusive). - /// \param strides The slicing strides; for example, strides of `{n,m}` means to - /// take - /// every nth row and every mth column of `arg0` as part of the - /// slice to be replaced. - ReplaceSlice(const Output& arg0, - const Output& arg1, - const Coordinate& lower_bounds, - const Coordinate& upper_bounds, - const Strides& strides); - - /// \brief Constructs a tensor slice replacement operation with unit strides; i.e., - /// every element inside the bounding box will be overwritten. - /// - /// \param arg0 The tensor to overwrite into. - /// \param arg1 The tensor to write into `arg0`. - /// \param lower_bounds The axiswise lower bounds of the slice (inclusive). 
- /// \param upper_bounds The axiswise upper bounds of the slice (exclusive). - ReplaceSlice(const Output& arg0, - const Output& arg1, - const Coordinate& lower_bounds, - const Coordinate& upper_bounds); - - virtual std::shared_ptr - clone_with_new_inputs(const OutputVector& new_args) const override; - void validate_and_infer_types() override; - - /// \return The inclusive lower-bound coordinates. - const Coordinate& get_lower_bounds() const { return m_lower_bounds; } - void set_lower_bounds(const Coordinate& lower_bounds) - { - m_lower_bounds = lower_bounds; - } - /// \return The exclusive upper-bound coordinates. - const Coordinate& get_upper_bounds() const { return m_upper_bounds; } - void set_uppper_bounds(const Coordinate& upper_bounds) - { - m_upper_bounds = upper_bounds; - } - /// \return The slicing strides. - const Strides& get_strides() const { return m_strides; } - void set_strides(const Strides& strides) { m_strides = strides; } - protected: - Coordinate m_lower_bounds; - Coordinate m_upper_bounds; - Strides m_strides; - NGRAPH_SUPPRESS_DEPRECATED_END - }; - } - NGRAPH_SUPPRESS_DEPRECATED_START - using v0::ReplaceSlice; - NGRAPH_SUPPRESS_DEPRECATED_END - } -} diff --git a/ngraph/core/include/ngraph/ops.hpp b/ngraph/core/include/ngraph/ops.hpp index 45d69bf47db..07c63a276aa 100644 --- a/ngraph/core/include/ngraph/ops.hpp +++ b/ngraph/core/include/ngraph/ops.hpp @@ -126,7 +126,6 @@ #include "ngraph/op/region_yolo.hpp" #include "ngraph/op/relu.hpp" #include "ngraph/op/reorg_yolo.hpp" -#include "ngraph/op/replace_slice.hpp" #include "ngraph/op/reshape.hpp" #include "ngraph/op/result.hpp" #include "ngraph/op/reverse.hpp" diff --git a/ngraph/core/reference/include/ngraph/runtime/reference/replace_slice.hpp b/ngraph/core/reference/include/ngraph/runtime/reference/replace_slice.hpp deleted file mode 100644 index 1580dbcad71..00000000000 --- a/ngraph/core/reference/include/ngraph/runtime/reference/replace_slice.hpp +++ /dev/null @@ -1,72 +0,0 @@ -//***************************************************************************** -// Copyright 2017-2020 Intel Corporation -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -//***************************************************************************** - -#pragma once - -#include - -#include "ngraph/check.hpp" -#include "ngraph/coordinate_transform.hpp" - -namespace ngraph -{ - namespace runtime - { - namespace reference - { - template - void replace_slice(const T* arg0, // replacement context - const T* arg1, // replacement value - T* out, - const Shape& arg1_shape, - const Coordinate& lower_bounds, - const Coordinate& upper_bounds, - const Strides& strides, - const Shape& out_shape) - { - // Step 1: Copy the entire replacement context to the output. - CoordinateTransform copy_transform(out_shape); - - for (Coordinate copy_coord : copy_transform) - { - out[copy_transform.index(copy_coord)] = arg0[copy_transform.index(copy_coord)]; - } - - // Step 2: Overwrite the slice for replacement. 
- CoordinateTransform input_transform(arg1_shape); - CoordinateTransform output_transform( - out_shape, lower_bounds, upper_bounds, strides); - - NGRAPH_CHECK(shape_size(input_transform.get_target_shape()) == - shape_size(output_transform.get_target_shape())); - - CoordinateTransform::Iterator output_it = output_transform.begin(); - - for (const Coordinate& input_coord : input_transform) - { - if (output_it == output_transform.end()) - break; - const Coordinate& output_coord = *output_it; - - out[output_transform.index(output_coord)] = - arg1[input_transform.index(input_coord)]; - - ++output_it; - } - } - } - } -} diff --git a/ngraph/core/src/op/replace_slice.cpp b/ngraph/core/src/op/replace_slice.cpp deleted file mode 100644 index 2c5e460d73a..00000000000 --- a/ngraph/core/src/op/replace_slice.cpp +++ /dev/null @@ -1,178 +0,0 @@ -//***************************************************************************** -// Copyright 2017-2020 Intel Corporation -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -//***************************************************************************** - -#include "ngraph/op/replace_slice.hpp" -#include "ngraph/op/constant.hpp" -#include "ngraph/op/slice.hpp" - -NGRAPH_SUPPRESS_DEPRECATED_START - -using namespace std; -using namespace ngraph; - -constexpr NodeTypeInfo op::ReplaceSlice::type_info; - -op::ReplaceSlice::ReplaceSlice(const Output& arg0, - const Output& arg1, - const Coordinate& lower_bounds, - const Coordinate& upper_bounds, - const Strides& strides) - : Op({arg0, arg1}) - , m_lower_bounds(lower_bounds) - , m_upper_bounds(upper_bounds) - , m_strides(strides) -{ - constructor_validate_and_infer_types(); -} - -op::ReplaceSlice::ReplaceSlice(const Output& arg0, - const Output& arg1, - const Coordinate& lower_bounds, - const Coordinate& upper_bounds) - : Op({arg0, arg1}) - , m_lower_bounds(lower_bounds) - , m_upper_bounds(upper_bounds) - , m_strides(Strides(lower_bounds.size(), 1)) -{ - constructor_validate_and_infer_types(); -} - -void op::ReplaceSlice::validate_and_infer_types() -{ - // An empty stride vector with lower_bounds/upper_bounds filled in means that we need to - // construct the default value. 
- if (m_strides.size() == 0) - { - m_strides = Strides(m_lower_bounds.size(), 1); - } - - const PartialShape& arg0_shape = get_input_partial_shape(0); - const PartialShape& arg1_shape = get_input_partial_shape(1); - Dimension merged_args_rank; - - NODE_VALIDATION_CHECK(this, - Dimension::merge(merged_args_rank, arg0_shape.rank(), arg1_shape.rank()), - "Argument ranks do not match (arg0 shape: ", - arg0_shape, - ", arg1 shape: ", - arg1_shape, - ")."); - - element::Type arg0_et = get_input_element_type(0); - element::Type arg1_et = get_input_element_type(1); - element::Type merged_args_et; - - NODE_VALIDATION_CHECK(this, - element::Type::merge(merged_args_et, arg0_et, arg1_et), - "Argument element types do not match (arg0 element type: ", - arg0_et, - ", arg1 element type: ", - arg1_et, - ")."); - - NODE_VALIDATION_CHECK(this, - m_lower_bounds.size() == m_upper_bounds.size() && - m_lower_bounds.size() == m_strides.size(), - "Ranks of lower bounds (", - m_lower_bounds, - "), upper bounds (", - m_upper_bounds, - ") and strides (", - m_strides, - ") do not match."); - - size_t output_rank = m_upper_bounds.size(); - - for (size_t i = 0; i < output_rank; i++) - { - NODE_VALIDATION_CHECK(this, - m_lower_bounds[i] <= m_upper_bounds[i], - "Lower bound for slice is greater than upper bound at axis ", - i, - " (lower bounds: ", - m_lower_bounds, - ", upper bounds: ", - m_upper_bounds, - ")."); - - NODE_VALIDATION_CHECK(this, - m_strides[i] != 0, - "Stride for slice is zero at axis ", - i, - " (strides: ", - m_strides, - ")."); - } - - NODE_VALIDATION_CHECK(this, - merged_args_rank.is_dynamic() || - merged_args_rank.get_length() == output_rank, - "Argument ranks do not match the rank of the lower bounds (", - m_lower_bounds, - "), upper bounds (", - m_upper_bounds, - "), and strides (", - m_strides, - ")."); - - std::vector sliced_dims(output_rank); - - for (size_t i = 0; i < output_rank; i++) - { - NODE_VALIDATION_CHECK(this, - arg0_shape.rank().is_dynamic() || arg0_shape[i].is_dynamic() || - m_upper_bounds[i] <= arg0_shape[i].get_length(), - "Upper bound for slice at axis ", - i, - " is out of range ", - "(upper bounds: ", - m_upper_bounds, - ", argument shape: ", - arg0_shape, - ")."); - - size_t sliced_dim = m_upper_bounds[i] - m_lower_bounds[i]; - sliced_dim = sliced_dim / m_strides[i] + ((sliced_dim % m_strides[i] == 0) ? 0 : 1); - sliced_dims[i] = sliced_dim; - } - - PartialShape slice_shape{sliced_dims}; - - NODE_VALIDATION_CHECK(this, - arg1_shape.compatible(slice_shape), - "Shape of replacement tensor (", - arg1_shape, - ") does not match the slice shape ", - "(", - slice_shape, - ")."); - - // Slight corner case here: if arg0 was rank-unknown, we can go ahead and set the output rank - // because the attribs will have given us enough info. - PartialShape result_shape = - (arg0_shape.rank().is_static()) - ? 
arg0_shape
-            : PartialShape(std::vector<Dimension>(output_rank, Dimension::dynamic()));
-
-    set_output_type(0, merged_args_et, result_shape);
-}
-
-shared_ptr<Node> op::ReplaceSlice::clone_with_new_inputs(const OutputVector& new_args) const
-{
-    check_new_args_count(this, new_args);
-    return make_shared<ReplaceSlice>(
-        new_args.at(0), new_args.at(1), m_lower_bounds, m_upper_bounds, m_strides);
-}
diff --git a/ngraph/test/CMakeLists.txt b/ngraph/test/CMakeLists.txt
index 78be12a3c5a..dd214af4e16 100644
--- a/ngraph/test/CMakeLists.txt
+++ b/ngraph/test/CMakeLists.txt
@@ -159,7 +159,6 @@ set(SRC
     type_prop/reduce_l1.cpp
     type_prop/reduce_l2.cpp
     type_prop/reorg_yolo.cpp
-    type_prop/replace_slice.cpp
     type_prop/reshape.cpp
     type_prop/reverse.cpp
     type_prop/reverse_sequence.cpp
@@ -328,7 +327,6 @@ set(MULTI_TEST_SRC
     backend/region_yolo.in.cpp
     backend/relu.in.cpp
     backend/reorg_yolo.in.cpp
-    backend/replace_slice.in.cpp
     backend/reshape.in.cpp
     backend/reverse_sequence.in.cpp
     backend/reverse.in.cpp
diff --git a/ngraph/test/backend/replace_slice.in.cpp b/ngraph/test/backend/replace_slice.in.cpp
deleted file mode 100644
index 073572ab419..00000000000
--- a/ngraph/test/backend/replace_slice.in.cpp
+++ /dev/null
@@ -1,259 +0,0 @@
-//*****************************************************************************
-// Copyright 2017-2020 Intel Corporation
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//***************************************************************************** - -#include "gtest/gtest.h" -#include "ngraph/ngraph.hpp" -#include "ngraph/runtime/tensor.hpp" -#include "runtime/backend.hpp" -#include "util/all_close.hpp" -#include "util/all_close_f.hpp" -#include "util/ndarray.hpp" -#include "util/test_control.hpp" -#include "util/test_tools.hpp" - -NGRAPH_SUPPRESS_DEPRECATED_START - -using namespace std; -using namespace ngraph; - -static string s_manifest = "${MANIFEST}"; - -NGRAPH_TEST(${BACKEND_NAME}, replace_slice_scalar) -{ - Shape shape_a{}; - auto A = make_shared(element::f32, shape_a); - Shape shape_b{}; - auto B = make_shared(element::f32, shape_b); - Shape shape_r{}; - auto r = make_shared(A, B, Coordinate{}, Coordinate{}); - auto f = make_shared(r, ParameterVector{A, B}); - - auto backend = runtime::Backend::create("${BACKEND_NAME}"); - - // Create some tensors for input/output - auto a = backend->create_tensor(element::f32, shape_a); - copy_data(a, vector{312}); - auto b = backend->create_tensor(element::f32, shape_b); - copy_data(b, vector{808}); - auto result = backend->create_tensor(element::f32, shape_r); - - auto handle = backend->compile(f); - handle->call_with_validate({result}, {a, b}); - EXPECT_TRUE(test::all_close_f( - (vector{808}), read_vector(result), MIN_FLOAT_TOLERANCE_BITS)); -} - -NGRAPH_TEST(${BACKEND_NAME}, replace_slice_matrix_inplace) -{ - Shape shape_a{4, 4}; - auto A = make_shared(element::f32, shape_a); - auto abs_A = make_shared(A); - - Shape shape_b{3, 2}; - auto B = make_shared(element::f32, shape_b); - Shape shape_r{4, 4}; - auto r = make_shared(abs_A, B, Coordinate{0, 1}, Coordinate{3, 3}); - auto abs_r = make_shared(r); - auto f = make_shared(abs_r, ParameterVector{A, B}); - - auto backend = runtime::Backend::create("${BACKEND_NAME}"); - - // Create some tensors for input/output - auto a = backend->create_tensor(element::f32, shape_a); - copy_data(a, vector{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}); - auto b = backend->create_tensor(element::f32, shape_b); - copy_data(b, vector{102, 103, 106, 107, 110, 111}); - auto result = backend->create_tensor(element::f32, shape_r); - - auto handle = backend->compile(f); - handle->call_with_validate({result}, {a, b}); - EXPECT_TRUE(test::all_close_f( - (vector{1, 102, 103, 4, 5, 106, 107, 8, 9, 110, 111, 12, 13, 14, 15, 16}), - read_vector(result), - MIN_FLOAT_TOLERANCE_BITS)); -} - -NGRAPH_TEST(${BACKEND_NAME}, replace_slice_matrix) -{ - Shape shape_a{4, 4}; - auto A = make_shared(element::f32, shape_a); - Shape shape_b{3, 2}; - auto B = make_shared(element::f32, shape_b); - Shape shape_r{4, 4}; - auto r = make_shared(A, B, Coordinate{0, 1}, Coordinate{3, 3}); - auto f = make_shared(r, ParameterVector{A, B}); - - auto backend = runtime::Backend::create("${BACKEND_NAME}"); - - // Create some tensors for input/output - auto a = backend->create_tensor(element::f32, shape_a); - copy_data(a, vector{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}); - auto b = backend->create_tensor(element::f32, shape_b); - copy_data(b, vector{102, 103, 106, 107, 110, 111}); - auto result = backend->create_tensor(element::f32, shape_r); - - auto handle = backend->compile(f); - handle->call_with_validate({result}, {a, b}); - EXPECT_TRUE(test::all_close_f( - (vector{1, 102, 103, 4, 5, 106, 107, 8, 9, 110, 111, 12, 13, 14, 15, 16}), - read_vector(result), - MIN_FLOAT_TOLERANCE_BITS)); -} - -NGRAPH_TEST(${BACKEND_NAME}, replace_slice_vector) -{ - Shape shape_a{16}; - auto A = 
make_shared(element::f32, shape_a); - Shape shape_b{12}; - auto B = make_shared(element::f32, shape_b); - Shape shape_r{16}; - auto r = make_shared(A, B, Coordinate{2}, Coordinate{14}); - auto f = make_shared(r, ParameterVector{A, B}); - - auto backend = runtime::Backend::create("${BACKEND_NAME}"); - - // Create some tensors for input/output - auto a = backend->create_tensor(element::f32, shape_a); - copy_data(a, vector{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}); - auto b = backend->create_tensor(element::f32, shape_b); - copy_data(b, vector{102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113}); - auto result = backend->create_tensor(element::f32, shape_r); - - auto handle = backend->compile(f); - handle->call_with_validate({result}, {a, b}); - EXPECT_TRUE(test::all_close_f( - (vector{0, 1, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 14, 15}), - read_vector(result), - MIN_FLOAT_TOLERANCE_BITS)); -} - -NGRAPH_TEST(${BACKEND_NAME}, replace_slice_3d) -{ - Shape shape_a{4, 4, 4}; - auto A = make_shared(element::f32, shape_a); - Shape shape_b{2, 2, 2}; - auto B = make_shared(element::f32, shape_b); - Shape shape_r{4, 4, 4}; - auto r = make_shared(A, B, Coordinate{1, 1, 1}, Coordinate{3, 3, 3}); - auto f = make_shared(r, ParameterVector{A, B}); - - auto backend = runtime::Backend::create("${BACKEND_NAME}"); - - // Create some tensors for input/output - auto a = backend->create_tensor(element::f32, shape_a); - copy_data(a, vector{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63}); - auto b = backend->create_tensor(element::f32, shape_b); - copy_data(b, vector{921, 922, 925, 926, 937, 938, 941, 942}); - auto result = backend->create_tensor(element::f32, shape_r); - - auto handle = backend->compile(f); - handle->call_with_validate({result}, {a, b}); - EXPECT_TRUE(test::all_close_f( - (vector{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - - 16, 17, 18, 19, 20, 921, 922, 23, 24, 925, 926, 27, 28, 29, 30, 31, - - 32, 33, 34, 35, 36, 937, 938, 39, 40, 941, 942, 43, 44, 45, 46, 47, - - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63}), - read_vector(result))); -} - -NGRAPH_TEST(${BACKEND_NAME}, replace_slice_3d_strided) -{ - Shape shape_a{4, 4, 4}; - auto A = make_shared(element::f32, shape_a); - Shape shape_b{2, 2, 2}; - auto B = make_shared(element::f32, shape_b); - Shape shape_r{4, 4, 4}; - auto r = make_shared( - A, B, Coordinate{0, 0, 0}, Coordinate{4, 4, 4}, Strides{2, 2, 2}); - auto f = make_shared(r, ParameterVector{A, B}); - - auto backend = runtime::Backend::create("${BACKEND_NAME}"); - - // Create some tensors for input/output - auto a = backend->create_tensor(element::f32, shape_a); - copy_data(a, vector{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63}); - auto b = backend->create_tensor(element::f32, shape_b); - copy_data(b, vector{900, 902, 908, 910, 932, 934, 940, 942}); - auto result = backend->create_tensor(element::f32, shape_r); - - auto handle = backend->compile(f); - handle->call_with_validate({result}, {a, b}); - EXPECT_TRUE(test::all_close_f( - (vector{900, 1, 902, 3, 4, 5, 6, 7, 908, 9, 910, 11, 12, 13, 14, 15, - 
- 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - - 932, 33, 934, 35, 36, 37, 38, 39, 940, 41, 942, 43, 44, 45, 46, 47, - - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63}), - read_vector(result), - MIN_FLOAT_TOLERANCE_BITS)); -} - -NGRAPH_TEST(${BACKEND_NAME}, replace_slice_3d_strided_different_strides) -{ - Shape shape_a{4, 4, 4}; - auto A = make_shared(element::f32, shape_a); - Shape shape_b{2, 2, 2}; - auto B = make_shared(element::f32, shape_b); - Shape shape_r{4, 4, 4}; - auto r = make_shared( - A, B, Coordinate{0, 0, 0}, Coordinate{4, 4, 4}, Strides{2, 2, 3}); - auto f = make_shared(r, ParameterVector{A, B}); - - auto backend = runtime::Backend::create("${BACKEND_NAME}"); - - // Create some tensors for input/output - auto a = backend->create_tensor(element::f32, shape_a); - copy_data(a, vector{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63}); - auto b = backend->create_tensor(element::f32, shape_b); - copy_data(b, vector{900, 903, 908, 911, 932, 935, 940, 943}); - auto result = backend->create_tensor(element::f32, shape_r); - - auto handle = backend->compile(f); - handle->call_with_validate({result}, {a, b}); - EXPECT_TRUE(test::all_close_f( - (vector{900, 1, 2, 903, 4, 5, 6, 7, 908, 9, 10, 911, 12, 13, 14, 15, - - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - - 932, 33, 34, 935, 36, 37, 38, 39, 940, 41, 42, 943, 44, 45, 46, 47, - - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63}), - read_vector(result), - MIN_FLOAT_TOLERANCE_BITS)); -} diff --git a/ngraph/test/op_is.cpp b/ngraph/test/op_is.cpp index faf6c4cc5d5..3038a88cc27 100644 --- a/ngraph/test/op_is.cpp +++ b/ngraph/test/op_is.cpp @@ -641,15 +641,6 @@ namespace EXPECT_FALSE(op::is_binary_elementwise_logical(&node)); } - void op_is_ReplaceSlice() - { - op::ReplaceSlice node; - EXPECT_FALSE(op::is_unary_elementwise_arithmetic(&node)); - EXPECT_FALSE(op::is_binary_elementwise_arithmetic(&node)); - EXPECT_FALSE(op::is_binary_elementwise_comparison(&node)); - EXPECT_FALSE(op::is_binary_elementwise_logical(&node)); - } - void op_is_Reshape() { op::Reshape node; diff --git a/ngraph/test/runtime/interpreter/int_executable.hpp b/ngraph/test/runtime/interpreter/int_executable.hpp index 4d62d67fa11..cf685d0643e 100644 --- a/ngraph/test/runtime/interpreter/int_executable.hpp +++ b/ngraph/test/runtime/interpreter/int_executable.hpp @@ -79,7 +79,6 @@ #include "ngraph/runtime/reference/region_yolo.hpp" #include "ngraph/runtime/reference/relu.hpp" #include "ngraph/runtime/reference/reorg_yolo.hpp" -#include "ngraph/runtime/reference/replace_slice.hpp" #include "ngraph/runtime/reference/reshape.hpp" #include "ngraph/runtime/reference/result.hpp" #include "ngraph/runtime/reference/reverse.hpp" @@ -1246,19 +1245,6 @@ protected: args[0]->get_data_ptr(), out[0]->get_data_ptr(), element_count); break; } - case OP_TYPEID::ReplaceSlice: - { - const op::ReplaceSlice* slice = static_cast(&node); - reference::replace_slice(args[0]->get_data_ptr(), - args[1]->get_data_ptr(), - out[0]->get_data_ptr(), - node.get_input_shape(1), - slice->get_lower_bounds(), - slice->get_upper_bounds(), - slice->get_strides(), - node.get_output_shape(0)); - break; - } case OP_TYPEID::Reverse: { const op::Reverse* reverse = static_cast(&node); diff --git a/ngraph/test/runtime/opset0_tbl.hpp 
b/ngraph/test/runtime/opset0_tbl.hpp index fe953060cc0..30809e0a959 100644 --- a/ngraph/test/runtime/opset0_tbl.hpp +++ b/ngraph/test/runtime/opset0_tbl.hpp @@ -109,7 +109,6 @@ NGRAPH_OP(QuantizedConvolution, ngraph::op) NGRAPH_OP(QuantizedDot, ngraph::op) NGRAPH_OP(Range, ngraph::op) NGRAPH_OP(Relu, ngraph::op) -NGRAPH_OP(ReplaceSlice, ngraph::op) NGRAPH_OP(Reshape, ngraph::op) NGRAPH_OP(Result, ngraph::op) NGRAPH_OP(Reverse, ngraph::op) diff --git a/ngraph/test/runtime/pass/dyn_elimination.cpp b/ngraph/test/runtime/pass/dyn_elimination.cpp index 2ce39a09e76..dbdb7043b1a 100644 --- a/ngraph/test/runtime/pass/dyn_elimination.cpp +++ b/ngraph/test/runtime/pass/dyn_elimination.cpp @@ -19,7 +19,6 @@ #include "dyn_elimination.hpp" #include "ngraph/op/broadcast.hpp" #include "ngraph/op/range.hpp" -#include "ngraph/op/replace_slice.hpp" #include "ngraph/op/reshape.hpp" #include "ngraph/op/slice.hpp" #include "ngraph/op/transpose.hpp" diff --git a/ngraph/test/type_prop/replace_slice.cpp b/ngraph/test/type_prop/replace_slice.cpp deleted file mode 100644 index b236104996d..00000000000 --- a/ngraph/test/type_prop/replace_slice.cpp +++ /dev/null @@ -1,720 +0,0 @@ -//***************************************************************************** -// Copyright 2017-2020 Intel Corporation -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-//***************************************************************************** - -#include "gtest/gtest.h" -#include "ngraph/ngraph.hpp" -#include "util/type_prop.hpp" - -NGRAPH_SUPPRESS_DEPRECATED_START - -using namespace std; -using namespace ngraph; - -TEST(type_prop, replace_slice_deduce_vector) -{ - auto param0 = make_shared(element::f32, Shape{6}); - auto param1 = make_shared(element::f32, Shape{3}); - auto rsl = make_shared(param0, param1, Coordinate{2}, Coordinate{5}); - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_EQ(rsl->get_shape(), (Shape{6})); -} - -TEST(type_prop, replace_slice_deduce_matrix) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{3, 6}); - auto rsl = make_shared(param0, param1, Coordinate{2, 1}, Coordinate{5, 7}); - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_EQ(rsl->get_shape(), (Shape{6, 8})); -} - -TEST(type_prop, replace_slice_deduce_matrix_strided) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{1, 3}); - auto rsl = make_shared( - param0, param1, Coordinate{2, 1}, Coordinate{5, 7}, Strides{3, 2}); - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_EQ(rsl->get_shape(), (Shape{6, 8})); -} - -TEST(type_prop, replace_slice_deduce_matrix_strided_uneven) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{1, 2}); - auto rsl = make_shared( - param0, param1, Coordinate{2, 1}, Coordinate{5, 7}, Strides{3, 4}); - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_EQ(rsl->get_shape(), (Shape{6, 8})); -} - -TEST(type_prop, replace_slice_deduce_vector_edge) -{ - auto param0 = make_shared(element::f32, Shape{6}); - auto param1 = make_shared(element::f32, Shape{6}); - auto rsl = make_shared(param0, param1, Coordinate{0}, Coordinate{6}); - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_EQ(rsl->get_shape(), (Shape{6})); -} - -TEST(type_prop, replace_slice_deduce_matrix_edge) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{6, 8}); - auto rsl = make_shared(param0, param1, Coordinate{0, 0}, Coordinate{6, 8}); - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_EQ(rsl->get_shape(), (Shape{6, 8})); -} - -TEST(type_prop, replace_slice_deduce_matrix_zero_cols) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{6, 0}); - auto rsl = make_shared(param0, param1, Coordinate{0, 0}, Coordinate{6, 0}); - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_EQ(rsl->get_shape(), (Shape{6, 8})); -} - -TEST(type_prop, replace_slice_deduce_matrix_zero_zero) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{0, 0}); - auto rsl = make_shared(param0, param1, Coordinate{0, 0}, Coordinate{0, 0}); - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_EQ(rsl->get_shape(), (Shape{6, 8})); -} - -TEST(type_prop, replace_slice_deduce_vector_invalid_strides) -{ - auto param0 = make_shared(element::f32, Shape{6}); - auto param1 = make_shared(element::f32, Shape{4}); - try - { - auto sl = make_shared( - param0, param1, Coordinate{0}, Coordinate{7}, Strides{1, 2}); - // Should have thrown, so fail if it didn't - FAIL() << "Invalid slice strides not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING( - error.what(), - std::string("Ranks of lower bounds 
(Coordinate{0}), upper bounds " - "(Coordinate{7}) and strides (Strides{1, 2}) do not match")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_arg_rank_mismatch) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{3, 6, 5}); - try - { - auto rsl = - make_shared(param0, param1, Coordinate{2, 1}, Coordinate{5, 7}); - // Should have thrown, so fail if it didn't - FAIL() << "Argument rank mismatch not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), std::string("Argument ranks do not match")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_arg_element_type_mismatch) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::i32, Shape{3, 6}); - try - { - auto rsl = - make_shared(param0, param1, Coordinate{2, 1}, Coordinate{5, 7}); - // Should have thrown, so fail if it didn't - FAIL() << "Argument element type mismatch not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), std::string("Argument element types do not match")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_slice_shape_mismatch) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{3, 6}); - try - { - auto rsl = - make_shared(param0, param1, Coordinate{1, 1}, Coordinate{5, 7}); - // Should have thrown, so fail if it didn't - FAIL() << "Slice shape mismatch not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING( - error.what(), - std::string( - "Shape of replacement tensor ({3,6}) does not match the slice shape ({4,6})")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_slice_shape_mismatch_strided) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{4, 6}); - try - { - auto rsl = make_shared( - param0, param1, Coordinate{1, 1}, Coordinate{5, 7}, Strides{1, 2}); - // Should have thrown, so fail if it didn't - FAIL() << "Slice shape mismatch not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING( - error.what(), - std::string( - "Shape of replacement tensor ({4,6}) does not match the slice shape ({4,3})")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_vector_edge_upper_oob) -{ - auto param0 = make_shared(element::f32, Shape{6}); - auto param1 = make_shared(element::f32, Shape{7}); - try - { - auto rsl = make_shared(param0, param1, Coordinate{0}, Coordinate{7}); - // Should have thrown, so fail if it didn't - FAIL() << "Upper bound out of range not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), - std::string("Upper bound for slice at axis 0 is out of range")); - } - catch (...) 
- { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_edge_upper_oob) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{6, 9}); - try - { - auto rsl = - make_shared(param0, param1, Coordinate{0, 0}, Coordinate{6, 9}); - // Should have thrown, so fail if it didn't - FAIL() << "Upper bound out of range not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), - std::string("Upper bound for slice at axis 1 is out of range")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_vector_lower_above_upper) -{ - auto param0 = make_shared(element::f32, Shape{6}); - auto param1 = make_shared(element::f32, Shape{0}); - try - { - auto rsl = make_shared(param0, param1, Coordinate{3}, Coordinate{2}); - // Should have thrown, so fail if it didn't - FAIL() << "Lower bound above upper not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING( - error.what(), - std::string("Lower bound for slice is greater than upper bound at axis 0")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_lower_above_upper) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{6, 0}); - try - { - auto rsl = - make_shared(param0, param1, Coordinate{0, 5}, Coordinate{6, 4}); - // Should have thrown, so fail if it didn't - FAIL() << "Lower bound above upper not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING( - error.what(), - std::string("Lower bound for slice is greater than upper bound at axis 1")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_lower_missing) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{6, 6}); - try - { - auto rsl = make_shared(param0, param1, Coordinate{0}, Coordinate{5, 5}); - // Should have thrown, so fail if it didn't - FAIL() << "Missing lower bound coordinate not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING( - error.what(), - std::string("Ranks of lower bounds (Coordinate{0}), upper bounds " - "(Coordinate{5, 5}) and strides (Strides{1}) do not match")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_upper_missing) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{6, 6}); - try - { - auto rsl = make_shared(param0, param1, Coordinate{0, 0}, Coordinate{5}); - // Should have thrown, so fail if it didn't - FAIL() << "Missing upper bound coordinate not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING( - error.what(), - std::string("Ranks of lower bounds (Coordinate{0, 0}), upper bounds " - "(Coordinate{5}) and strides (Strides{1, 1}) do not match")); - } - catch (...) 
- { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_lower_extra) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{6, 6}); - try - { - auto rsl = - make_shared(param0, param1, Coordinate{0, 0, 0}, Coordinate{5, 5}); - // Should have thrown, so fail if it didn't - FAIL() << "Extra lower bound coordinate not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), - std::string("Ranks of lower bounds (Coordinate{0, 0, " - "0}), upper bounds (Coordinate{5, 5}) and " - "strides (Strides{1, 1, 1}) do not match")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_deduce_matrix_upper_extra) -{ - auto param0 = make_shared(element::f32, Shape{6, 8}); - auto param1 = make_shared(element::f32, Shape{6, 6}); - try - { - auto rsl = - make_shared(param0, param1, Coordinate{0, 0}, Coordinate{5, 5, 5}); - // Should have thrown, so fail if it didn't - FAIL() << "Extra upper bound coordinate not detected"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), - std::string("Ranks of lower bounds (Coordinate{0, 0}), " - "upper bounds (Coordinate{5, 5, 5}) and " - "strides (Strides{1, 1}) do not match")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_partial_input_rank_dynamic_replacement_rank_dynamic_attribs_ok) -{ - PartialShape input_shape{PartialShape::dynamic()}; - PartialShape replacement_shape{PartialShape::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - auto rsl = make_shared(param0, param1, lower_bounds, upper_bounds, strides); - - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_TRUE(rsl->get_output_partial_shape(0).same_scheme(PartialShape{ - Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()})); -} - -TEST(type_prop, - replace_slice_partial_input_rank_dynamic_replacement_rank_dynamic_attribs_rank_mismatch) -{ - PartialShape input_shape{PartialShape::dynamic()}; - PartialShape replacement_shape{PartialShape::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - try - { - auto rsl = - make_shared(param0, param1, lower_bounds, upper_bounds, strides); - // Should have thrown, so fail if it didn't - FAIL() << "Mismatch of lower-bounds/upper-bounds/strides ranks not detected (argument " - "rank-dynamic)"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING( - error.what(), - std::string("Ranks of lower bounds (Coordinate{1, 2, 3, 4}), upper bounds " - "(Coordinate{1, 3, 5}) and strides (Strides{1, 1, 1, 2}) do not match")); - } - catch (...) 
- { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, - replace_slice_partial_input_rank_dynamic_replacement_rank_dynamic_attribs_bounds_crossing) -{ - PartialShape input_shape{PartialShape::dynamic()}; - PartialShape replacement_shape{PartialShape::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 8}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - try - { - auto rsl = - make_shared(param0, param1, lower_bounds, upper_bounds, strides); - // Should have thrown, so fail if it didn't - FAIL() << "Crossing lower/upper bounds not detected (argument rank-dynamic)"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING( - error.what(), - std::string("Lower bound for slice is greater than upper bound at axis 3 (lower " - "bounds: Coordinate{1, 2, 3, 8}, upper bounds: Coordinate{1, 3, 5, 7})")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_partial_input_rank_static_dynamic_replacement_rank_dynamic_ok) -{ - PartialShape input_shape{ - Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()}; - PartialShape replacement_shape{PartialShape::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - auto rsl = make_shared(param0, param1, lower_bounds, upper_bounds, strides); - - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_TRUE(rsl->get_output_partial_shape(0).same_scheme(PartialShape{ - Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()})); -} - -TEST(type_prop, - replace_slice_partial_input_rank_static_dynamic_some_dims_known_replacement_rank_dynamic_ok) -{ - PartialShape input_shape{2, 4, 10, Dimension::dynamic()}; - PartialShape replacement_shape{PartialShape::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - auto rsl = make_shared(param0, param1, lower_bounds, upper_bounds, strides); - - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_TRUE( - rsl->get_output_partial_shape(0).same_scheme(PartialShape{2, 4, 10, Dimension::dynamic()})); -} - -TEST( - type_prop, - replace_slice_partial_input_rank_static_dynamic_replacement_rank_dynamic_attribs_rank_mismatches_input) -{ - PartialShape input_shape{Dimension::dynamic(), - Dimension::dynamic(), - Dimension::dynamic(), - Dimension::dynamic(), - Dimension::dynamic()}; - PartialShape replacement_shape{PartialShape::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - try - { - auto rsl = - make_shared(param0, param1, lower_bounds, upper_bounds, strides); - // Should have thrown, so fail if it didn't - FAIL() << "Mismatch of attrib ranks with arg ranks not detected (argument rank-static " - "dynamic)"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), - std::string("Argument ranks do not match the 
rank of the lower bounds " - "(Coordinate{1, 2, 3, 4}), upper bounds (Coordinate{1, 3, " - "5, 7}), and strides (Strides{1, 1, 1, 2})")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST( - type_prop, - replace_slice_partial_input_rank_static_dynamic_some_dims_known_replacement_rank_dynamic_upper_bounds_oob) -{ - PartialShape input_shape{2, 2, 10, Dimension::dynamic()}; - PartialShape replacement_shape{PartialShape::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - try - { - auto rsl = - make_shared(param0, param1, lower_bounds, upper_bounds, strides); - // Should have thrown, so fail if it didn't - FAIL() << "Upper bounds out of bounds not detected (argument rank-static dynamic)"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), - std::string("Upper bound for slice at axis 1 is out of " - "range (upper bounds: Coordinate{1, 3, 5, " - "7}, argument shape: {2,2,10,?})")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST(type_prop, replace_slice_partial_input_rank_dynamic_replacement_rank_static_dynamic_ok) -{ - PartialShape input_shape{PartialShape::dynamic()}; - PartialShape replacement_shape{ - Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - auto rsl = make_shared(param0, param1, lower_bounds, upper_bounds, strides); - - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_TRUE(rsl->get_output_partial_shape(0).same_scheme(PartialShape{ - Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()})); -} - -TEST(type_prop, - replace_slice_partial_input_rank_dynamic_replacement_rank_static_dynamic_some_dims_known_ok) -{ - PartialShape input_shape{PartialShape::dynamic()}; - PartialShape replacement_shape{0, Dimension::dynamic(), Dimension::dynamic(), 2}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - auto rsl = make_shared(param0, param1, lower_bounds, upper_bounds, strides); - - ASSERT_EQ(rsl->get_element_type(), element::f32); - ASSERT_TRUE(rsl->get_output_partial_shape(0).same_scheme(PartialShape{ - Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()})); -} - -TEST( - type_prop, - replace_slice_partial_input_rank_dynamic_replacement_rank_static_dynamic_some_dims_known_attribs_mismatch_replacement_shape) -{ - PartialShape input_shape{PartialShape::dynamic()}; - PartialShape replacement_shape{1, Dimension::dynamic(), Dimension::dynamic(), 2}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - try - { - auto rsl = - make_shared(param0, param1, lower_bounds, upper_bounds, strides); - // Should have thrown, so fail if it didn't - FAIL() << "Mismatch of shape inferred from 
attributes with provided replacement shape not " - "detected (rank-dynamic/rank-static dynamic inputs)"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), - std::string("Shape of replacement tensor ({1,?,?,2}) does not match " - "the slice shape ({0,1,2,2})")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST( - type_prop, - replace_slice_partial_input_rank_dynamic_replacement_rank_static_dynamic_attribs_rank_mismatches_replacement) -{ - PartialShape input_shape{PartialShape::dynamic()}; - PartialShape replacement_shape{Dimension::dynamic(), - Dimension::dynamic(), - Dimension::dynamic(), - Dimension::dynamic(), - Dimension::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - try - { - auto rsl = - make_shared(param0, param1, lower_bounds, upper_bounds, strides); - // Should have thrown, so fail if it didn't - FAIL() << "Mismatch of attrib ranks with arg ranks not detected (arguments " - "rank-dynamic/rank-static " - "dynamic)"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), - std::string("Argument ranks do not match the rank of the lower bounds " - "(Coordinate{1, 2, 3, 4}), upper bounds (Coordinate{1, 3, " - "5, 7}), and strides (Strides{1, 1, 1, 2})")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -} - -TEST( - type_prop, - replace_slice_partial_input_rank_static_dynamic_replacement_rank_static_dynamic_argument_ranks_mismatch) -{ - PartialShape input_shape{ - Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()}; - PartialShape replacement_shape{Dimension::dynamic(), - Dimension::dynamic(), - Dimension::dynamic(), - Dimension::dynamic(), - Dimension::dynamic()}; - Coordinate lower_bounds{1, 2, 3, 4}; - Coordinate upper_bounds{1, 3, 5, 7}; - Strides strides{1, 1, 1, 2}; - - auto param0 = make_shared(element::f32, input_shape); - auto param1 = make_shared(element::f32, replacement_shape); - try - { - auto rsl = - make_shared(param0, param1, lower_bounds, upper_bounds, strides); - // Should have thrown, so fail if it didn't - FAIL() << "Mismatching input/replacement ranks not detected (arguments both rank-static " - "dynamic)"; - } - catch (const NodeValidationFailure& error) - { - EXPECT_HAS_SUBSTRING(error.what(), std::string("Argument ranks do not match")); - } - catch (...) - { - FAIL() << "Deduced type check failed for unexpected reason"; - } -}