Remove obsoleted v0::ReplaceSlice (#2887)

This commit is contained in:
Mateusz Tabaka 2020-10-30 05:37:15 +01:00 committed by GitHub
parent d6a9ef3a8f
commit 7b45975af8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 0 additions and 1380 deletions

View File

@ -146,7 +146,6 @@ NGRAPH_OP(ReduceSum, ngraph::op::v1, 1)
NGRAPH_OP(RegionYolo, ngraph::op::v0, 0)
NGRAPH_OP(Relu, ngraph::op::v0, 0)
NGRAPH_OP(ReorgYolo, ngraph::op::v0, 0)
NGRAPH_OP(ReplaceSlice, ngraph::op::v0, 0)
NGRAPH_OP(Reshape, ngraph::op::v0, 0)
NGRAPH_OP(Reshape, ngraph::op::v1, 1)
NGRAPH_OP(Result, ngraph::op::v0, 0)

View File

@ -1,122 +0,0 @@
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include "ngraph/coordinate.hpp"
#include "ngraph/op/op.hpp"
#include "ngraph/strides.hpp"
namespace ngraph
{
    namespace op
    {
        namespace v0
        {
            /// \brief Takes two input tensors of identical rank, with the second tensor
            ///        no larger than the first in any dimension, and returns a copy of
            ///        the first input tensor with the specified slice overwritten by the
            ///        second input tensor.
            ///
            /// Attributes:
            /// - `lower_bounds`: the (inclusive) lower-bound coordinates \f$l_i\f$ of the
            ///   slice to be overwritten; e.g. \f$(1,2)\f$ starts the slice at row 1,
            ///   column 2.
            /// - `upper_bounds`: the (non-inclusive) upper-bound coordinates \f$u_i\f$;
            ///   e.g. \f$(5,4)\f$ ends the slice before row 5 and column 4.
            /// - `strides`: the strides \f$s_i\f$; e.g. \f$(1,3)\f$ takes every row and
            ///   every third column, starting at the lower bound.
            ///
            /// Inputs: `arg0` is a tensor of any shape and element type; `arg1` has the
            /// same element type and rank, with shape
            /// \f$d'_i = \lceil(u_i - l_i)\,/\,s_i\rceil\f$.
            /// Output: a tensor shaped like `arg0` whose in-slice elements come from
            /// `arg1` and whose remaining elements come from `arg0`.
            class NGRAPH_DEPRECATED(
                "This operation is deprecated and will be removed soon. Please do not use it.")
                NGRAPH_API ReplaceSlice : public Op
            {
                NGRAPH_SUPPRESS_DEPRECATED_START
            public:
                static constexpr NodeTypeInfo type_info{"ReplaceSlice", 0};
                const NodeTypeInfo& get_type_info() const override { return type_info; }
                ReplaceSlice() = default;
                /// \brief Constructs a tensor slice replacement operation.
                ///
                /// \param arg0 The tensor to overwrite into.
                /// \param arg1 The tensor to write into `arg0`.
                /// \param lower_bounds The axiswise lower bounds of the slice (inclusive).
                /// \param upper_bounds The axiswise upper bounds of the slice (exclusive).
                /// \param strides The slicing strides; for example, strides of `{n,m}`
                ///        means to take every nth row and every mth column of `arg0` as
                ///        part of the slice to be replaced.
                ReplaceSlice(const Output<Node>& arg0,
                             const Output<Node>& arg1,
                             const Coordinate& lower_bounds,
                             const Coordinate& upper_bounds,
                             const Strides& strides);
                /// \brief Constructs a tensor slice replacement operation with unit
                ///        strides; i.e., every element inside the bounding box will be
                ///        overwritten.
                ///
                /// \param arg0 The tensor to overwrite into.
                /// \param arg1 The tensor to write into `arg0`.
                /// \param lower_bounds The axiswise lower bounds of the slice (inclusive).
                /// \param upper_bounds The axiswise upper bounds of the slice (exclusive).
                ReplaceSlice(const Output<Node>& arg0,
                             const Output<Node>& arg1,
                             const Coordinate& lower_bounds,
                             const Coordinate& upper_bounds);
                virtual std::shared_ptr<Node>
                    clone_with_new_inputs(const OutputVector& new_args) const override;
                void validate_and_infer_types() override;
                /// \return The inclusive lower-bound coordinates.
                const Coordinate& get_lower_bounds() const { return m_lower_bounds; }
                void set_lower_bounds(const Coordinate& lower_bounds)
                {
                    m_lower_bounds = lower_bounds;
                }
                /// \return The exclusive upper-bound coordinates.
                const Coordinate& get_upper_bounds() const { return m_upper_bounds; }
                void set_upper_bounds(const Coordinate& upper_bounds)
                {
                    m_upper_bounds = upper_bounds;
                }
                /// \deprecated Misspelled legacy name; kept for backward compatibility.
                ///             Use set_upper_bounds instead.
                void set_uppper_bounds(const Coordinate& upper_bounds)
                {
                    set_upper_bounds(upper_bounds);
                }
                /// \return The slicing strides.
                const Strides& get_strides() const { return m_strides; }
                void set_strides(const Strides& strides) { m_strides = strides; }
            protected:
                Coordinate m_lower_bounds;
                Coordinate m_upper_bounds;
                Strides m_strides;
                NGRAPH_SUPPRESS_DEPRECATED_END
            };
        }
        NGRAPH_SUPPRESS_DEPRECATED_START
        using v0::ReplaceSlice;
        NGRAPH_SUPPRESS_DEPRECATED_END
    }
}

View File

@ -126,7 +126,6 @@
#include "ngraph/op/region_yolo.hpp"
#include "ngraph/op/relu.hpp"
#include "ngraph/op/reorg_yolo.hpp"
#include "ngraph/op/replace_slice.hpp"
#include "ngraph/op/reshape.hpp"
#include "ngraph/op/result.hpp"
#include "ngraph/op/reverse.hpp"

View File

@ -1,72 +0,0 @@
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include <cmath>
#include "ngraph/check.hpp"
#include "ngraph/coordinate_transform.hpp"
namespace ngraph
{
namespace runtime
{
namespace reference
{
template <typename T>
void replace_slice(const T* arg0, // replacement context
const T* arg1, // replacement value
T* out,
const Shape& arg1_shape,
const Coordinate& lower_bounds,
const Coordinate& upper_bounds,
const Strides& strides,
const Shape& out_shape)
{
// Step 1: Copy the entire replacement context to the output.
CoordinateTransform copy_transform(out_shape);
for (Coordinate copy_coord : copy_transform)
{
out[copy_transform.index(copy_coord)] = arg0[copy_transform.index(copy_coord)];
}
// Step 2: Overwrite the slice for replacement.
CoordinateTransform input_transform(arg1_shape);
CoordinateTransform output_transform(
out_shape, lower_bounds, upper_bounds, strides);
NGRAPH_CHECK(shape_size(input_transform.get_target_shape()) ==
shape_size(output_transform.get_target_shape()));
CoordinateTransform::Iterator output_it = output_transform.begin();
for (const Coordinate& input_coord : input_transform)
{
if (output_it == output_transform.end())
break;
const Coordinate& output_coord = *output_it;
out[output_transform.index(output_coord)] =
arg1[input_transform.index(input_coord)];
++output_it;
}
}
}
}
}

View File

@ -1,178 +0,0 @@
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "ngraph/op/replace_slice.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/slice.hpp"
NGRAPH_SUPPRESS_DEPRECATED_START
using namespace std;
using namespace ngraph;
constexpr NodeTypeInfo op::ReplaceSlice::type_info;
// Constructs a strided slice-replacement op over inputs {arg0, arg1}; bounds and
// strides are checked later in validate_and_infer_types(), triggered here via
// constructor_validate_and_infer_types().
op::ReplaceSlice::ReplaceSlice(const Output<Node>& arg0,
const Output<Node>& arg1,
const Coordinate& lower_bounds,
const Coordinate& upper_bounds,
const Strides& strides)
: Op({arg0, arg1})
, m_lower_bounds(lower_bounds)
, m_upper_bounds(upper_bounds)
, m_strides(strides)
{
constructor_validate_and_infer_types();
}
// Unit-stride overload: identical to the five-argument constructor with a
// stride of 1 along every axis.
op::ReplaceSlice::ReplaceSlice(const Output<Node>& arg0,
                               const Output<Node>& arg1,
                               const Coordinate& lower_bounds,
                               const Coordinate& upper_bounds)
    : ReplaceSlice(arg0, arg1, lower_bounds, upper_bounds, Strides(lower_bounds.size(), 1))
{
}
// Validates the bounds/strides attributes against the two argument shapes and
// sets output 0 to arg0's shape (or an all-dynamic shape of the attribute rank
// when arg0's rank is unknown) with the merged element type.
void op::ReplaceSlice::validate_and_infer_types()
{
// An empty stride vector with lower_bounds/upper_bounds filled in means that we need to
// construct the default value.
if (m_strides.size() == 0)
{
m_strides = Strides(m_lower_bounds.size(), 1);
}
const PartialShape& arg0_shape = get_input_partial_shape(0);
const PartialShape& arg1_shape = get_input_partial_shape(1);
// Both arguments must have compatible (mergeable) ranks.
Dimension merged_args_rank;
NODE_VALIDATION_CHECK(this,
Dimension::merge(merged_args_rank, arg0_shape.rank(), arg1_shape.rank()),
"Argument ranks do not match (arg0 shape: ",
arg0_shape,
", arg1 shape: ",
arg1_shape,
").");
// Both arguments must have compatible (mergeable) element types.
element::Type arg0_et = get_input_element_type(0);
element::Type arg1_et = get_input_element_type(1);
element::Type merged_args_et;
NODE_VALIDATION_CHECK(this,
element::Type::merge(merged_args_et, arg0_et, arg1_et),
"Argument element types do not match (arg0 element type: ",
arg0_et,
", arg1 element type: ",
arg1_et,
").");
// The three attribute vectors must all have the same length.
NODE_VALIDATION_CHECK(this,
m_lower_bounds.size() == m_upper_bounds.size() &&
m_lower_bounds.size() == m_strides.size(),
"Ranks of lower bounds (",
m_lower_bounds,
"), upper bounds (",
m_upper_bounds,
") and strides (",
m_strides,
") do not match.");
size_t output_rank = m_upper_bounds.size();
// Per-axis sanity: lower <= upper and stride != 0.
for (size_t i = 0; i < output_rank; i++)
{
NODE_VALIDATION_CHECK(this,
m_lower_bounds[i] <= m_upper_bounds[i],
"Lower bound for slice is greater than upper bound at axis ",
i,
" (lower bounds: ",
m_lower_bounds,
", upper bounds: ",
m_upper_bounds,
").");
NODE_VALIDATION_CHECK(this,
m_strides[i] != 0,
"Stride for slice is zero at axis ",
i,
" (strides: ",
m_strides,
").");
}
// The argument rank (when known) must equal the attribute rank.
NODE_VALIDATION_CHECK(this,
merged_args_rank.is_dynamic() ||
merged_args_rank.get_length() == output_rank,
"Argument ranks do not match the rank of the lower bounds (",
m_lower_bounds,
"), upper bounds (",
m_upper_bounds,
"), and strides (",
m_strides,
").");
// Compute the expected replacement-tensor shape: ceil((upper - lower) / stride)
// per axis, and check arg1 against it.
std::vector<Dimension> sliced_dims(output_rank);
for (size_t i = 0; i < output_rank; i++)
{
// Note: the rank().is_dynamic() disjunct short-circuits before arg0_shape[i]
// is evaluated, so we never index into a rank-dynamic shape.
NODE_VALIDATION_CHECK(this,
arg0_shape.rank().is_dynamic() || arg0_shape[i].is_dynamic() ||
m_upper_bounds[i] <= arg0_shape[i].get_length(),
"Upper bound for slice at axis ",
i,
" is out of range ",
"(upper bounds: ",
m_upper_bounds,
", argument shape: ",
arg0_shape,
").");
size_t sliced_dim = m_upper_bounds[i] - m_lower_bounds[i];
// Ceiling division: partial final steps still contribute one element.
sliced_dim = sliced_dim / m_strides[i] + ((sliced_dim % m_strides[i] == 0) ? 0 : 1);
sliced_dims[i] = sliced_dim;
}
PartialShape slice_shape{sliced_dims};
NODE_VALIDATION_CHECK(this,
arg1_shape.compatible(slice_shape),
"Shape of replacement tensor (",
arg1_shape,
") does not match the slice shape ",
"(",
slice_shape,
").");
// Slight corner case here: if arg0 was rank-unknown, we can go ahead and set the output rank
// because the attribs will have given us enough info.
PartialShape result_shape =
(arg0_shape.rank().is_static())
? arg0_shape
: PartialShape(std::vector<Dimension>(output_rank, Dimension::dynamic()));
set_output_type(0, merged_args_et, result_shape);
}
// Clones the node: same bounds/strides attributes, new input tensors.
shared_ptr<Node> op::ReplaceSlice::clone_with_new_inputs(const OutputVector& new_args) const
{
    check_new_args_count(this, new_args);
    const auto& replacement_context = new_args.at(0);
    const auto& replacement_value = new_args.at(1);
    return make_shared<ReplaceSlice>(
        replacement_context, replacement_value, m_lower_bounds, m_upper_bounds, m_strides);
}

View File

@ -159,7 +159,6 @@ set(SRC
type_prop/reduce_l1.cpp
type_prop/reduce_l2.cpp
type_prop/reorg_yolo.cpp
type_prop/replace_slice.cpp
type_prop/reshape.cpp
type_prop/reverse.cpp
type_prop/reverse_sequence.cpp
@ -328,7 +327,6 @@ set(MULTI_TEST_SRC
backend/region_yolo.in.cpp
backend/relu.in.cpp
backend/reorg_yolo.in.cpp
backend/replace_slice.in.cpp
backend/reshape.in.cpp
backend/reverse_sequence.in.cpp
backend/reverse.in.cpp

View File

@ -1,259 +0,0 @@
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/runtime/tensor.hpp"
#include "runtime/backend.hpp"
#include "util/all_close.hpp"
#include "util/all_close_f.hpp"
#include "util/ndarray.hpp"
#include "util/test_control.hpp"
#include "util/test_tools.hpp"
NGRAPH_SUPPRESS_DEPRECATED_START
using namespace std;
using namespace ngraph;
static string s_manifest = "${MANIFEST}";
// Degenerate case: replacing the (empty-coordinate) slice of a scalar replaces
// the whole value, so the output equals the replacement (808).
NGRAPH_TEST(${BACKEND_NAME}, replace_slice_scalar)
{
Shape shape_a{};
auto A = make_shared<op::Parameter>(element::f32, shape_a);
Shape shape_b{};
auto B = make_shared<op::Parameter>(element::f32, shape_b);
Shape shape_r{};
auto r = make_shared<op::ReplaceSlice>(A, B, Coordinate{}, Coordinate{});
auto f = make_shared<Function>(r, ParameterVector{A, B});
auto backend = runtime::Backend::create("${BACKEND_NAME}");
// Create some tensors for input/output
auto a = backend->create_tensor(element::f32, shape_a);
copy_data(a, vector<float>{312});
auto b = backend->create_tensor(element::f32, shape_b);
copy_data(b, vector<float>{808});
auto result = backend->create_tensor(element::f32, shape_r);
auto handle = backend->compile(f);
handle->call_with_validate({result}, {a, b});
EXPECT_TRUE(test::all_close_f(
(vector<float>{808}), read_vector<float>(result), MIN_FLOAT_TOLERANCE_BITS));
}
// Same as replace_slice_matrix, but wraps the op between Abs nodes so backends
// that compute ReplaceSlice in place on an intermediate tensor are exercised.
NGRAPH_TEST(${BACKEND_NAME}, replace_slice_matrix_inplace)
{
Shape shape_a{4, 4};
auto A = make_shared<op::Parameter>(element::f32, shape_a);
auto abs_A = make_shared<op::Abs>(A);
Shape shape_b{3, 2};
auto B = make_shared<op::Parameter>(element::f32, shape_b);
Shape shape_r{4, 4};
auto r = make_shared<op::ReplaceSlice>(abs_A, B, Coordinate{0, 1}, Coordinate{3, 3});
auto abs_r = make_shared<op::Abs>(r);
auto f = make_shared<Function>(abs_r, ParameterVector{A, B});
auto backend = runtime::Backend::create("${BACKEND_NAME}");
// Create some tensors for input/output
auto a = backend->create_tensor(element::f32, shape_a);
copy_data(a, vector<float>{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16});
auto b = backend->create_tensor(element::f32, shape_b);
copy_data(b, vector<float>{102, 103, 106, 107, 110, 111});
auto result = backend->create_tensor(element::f32, shape_r);
auto handle = backend->compile(f);
handle->call_with_validate({result}, {a, b});
// Rows 0-2, columns 1-2 are overwritten by B; everything else passes through.
EXPECT_TRUE(test::all_close_f(
(vector<float>{1, 102, 103, 4, 5, 106, 107, 8, 9, 110, 111, 12, 13, 14, 15, 16}),
read_vector<float>(result),
MIN_FLOAT_TOLERANCE_BITS));
}
// Overwrites the [0,3)x[1,3) window of a 4x4 matrix with a 3x2 replacement.
NGRAPH_TEST(${BACKEND_NAME}, replace_slice_matrix)
{
Shape shape_a{4, 4};
auto A = make_shared<op::Parameter>(element::f32, shape_a);
Shape shape_b{3, 2};
auto B = make_shared<op::Parameter>(element::f32, shape_b);
Shape shape_r{4, 4};
auto r = make_shared<op::ReplaceSlice>(A, B, Coordinate{0, 1}, Coordinate{3, 3});
auto f = make_shared<Function>(r, ParameterVector{A, B});
auto backend = runtime::Backend::create("${BACKEND_NAME}");
// Create some tensors for input/output
auto a = backend->create_tensor(element::f32, shape_a);
copy_data(a, vector<float>{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16});
auto b = backend->create_tensor(element::f32, shape_b);
copy_data(b, vector<float>{102, 103, 106, 107, 110, 111});
auto result = backend->create_tensor(element::f32, shape_r);
auto handle = backend->compile(f);
handle->call_with_validate({result}, {a, b});
EXPECT_TRUE(test::all_close_f(
(vector<float>{1, 102, 103, 4, 5, 106, 107, 8, 9, 110, 111, 12, 13, 14, 15, 16}),
read_vector<float>(result),
MIN_FLOAT_TOLERANCE_BITS));
}
// Overwrites elements [2,14) of a length-16 vector with a length-12 replacement.
NGRAPH_TEST(${BACKEND_NAME}, replace_slice_vector)
{
Shape shape_a{16};
auto A = make_shared<op::Parameter>(element::f32, shape_a);
Shape shape_b{12};
auto B = make_shared<op::Parameter>(element::f32, shape_b);
Shape shape_r{16};
auto r = make_shared<op::ReplaceSlice>(A, B, Coordinate{2}, Coordinate{14});
auto f = make_shared<Function>(r, ParameterVector{A, B});
auto backend = runtime::Backend::create("${BACKEND_NAME}");
// Create some tensors for input/output
auto a = backend->create_tensor(element::f32, shape_a);
copy_data(a, vector<float>{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15});
auto b = backend->create_tensor(element::f32, shape_b);
copy_data(b, vector<float>{102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113});
auto result = backend->create_tensor(element::f32, shape_r);
auto handle = backend->compile(f);
handle->call_with_validate({result}, {a, b});
EXPECT_TRUE(test::all_close_f(
(vector<float>{0, 1, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 14, 15}),
read_vector<float>(result),
MIN_FLOAT_TOLERANCE_BITS));
}
// Overwrites the unit-stride [1,3)^3 sub-cube of a 4x4x4 tensor.
// Fix: pass MIN_FLOAT_TOLERANCE_BITS to all_close_f, consistent with every
// other replace_slice test in this file (it was omitted here only).
NGRAPH_TEST(${BACKEND_NAME}, replace_slice_3d)
{
Shape shape_a{4, 4, 4};
auto A = make_shared<op::Parameter>(element::f32, shape_a);
Shape shape_b{2, 2, 2};
auto B = make_shared<op::Parameter>(element::f32, shape_b);
Shape shape_r{4, 4, 4};
auto r = make_shared<op::ReplaceSlice>(A, B, Coordinate{1, 1, 1}, Coordinate{3, 3, 3});
auto f = make_shared<Function>(r, ParameterVector{A, B});
auto backend = runtime::Backend::create("${BACKEND_NAME}");
// Create some tensors for input/output
auto a = backend->create_tensor(element::f32, shape_a);
copy_data(a, vector<float>{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63});
auto b = backend->create_tensor(element::f32, shape_b);
copy_data(b, vector<float>{921, 922, 925, 926, 937, 938, 941, 942});
auto result = backend->create_tensor(element::f32, shape_r);
auto handle = backend->compile(f);
handle->call_with_validate({result}, {a, b});
EXPECT_TRUE(test::all_close_f(
(vector<float>{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 921, 922, 23, 24, 925, 926, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 937, 938, 39, 40, 941, 942, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63}),
read_vector<float>(result),
MIN_FLOAT_TOLERANCE_BITS));
}
// Strided replacement: bounds [0,4)^3 with stride 2 on every axis touch the
// eight even-indexed corners of each 2x2x2 sub-lattice.
NGRAPH_TEST(${BACKEND_NAME}, replace_slice_3d_strided)
{
Shape shape_a{4, 4, 4};
auto A = make_shared<op::Parameter>(element::f32, shape_a);
Shape shape_b{2, 2, 2};
auto B = make_shared<op::Parameter>(element::f32, shape_b);
Shape shape_r{4, 4, 4};
auto r = make_shared<op::ReplaceSlice>(
A, B, Coordinate{0, 0, 0}, Coordinate{4, 4, 4}, Strides{2, 2, 2});
auto f = make_shared<Function>(r, ParameterVector{A, B});
auto backend = runtime::Backend::create("${BACKEND_NAME}");
// Create some tensors for input/output
auto a = backend->create_tensor(element::f32, shape_a);
copy_data(a, vector<float>{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63});
auto b = backend->create_tensor(element::f32, shape_b);
copy_data(b, vector<float>{900, 902, 908, 910, 932, 934, 940, 942});
auto result = backend->create_tensor(element::f32, shape_r);
auto handle = backend->compile(f);
handle->call_with_validate({result}, {a, b});
EXPECT_TRUE(test::all_close_f(
(vector<float>{900, 1, 902, 3, 4, 5, 6, 7, 908, 9, 910, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
932, 33, 934, 35, 36, 37, 38, 39, 940, 41, 942, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63}),
read_vector<float>(result),
MIN_FLOAT_TOLERANCE_BITS));
}
// Like replace_slice_3d_strided, but with a different stride (3) on the last
// axis, so only columns 0 and 3 are touched in each replaced row.
NGRAPH_TEST(${BACKEND_NAME}, replace_slice_3d_strided_different_strides)
{
Shape shape_a{4, 4, 4};
auto A = make_shared<op::Parameter>(element::f32, shape_a);
Shape shape_b{2, 2, 2};
auto B = make_shared<op::Parameter>(element::f32, shape_b);
Shape shape_r{4, 4, 4};
auto r = make_shared<op::ReplaceSlice>(
A, B, Coordinate{0, 0, 0}, Coordinate{4, 4, 4}, Strides{2, 2, 3});
auto f = make_shared<Function>(r, ParameterVector{A, B});
auto backend = runtime::Backend::create("${BACKEND_NAME}");
// Create some tensors for input/output
auto a = backend->create_tensor(element::f32, shape_a);
copy_data(a, vector<float>{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63});
auto b = backend->create_tensor(element::f32, shape_b);
copy_data(b, vector<float>{900, 903, 908, 911, 932, 935, 940, 943});
auto result = backend->create_tensor(element::f32, shape_r);
auto handle = backend->compile(f);
handle->call_with_validate({result}, {a, b});
EXPECT_TRUE(test::all_close_f(
(vector<float>{900, 1, 2, 903, 4, 5, 6, 7, 908, 9, 10, 911, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
932, 33, 34, 935, 36, 37, 38, 39, 940, 41, 42, 943, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63}),
read_vector<float>(result),
MIN_FLOAT_TOLERANCE_BITS));
}

View File

@ -641,15 +641,6 @@ namespace
EXPECT_FALSE(op::is_binary_elementwise_logical(&node));
}
// ReplaceSlice is not an elementwise op of any category.
void op_is_ReplaceSlice()
{
op::ReplaceSlice node;
EXPECT_FALSE(op::is_unary_elementwise_arithmetic(&node));
EXPECT_FALSE(op::is_binary_elementwise_arithmetic(&node));
EXPECT_FALSE(op::is_binary_elementwise_comparison(&node));
EXPECT_FALSE(op::is_binary_elementwise_logical(&node));
}
void op_is_Reshape()
{
op::Reshape node;

View File

@ -79,7 +79,6 @@
#include "ngraph/runtime/reference/region_yolo.hpp"
#include "ngraph/runtime/reference/relu.hpp"
#include "ngraph/runtime/reference/reorg_yolo.hpp"
#include "ngraph/runtime/reference/replace_slice.hpp"
#include "ngraph/runtime/reference/reshape.hpp"
#include "ngraph/runtime/reference/result.hpp"
#include "ngraph/runtime/reference/reverse.hpp"
@ -1246,19 +1245,6 @@ protected:
args[0]->get_data_ptr<const T>(), out[0]->get_data_ptr<T>(), element_count);
break;
}
case OP_TYPEID::ReplaceSlice:
{
const op::ReplaceSlice* slice = static_cast<const op::ReplaceSlice*>(&node);
reference::replace_slice<T>(args[0]->get_data_ptr<const T>(),
args[1]->get_data_ptr<const T>(),
out[0]->get_data_ptr<T>(),
node.get_input_shape(1),
slice->get_lower_bounds(),
slice->get_upper_bounds(),
slice->get_strides(),
node.get_output_shape(0));
break;
}
case OP_TYPEID::Reverse:
{
const op::Reverse* reverse = static_cast<const op::Reverse*>(&node);

View File

@ -109,7 +109,6 @@ NGRAPH_OP(QuantizedConvolution, ngraph::op)
NGRAPH_OP(QuantizedDot, ngraph::op)
NGRAPH_OP(Range, ngraph::op)
NGRAPH_OP(Relu, ngraph::op)
NGRAPH_OP(ReplaceSlice, ngraph::op)
NGRAPH_OP(Reshape, ngraph::op)
NGRAPH_OP(Result, ngraph::op)
NGRAPH_OP(Reverse, ngraph::op)

View File

@ -19,7 +19,6 @@
#include "dyn_elimination.hpp"
#include "ngraph/op/broadcast.hpp"
#include "ngraph/op/range.hpp"
#include "ngraph/op/replace_slice.hpp"
#include "ngraph/op/reshape.hpp"
#include "ngraph/op/slice.hpp"
#include "ngraph/op/transpose.hpp"

View File

@ -1,720 +0,0 @@
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
NGRAPH_SUPPRESS_DEPRECATED_START
using namespace std;
using namespace ngraph;
TEST(type_prop, replace_slice_deduce_vector)
{
    // Replacing elements [2,5) of a length-6 vector keeps the context's shape.
    auto context_param = make_shared<op::Parameter>(element::f32, Shape{6});
    auto replacement_param = make_shared<op::Parameter>(element::f32, Shape{3});
    auto slice_op =
        make_shared<op::ReplaceSlice>(context_param, replacement_param, Coordinate{2}, Coordinate{5});
    ASSERT_EQ(slice_op->get_element_type(), element::f32);
    ASSERT_EQ(slice_op->get_shape(), (Shape{6}));
}
TEST(type_prop, replace_slice_deduce_matrix)
{
    // A 3x6 replacement of the [2,5)x[1,7) window leaves the 6x8 shape intact.
    auto context_param = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto replacement_param = make_shared<op::Parameter>(element::f32, Shape{3, 6});
    auto slice_op = make_shared<op::ReplaceSlice>(
        context_param, replacement_param, Coordinate{2, 1}, Coordinate{5, 7});
    ASSERT_EQ(slice_op->get_element_type(), element::f32);
    ASSERT_EQ(slice_op->get_shape(), (Shape{6, 8}));
}
TEST(type_prop, replace_slice_deduce_matrix_strided)
{
    // Strides {3,2} over bounds {2,1}..{5,7} select a 1x3 window, matching the
    // replacement's shape; the output keeps the context's 6x8 shape.
    auto context_param = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto replacement_param = make_shared<op::Parameter>(element::f32, Shape{1, 3});
    auto slice_op = make_shared<op::ReplaceSlice>(
        context_param, replacement_param, Coordinate{2, 1}, Coordinate{5, 7}, Strides{3, 2});
    ASSERT_EQ(slice_op->get_element_type(), element::f32);
    ASSERT_EQ(slice_op->get_shape(), (Shape{6, 8}));
}
TEST(type_prop, replace_slice_deduce_matrix_strided_uneven)
{
    // Strides that do not divide the bounds evenly: ceil((7-1)/4) = 2 columns,
    // so a 1x2 replacement is accepted.
    auto context_param = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto replacement_param = make_shared<op::Parameter>(element::f32, Shape{1, 2});
    auto slice_op = make_shared<op::ReplaceSlice>(
        context_param, replacement_param, Coordinate{2, 1}, Coordinate{5, 7}, Strides{3, 4});
    ASSERT_EQ(slice_op->get_element_type(), element::f32);
    ASSERT_EQ(slice_op->get_shape(), (Shape{6, 8}));
}
TEST(type_prop, replace_slice_deduce_vector_edge)
{
    // Edge case: the slice covers the whole vector.
    auto context_param = make_shared<op::Parameter>(element::f32, Shape{6});
    auto replacement_param = make_shared<op::Parameter>(element::f32, Shape{6});
    auto slice_op =
        make_shared<op::ReplaceSlice>(context_param, replacement_param, Coordinate{0}, Coordinate{6});
    ASSERT_EQ(slice_op->get_element_type(), element::f32);
    ASSERT_EQ(slice_op->get_shape(), (Shape{6}));
}
TEST(type_prop, replace_slice_deduce_matrix_edge)
{
    // Edge case: the slice covers the whole matrix.
    auto context_param = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto replacement_param = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto slice_op = make_shared<op::ReplaceSlice>(
        context_param, replacement_param, Coordinate{0, 0}, Coordinate{6, 8});
    ASSERT_EQ(slice_op->get_element_type(), element::f32);
    ASSERT_EQ(slice_op->get_shape(), (Shape{6, 8}));
}
TEST(type_prop, replace_slice_deduce_matrix_zero_cols)
{
    // A zero-width slice (6x0 replacement) is legal and leaves the shape alone.
    auto context_param = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto replacement_param = make_shared<op::Parameter>(element::f32, Shape{6, 0});
    auto slice_op = make_shared<op::ReplaceSlice>(
        context_param, replacement_param, Coordinate{0, 0}, Coordinate{6, 0});
    ASSERT_EQ(slice_op->get_element_type(), element::f32);
    ASSERT_EQ(slice_op->get_shape(), (Shape{6, 8}));
}
TEST(type_prop, replace_slice_deduce_matrix_zero_zero)
{
    // A fully empty (0x0) slice is legal and leaves the shape alone.
    auto context_param = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto replacement_param = make_shared<op::Parameter>(element::f32, Shape{0, 0});
    auto slice_op = make_shared<op::ReplaceSlice>(
        context_param, replacement_param, Coordinate{0, 0}, Coordinate{0, 0});
    ASSERT_EQ(slice_op->get_element_type(), element::f32);
    ASSERT_EQ(slice_op->get_shape(), (Shape{6, 8}));
}
// Negative test: rank-1 bounds with rank-2 strides must be rejected with the
// exact rank-mismatch message (kept byte-identical below).
TEST(type_prop, replace_slice_deduce_vector_invalid_strides)
{
auto param0 = make_shared<op::Parameter>(element::f32, Shape{6});
auto param1 = make_shared<op::Parameter>(element::f32, Shape{4});
try
{
auto sl = make_shared<op::ReplaceSlice>(
param0, param1, Coordinate{0}, Coordinate{7}, Strides{1, 2});
// Should have thrown, so fail if it didn't
FAIL() << "Invalid slice strides not detected";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks of lower bounds (Coordinate{0}), upper bounds "
"(Coordinate{7}) and strides (Strides{1, 2}) do not match"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
// Negative test: a rank-2 context with a rank-3 replacement must be rejected.
TEST(type_prop, replace_slice_deduce_matrix_arg_rank_mismatch)
{
auto param0 = make_shared<op::Parameter>(element::f32, Shape{6, 8});
auto param1 = make_shared<op::Parameter>(element::f32, Shape{3, 6, 5});
try
{
auto rsl =
make_shared<op::ReplaceSlice>(param0, param1, Coordinate{2, 1}, Coordinate{5, 7});
// Should have thrown, so fail if it didn't
FAIL() << "Argument rank mismatch not detected";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), std::string("Argument ranks do not match"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
// Negative test: f32 context vs. i32 replacement must be rejected.
TEST(type_prop, replace_slice_deduce_matrix_arg_element_type_mismatch)
{
auto param0 = make_shared<op::Parameter>(element::f32, Shape{6, 8});
auto param1 = make_shared<op::Parameter>(element::i32, Shape{3, 6});
try
{
auto rsl =
make_shared<op::ReplaceSlice>(param0, param1, Coordinate{2, 1}, Coordinate{5, 7});
// Should have thrown, so fail if it didn't
FAIL() << "Argument element type mismatch not detected";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), std::string("Argument element types do not match"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
// Negative test: bounds {1,1}..{5,7} imply a 4x6 slice, so a 3x6 replacement
// must be rejected with the exact shape-mismatch message below.
TEST(type_prop, replace_slice_deduce_matrix_slice_shape_mismatch)
{
auto param0 = make_shared<op::Parameter>(element::f32, Shape{6, 8});
auto param1 = make_shared<op::Parameter>(element::f32, Shape{3, 6});
try
{
auto rsl =
make_shared<op::ReplaceSlice>(param0, param1, Coordinate{1, 1}, Coordinate{5, 7});
// Should have thrown, so fail if it didn't
FAIL() << "Slice shape mismatch not detected";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string(
"Shape of replacement tensor ({3,6}) does not match the slice shape ({4,6})"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, replace_slice_deduce_matrix_slice_shape_mismatch_strided)
{
    // With stride 2 on axis 1 the slice shape is {4,3}; the {4,6} replacement cannot fit.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto rep = make_shared<op::Parameter>(element::f32, Shape{4, 6});
    try
    {
        const Coordinate lower{1, 1};
        const Coordinate upper{5, 7};
        const Strides step{1, 2};
        auto node = make_shared<op::ReplaceSlice>(arg, rep, lower, upper, step);
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Slice shape mismatch not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            std::string(
                "Shape of replacement tensor ({4,6}) does not match the slice shape ({4,3})"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_deduce_vector_edge_upper_oob)
{
    // Upper bound 7 exceeds the length-6 input vector.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{6});
    auto rep = make_shared<op::Parameter>(element::f32, Shape{7});
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, Coordinate{0}, Coordinate{7});
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Upper bound out of range not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("Upper bound for slice at axis 0 is out of range"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_deduce_matrix_edge_upper_oob)
{
    // Upper bound 9 on axis 1 exceeds the {6,8} input.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto rep = make_shared<op::Parameter>(element::f32, Shape{6, 9});
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, Coordinate{0, 0}, Coordinate{6, 9});
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Upper bound out of range not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("Upper bound for slice at axis 1 is out of range"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_deduce_vector_lower_above_upper)
{
    // Lower bound 3 is greater than upper bound 2 on the only axis.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{6});
    auto rep = make_shared<op::Parameter>(element::f32, Shape{0});
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, Coordinate{3}, Coordinate{2});
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Lower bound above upper not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            std::string("Lower bound for slice is greater than upper bound at axis 0"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_deduce_matrix_lower_above_upper)
{
    // On axis 1 the lower bound (5) exceeds the upper bound (4).
    auto arg = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto rep = make_shared<op::Parameter>(element::f32, Shape{6, 0});
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, Coordinate{0, 5}, Coordinate{6, 4});
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Lower bound above upper not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            std::string("Lower bound for slice is greater than upper bound at axis 1"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_deduce_matrix_lower_missing)
{
    // Lower bounds have rank 1 while upper bounds have rank 2; ranks must match.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto rep = make_shared<op::Parameter>(element::f32, Shape{6, 6});
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, Coordinate{0}, Coordinate{5, 5});
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Missing lower bound coordinate not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            std::string("Ranks of lower bounds (Coordinate{0}), upper bounds "
                        "(Coordinate{5, 5}) and strides (Strides{1}) do not match"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_deduce_matrix_upper_missing)
{
    // Upper bounds have rank 1 while lower bounds have rank 2; ranks must match.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto rep = make_shared<op::Parameter>(element::f32, Shape{6, 6});
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, Coordinate{0, 0}, Coordinate{5});
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Missing upper bound coordinate not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            std::string("Ranks of lower bounds (Coordinate{0, 0}), upper bounds "
                        "(Coordinate{5}) and strides (Strides{1, 1}) do not match"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_deduce_matrix_lower_extra)
{
    // Lower bounds carry an extra (third) coordinate; ranks must match.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto rep = make_shared<op::Parameter>(element::f32, Shape{6, 6});
    try
    {
        auto node =
            make_shared<op::ReplaceSlice>(arg, rep, Coordinate{0, 0, 0}, Coordinate{5, 5});
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Extra lower bound coordinate not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("Ranks of lower bounds (Coordinate{0, 0, "
                                         "0}), upper bounds (Coordinate{5, 5}) and "
                                         "strides (Strides{1, 1, 1}) do not match"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_deduce_matrix_upper_extra)
{
    // Upper bounds carry an extra (third) coordinate; ranks must match.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{6, 8});
    auto rep = make_shared<op::Parameter>(element::f32, Shape{6, 6});
    try
    {
        auto node =
            make_shared<op::ReplaceSlice>(arg, rep, Coordinate{0, 0}, Coordinate{5, 5, 5});
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Extra upper bound coordinate not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("Ranks of lower bounds (Coordinate{0, 0}), "
                                         "upper bounds (Coordinate{5, 5, 5}) and "
                                         "strides (Strides{1, 1}) do not match"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_partial_input_rank_dynamic_replacement_rank_dynamic_attribs_ok)
{
    // Both argument ranks are dynamic; the rank-4 attributes fix the output
    // rank at 4 with all dimensions dynamic.
    auto arg = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    auto rep = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    const Coordinate lower{1, 2, 3, 4};
    const Coordinate upper{1, 3, 5, 7};
    const Strides step{1, 1, 1, 2};
    auto node = make_shared<op::ReplaceSlice>(arg, rep, lower, upper, step);
    ASSERT_EQ(node->get_element_type(), element::f32);
    ASSERT_TRUE(node->get_output_partial_shape(0).same_scheme(PartialShape{
        Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()}));
}
TEST(type_prop,
     replace_slice_partial_input_rank_dynamic_replacement_rank_dynamic_attribs_rank_mismatch)
{
    // Upper bounds are rank 3 while lower bounds and strides are rank 4;
    // the mismatch must be caught even with both argument ranks dynamic.
    auto arg = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    auto rep = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    const Coordinate lower{1, 2, 3, 4};
    const Coordinate upper{1, 3, 5};
    const Strides step{1, 1, 1, 2};
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, lower, upper, step);
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Mismatch of lower-bounds/upper-bounds/strides ranks not detected (argument "
                  "rank-dynamic)";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            std::string("Ranks of lower bounds (Coordinate{1, 2, 3, 4}), upper bounds "
                        "(Coordinate{1, 3, 5}) and strides (Strides{1, 1, 1, 2}) do not match"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop,
     replace_slice_partial_input_rank_dynamic_replacement_rank_dynamic_attribs_bounds_crossing)
{
    // Lower bound 8 crosses upper bound 7 on axis 3; must be rejected even
    // with both argument ranks dynamic.
    auto arg = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    auto rep = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    const Coordinate lower{1, 2, 3, 8};
    const Coordinate upper{1, 3, 5, 7};
    const Strides step{1, 1, 1, 2};
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, lower, upper, step);
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Crossing lower/upper bounds not detected (argument rank-dynamic)";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            std::string("Lower bound for slice is greater than upper bound at axis 3 (lower "
                        "bounds: Coordinate{1, 2, 3, 8}, upper bounds: Coordinate{1, 3, 5, 7})"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_partial_input_rank_static_dynamic_replacement_rank_dynamic_ok)
{
    // Rank-4 input (all dims dynamic) with a rank-dynamic replacement: output
    // keeps rank 4 with every dimension dynamic.
    const PartialShape in_shape{
        Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()};
    auto arg = make_shared<op::Parameter>(element::f32, in_shape);
    auto rep = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    auto node = make_shared<op::ReplaceSlice>(
        arg, rep, Coordinate{1, 2, 3, 4}, Coordinate{1, 3, 5, 7}, Strides{1, 1, 1, 2});
    ASSERT_EQ(node->get_element_type(), element::f32);
    ASSERT_TRUE(node->get_output_partial_shape(0).same_scheme(PartialShape{
        Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()}));
}
TEST(type_prop,
     replace_slice_partial_input_rank_static_dynamic_some_dims_known_replacement_rank_dynamic_ok)
{
    // Known input dimensions propagate to the output; the dynamic one stays dynamic.
    const PartialShape in_shape{2, 4, 10, Dimension::dynamic()};
    auto arg = make_shared<op::Parameter>(element::f32, in_shape);
    auto rep = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    auto node = make_shared<op::ReplaceSlice>(
        arg, rep, Coordinate{1, 2, 3, 4}, Coordinate{1, 3, 5, 7}, Strides{1, 1, 1, 2});
    ASSERT_EQ(node->get_element_type(), element::f32);
    ASSERT_TRUE(
        node->get_output_partial_shape(0).same_scheme(PartialShape{2, 4, 10, Dimension::dynamic()}));
}
TEST(
    type_prop,
    replace_slice_partial_input_rank_static_dynamic_replacement_rank_dynamic_attribs_rank_mismatches_input)
{
    // Rank-5 input against rank-4 attributes: the attribute/argument rank
    // mismatch must be detected even though every input dimension is dynamic.
    const PartialShape in_shape{Dimension::dynamic(),
                                Dimension::dynamic(),
                                Dimension::dynamic(),
                                Dimension::dynamic(),
                                Dimension::dynamic()};
    auto arg = make_shared<op::Parameter>(element::f32, in_shape);
    auto rep = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    const Coordinate lower{1, 2, 3, 4};
    const Coordinate upper{1, 3, 5, 7};
    const Strides step{1, 1, 1, 2};
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, lower, upper, step);
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Mismatch of attrib ranks with arg ranks not detected (argument rank-static "
                  "dynamic)";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("Argument ranks do not match the rank of the lower bounds "
                                         "(Coordinate{1, 2, 3, 4}), upper bounds (Coordinate{1, 3, "
                                         "5, 7}), and strides (Strides{1, 1, 1, 2})"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(
    type_prop,
    replace_slice_partial_input_rank_static_dynamic_some_dims_known_replacement_rank_dynamic_upper_bounds_oob)
{
    // Upper bound 3 exceeds the known size 2 on axis 1.
    const PartialShape in_shape{2, 2, 10, Dimension::dynamic()};
    auto arg = make_shared<op::Parameter>(element::f32, in_shape);
    auto rep = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    const Coordinate lower{1, 2, 3, 4};
    const Coordinate upper{1, 3, 5, 7};
    const Strides step{1, 1, 1, 2};
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, lower, upper, step);
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Upper bounds out of bounds not detected (argument rank-static dynamic)";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("Upper bound for slice at axis 1 is out of "
                                         "range (upper bounds: Coordinate{1, 3, 5, "
                                         "7}, argument shape: {2,2,10,?})"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, replace_slice_partial_input_rank_dynamic_replacement_rank_static_dynamic_ok)
{
    // Rank-dynamic input with a rank-4 (all dims dynamic) replacement: output
    // is rank 4 with every dimension dynamic.
    const PartialShape rep_shape{
        Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()};
    auto arg = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    auto rep = make_shared<op::Parameter>(element::f32, rep_shape);
    auto node = make_shared<op::ReplaceSlice>(
        arg, rep, Coordinate{1, 2, 3, 4}, Coordinate{1, 3, 5, 7}, Strides{1, 1, 1, 2});
    ASSERT_EQ(node->get_element_type(), element::f32);
    ASSERT_TRUE(node->get_output_partial_shape(0).same_scheme(PartialShape{
        Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()}));
}
TEST(type_prop,
     replace_slice_partial_input_rank_dynamic_replacement_rank_static_dynamic_some_dims_known_ok)
{
    // Replacement shape {0,?,?,2} is consistent with the slice implied by the
    // attributes, so construction succeeds with a fully dynamic rank-4 output.
    const PartialShape rep_shape{0, Dimension::dynamic(), Dimension::dynamic(), 2};
    auto arg = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    auto rep = make_shared<op::Parameter>(element::f32, rep_shape);
    auto node = make_shared<op::ReplaceSlice>(
        arg, rep, Coordinate{1, 2, 3, 4}, Coordinate{1, 3, 5, 7}, Strides{1, 1, 1, 2});
    ASSERT_EQ(node->get_element_type(), element::f32);
    ASSERT_TRUE(node->get_output_partial_shape(0).same_scheme(PartialShape{
        Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()}));
}
TEST(
    type_prop,
    replace_slice_partial_input_rank_dynamic_replacement_rank_static_dynamic_some_dims_known_attribs_mismatch_replacement_shape)
{
    // Replacement dim 0 is 1 but the attributes imply a slice of size 0 there.
    const PartialShape rep_shape{1, Dimension::dynamic(), Dimension::dynamic(), 2};
    auto arg = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    auto rep = make_shared<op::Parameter>(element::f32, rep_shape);
    const Coordinate lower{1, 2, 3, 4};
    const Coordinate upper{1, 3, 5, 7};
    const Strides step{1, 1, 1, 2};
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, lower, upper, step);
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Mismatch of shape inferred from attributes with provided replacement shape not "
                  "detected (rank-dynamic/rank-static dynamic inputs)";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("Shape of replacement tensor ({1,?,?,2}) does not match "
                                         "the slice shape ({0,1,2,2})"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(
    type_prop,
    replace_slice_partial_input_rank_dynamic_replacement_rank_static_dynamic_attribs_rank_mismatches_replacement)
{
    // Rank-5 replacement against rank-4 attributes: the rank mismatch must be
    // detected even though the input rank is dynamic.
    const PartialShape rep_shape{Dimension::dynamic(),
                                 Dimension::dynamic(),
                                 Dimension::dynamic(),
                                 Dimension::dynamic(),
                                 Dimension::dynamic()};
    auto arg = make_shared<op::Parameter>(element::f32, PartialShape{PartialShape::dynamic()});
    auto rep = make_shared<op::Parameter>(element::f32, rep_shape);
    const Coordinate lower{1, 2, 3, 4};
    const Coordinate upper{1, 3, 5, 7};
    const Strides step{1, 1, 1, 2};
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, lower, upper, step);
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Mismatch of attrib ranks with arg ranks not detected (arguments "
                  "rank-dynamic/rank-static "
                  "dynamic)";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("Argument ranks do not match the rank of the lower bounds "
                                         "(Coordinate{1, 2, 3, 4}), upper bounds (Coordinate{1, 3, "
                                         "5, 7}), and strides (Strides{1, 1, 1, 2})"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(
    type_prop,
    replace_slice_partial_input_rank_static_dynamic_replacement_rank_static_dynamic_argument_ranks_mismatch)
{
    // Rank-4 input vs. rank-5 replacement: argument rank mismatch must be
    // detected when both ranks are static (dims dynamic).
    const PartialShape in_shape{
        Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()};
    const PartialShape rep_shape{Dimension::dynamic(),
                                 Dimension::dynamic(),
                                 Dimension::dynamic(),
                                 Dimension::dynamic(),
                                 Dimension::dynamic()};
    auto arg = make_shared<op::Parameter>(element::f32, in_shape);
    auto rep = make_shared<op::Parameter>(element::f32, rep_shape);
    const Coordinate lower{1, 2, 3, 4};
    const Coordinate upper{1, 3, 5, 7};
    const Strides step{1, 1, 1, 2};
    try
    {
        auto node = make_shared<op::ReplaceSlice>(arg, rep, lower, upper, step);
        // Construction succeeded; the expected validation failure did not occur.
        FAIL() << "Mismatching input/replacement ranks not detected (arguments both rank-static "
                  "dynamic)";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("Argument ranks do not match"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}