Review tile class for shape inference aspects (#14673)
* Review Tile for shape inference: propagate labels and dimensions; template implementation of shape inference; if a repeat value is non-positive, the output dimension is always 0 * Refactor Tile shape inference * Review: preserve partial values and labels * Add support for evaluating bounds from the repeats input * Remove unused code
This commit is contained in:
parent
f02c663a1d
commit
88456e6c3c
@ -30,8 +30,11 @@ public:
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
|
||||
bool evaluate_lower(const HostTensorVector& outputs) const override;
|
||||
bool evaluate_upper(const HostTensorVector& outputs) const override;
|
||||
OPENVINO_SUPPRESS_DEPRECATED_END
|
||||
bool has_evaluate() const override;
|
||||
bool evaluate_label(TensorLabelVector& output_labels) const override;
|
||||
|
||||
private:
|
||||
bool evaluate_tile(const HostTensorVector& outputs, const HostTensorVector& inputs) const;
|
||||
|
@ -5,6 +5,7 @@
|
||||
#include <openvino/op/tile.hpp>
|
||||
|
||||
#include "utils.hpp"
|
||||
|
||||
namespace ov {
|
||||
namespace op {
|
||||
namespace v0 {
|
||||
@ -14,33 +15,51 @@ void shape_infer(const Tile* op,
|
||||
const std::vector<T>& input_shapes,
|
||||
std::vector<T>& output_shapes,
|
||||
const std::map<size_t, std::shared_ptr<ngraph::runtime::HostTensor>>& constant_data = {}) {
|
||||
NODE_VALIDATION_CHECK(op, input_shapes.size() == 2 && output_shapes.size() == 1);
|
||||
const auto& arg_shape = input_shapes[0];
|
||||
auto& repeats_shape = input_shapes[1];
|
||||
auto& output_shape = output_shapes[0];
|
||||
using DimType = typename std::iterator_traits<typename T::iterator>::value_type;
|
||||
std::vector<int64_t> axes_val;
|
||||
NODE_VALIDATION_CHECK(op, repeats_shape.rank().compatible(1), "PartialShape of repeats must be of rank 1");
|
||||
using TDim = typename std::iterator_traits<typename T::iterator>::value_type;
|
||||
|
||||
// Get repeats
|
||||
bool axes_are_known = get_data_as_int64<T>(1, op, axes_val, constant_data);
|
||||
const auto arg_rank = arg_shape.rank();
|
||||
if (arg_rank.is_static() && (axes_are_known || repeats_shape[0].is_static())) {
|
||||
// try to specify rank
|
||||
int64_t data_rank = arg_shape.size();
|
||||
int64_t repeats_rank = axes_are_known ? axes_val.size() : repeats_shape[0].get_length();
|
||||
auto output_rank = std::max(data_rank, repeats_rank);
|
||||
output_shape.resize(output_rank);
|
||||
// if have constant axes, compute new axes
|
||||
if (axes_are_known) {
|
||||
auto remain_arg = output_rank - data_rank;
|
||||
auto remain_axes = output_rank - repeats_rank;
|
||||
for (size_t i = 0; i < static_cast<size_t>(output_rank); i++) {
|
||||
auto data_tmp = i < static_cast<size_t>(remain_arg) ? DimType(1) : arg_shape[i - (remain_arg)];
|
||||
auto repeat_tmp = i < static_cast<size_t>(remain_axes) ? DimType(1) : axes_val[i - remain_axes];
|
||||
output_shape[i] = data_tmp * repeat_tmp;
|
||||
}
|
||||
NODE_VALIDATION_CHECK(op, input_shapes.size() == 2 && output_shapes.size() == 1);
|
||||
|
||||
const auto& repeats_shape = input_shapes[1];
|
||||
NODE_VALIDATION_CHECK(op, repeats_shape.rank().compatible(1), "Tile repeats must be of rank 1");
|
||||
|
||||
const auto& arg_shape = input_shapes[0];
|
||||
auto& output_shape = output_shapes[0];
|
||||
|
||||
// Get repeats and pre process values
|
||||
T repeats;
|
||||
bool has_repeats;
|
||||
if (auto rep_data = get_input_const_data_as<T, int64_t>(op, 1, constant_data)) {
|
||||
// set negatives repeats to 0
|
||||
repeats.resize(rep_data->size());
|
||||
std::transform(rep_data->begin(), rep_data->end(), repeats.begin(), [](int64_t r) -> TDim {
|
||||
return {static_cast<typename TDim::value_type>(std::max(static_cast<int64_t>(0), r))};
|
||||
});
|
||||
has_repeats = true;
|
||||
} else {
|
||||
has_repeats = get_data_as_shape(1, op, repeats);
|
||||
}
|
||||
|
||||
const auto& arg_rank = arg_shape.rank();
|
||||
if (arg_rank.is_static() && has_repeats) {
|
||||
const auto output_rank = std::max(arg_shape.size(), repeats.size());
|
||||
|
||||
std::vector<TDim> dims;
|
||||
dims.reserve(output_rank);
|
||||
|
||||
// add missing repeats
|
||||
repeats.insert(repeats.begin(), output_rank - repeats.size(), TDim{1});
|
||||
|
||||
// insert missing input dimensions
|
||||
auto rep_it = std::next(repeats.begin(), output_rank - arg_shape.size());
|
||||
dims.insert(dims.begin(), repeats.begin(), rep_it);
|
||||
|
||||
// calc repeated output dimensions
|
||||
std::transform(arg_shape.begin(), arg_shape.end(), rep_it, std::back_inserter(dims), std::multiplies<TDim>());
|
||||
|
||||
output_shape = T(std::move(dims));
|
||||
} else if (arg_rank.is_static() && repeats_shape[0].is_static()) {
|
||||
// unknown repeats but shape is 1-D static, any dim can be repeated (add missing dimension)
|
||||
output_shape.resize(std::max<size_t>(arg_rank.get_length(), repeats_shape[0].get_length()));
|
||||
} else {
|
||||
// can't deduce shape, set default value
|
||||
output_shape = PartialShape::dynamic();
|
||||
|
@ -3,7 +3,7 @@
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include <openvino/core/validation_util.hpp>
|
||||
#include <ngraph/validation_util.hpp>
|
||||
#include <openvino/opsets/opset1.hpp>
|
||||
|
||||
template <class OpType, class T>
|
||||
@ -43,6 +43,76 @@ void eltwise_shape_infer(const OpType* op, const std::vector<T>& input_shapes, s
|
||||
output_shapes[0] = output_shape;
|
||||
}
|
||||
|
||||
namespace ov {
|
||||
namespace op {
|
||||
|
||||
/**
|
||||
* \brief Get the operator's input const as pointer to vector of specified type.
|
||||
*
|
||||
* The behaviour depends on shape type. The default output type is std::vector<TData> can be replace by other type
|
||||
* which if is possible to construct it from constant data vector.
|
||||
*
|
||||
* \tparam TShape Shape type which enabled this version (not ov::PartialShape)
|
||||
* \tparam TData Type use to cast input's data.
|
||||
* \tparam TRes Result type which has got default type as std::vector<TData>.
|
||||
*
|
||||
* \param op Pointer to operator.
|
||||
* \param idx Operator's input number.
|
||||
* \param constant_data Map with constant. Default empty.
|
||||
*
|
||||
* \return Pointer to constant data or nullptr if input has no constant data.
|
||||
*/
|
||||
template <class TShape,
|
||||
class TData,
|
||||
class TRes = std::vector<TData>,
|
||||
typename std::enable_if<!std::is_same<TShape, ov::PartialShape>::value>::type* = nullptr>
|
||||
std::unique_ptr<TRes> get_input_const_data_as(const ov::Node* op,
|
||||
size_t idx,
|
||||
const std::map<size_t, HostTensorPtr>& constant_data = {}) {
|
||||
if (constant_data.count(idx)) {
|
||||
return std::unique_ptr<TRes>(new TRes(ov::opset1::Constant(constant_data.at(idx)).cast_vector<TData>()));
|
||||
} else {
|
||||
const auto& constant = ov::as_type_ptr<ov::opset1::Constant>(op->get_input_node_shared_ptr(idx));
|
||||
NODE_VALIDATION_CHECK(op, constant != nullptr, "Static shape inference lacks constant data on port ", idx);
|
||||
return std::unique_ptr<TRes>(new TRes(constant->cast_vector<TData>()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* \brief Get the operator's input const as pointer to vector of specified type.
|
||||
*
|
||||
* The behaviour depends on shape type. The default output type is std::vector<TData> can be replace by other type
|
||||
* which if is possible to construct it from constant data vector.
|
||||
*
|
||||
* \tparam TShape Shape type which enabled this version (ov::PartialShape)
|
||||
* \tparam TData Type use to cast input's data.
|
||||
* \tparam TRes Result type which has got default type as std::vector<TData>.
|
||||
*
|
||||
* \param op Pointer to operator.
|
||||
* \param idx Operator's input number.
|
||||
* \param constant_data Map with constant. Default empty.
|
||||
*
|
||||
* \return Pointer to constant data or nullptr if input has no constant data.
|
||||
*/
|
||||
template <class TShape,
|
||||
class TData,
|
||||
class TRes = std::vector<TData>,
|
||||
typename std::enable_if<std::is_same<TShape, ov::PartialShape>::value>::type* = nullptr>
|
||||
std::unique_ptr<std::vector<TData>> get_input_const_data_as(const ov::Node* op,
|
||||
size_t idx,
|
||||
const std::map<size_t, HostTensorPtr>& constant_data = {}) {
|
||||
if (constant_data.count(idx)) {
|
||||
return std::unique_ptr<TRes>(new TRes(ov::opset1::Constant(constant_data.at(idx)).cast_vector<TData>()));
|
||||
} else if (const auto& constant = ov::get_constant_from_source(op->input_value(idx))) {
|
||||
return std::unique_ptr<TRes>(new TRes(constant->cast_vector<TData>()));
|
||||
} else {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace op
|
||||
} // namespace ov
|
||||
|
||||
template <class T>
|
||||
inline bool get_data_as_int64(
|
||||
size_t idx,
|
||||
|
@ -27,19 +27,18 @@ bool ngraph::op::v0::Tile::visit_attributes(AttributeVisitor& visitor) {
|
||||
|
||||
void op::v0::Tile::validate_and_infer_types() {
|
||||
OV_OP_SCOPE(v0_Tile_validate_and_infer_types);
|
||||
auto arg_et = get_input_element_type(0);
|
||||
|
||||
// Repeats should have integer data type. For now we only allow i64
|
||||
auto repeats_et = get_input_element_type(1);
|
||||
const auto& repeats_et = get_input_element_type(1);
|
||||
NODE_VALIDATION_CHECK(this,
|
||||
repeats_et.is_integral(),
|
||||
"Tile repeats must have any integer element type, but has ",
|
||||
repeats_et);
|
||||
|
||||
std::vector<ov::PartialShape> output_shapes = {ov::PartialShape{}};
|
||||
std::vector<ov::PartialShape> input_shapes = {get_input_partial_shape(0), get_input_partial_shape(1)};
|
||||
const auto input_shapes = get_node_input_partial_shapes(*this);
|
||||
auto output_shapes = std::vector<PartialShape>(1, ov::PartialShape{});
|
||||
shape_infer(this, input_shapes, output_shapes);
|
||||
set_output_type(0, arg_et, output_shapes[0]);
|
||||
set_output_type(0, get_input_element_type(0), output_shapes[0]);
|
||||
|
||||
set_input_is_relevant_to_shape(0);
|
||||
set_input_is_relevant_to_shape(1);
|
||||
@ -85,3 +84,22 @@ bool op::v0::Tile::has_evaluate() const {
|
||||
OV_OP_SCOPE(v0_Tile_has_evaluate);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool op::v0::Tile::evaluate_lower(const HostTensorVector& output_values) const {
|
||||
OV_OP_SCOPE(v0_Tile_evaluate_lower);
|
||||
|
||||
return get_input_tensor(1).has_and_set_bound() && default_lower_bound_evaluator(this, output_values);
|
||||
}
|
||||
|
||||
bool op::v0::Tile::evaluate_upper(const HostTensorVector& output_values) const {
|
||||
OV_OP_SCOPE(v0_Tile_evaluate_upper);
|
||||
|
||||
return get_input_tensor(1).has_and_set_bound() && default_upper_bound_evaluator(this, output_values);
|
||||
}
|
||||
|
||||
// Propagates dimension labels through Tile; requires the repeats input (port 1) bound to be set.
bool op::v0::Tile::evaluate_label(TensorLabelVector& output_labels) const {
    OV_OP_SCOPE(v0_Tile_evaluate_label);
    OPENVINO_ASSERT(output_labels.size() == 1);

    if (!get_input_tensor(1).has_and_set_bound()) {
        return false;
    }
    return default_label_evaluator(this, output_labels);
}
|
||||
|
@ -2,49 +2,231 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "common_test_utils/test_assertions.hpp"
|
||||
#include "dimension_tracker.hpp"
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
#include "util/type_prop.hpp"
|
||||
|
||||
using namespace std;
|
||||
using namespace ngraph;
|
||||
using namespace testing;
|
||||
|
||||
TEST(type_prop, tile) {
|
||||
auto param0 = make_shared<op::Parameter>(element::f32, Shape{6, 8, 10});
|
||||
auto param1 = op::Constant::create(element::i64, Shape{3}, {3, 4, 1});
|
||||
auto top = make_shared<op::v0::Tile>(param0, param1);
|
||||
ASSERT_EQ(top->get_element_type(), element::f32);
|
||||
ASSERT_EQ(top->get_shape(), (Shape{18, 32, 10}));
|
||||
class TypePropTileTest : public TypePropOpTest<op::v0::Tile> {
|
||||
protected:
|
||||
PartialShape shape_in;
|
||||
};
|
||||
|
||||
TEST_F(TypePropTileTest, exception_if_repeats_are_float) {
|
||||
const auto data = make_shared<op::Parameter>(element::f64, Shape{2, 3, 4});
|
||||
const auto repeats = op::Constant::create(element::f32, Shape{3}, {3, 2, 1});
|
||||
|
||||
OV_EXPECT_THROW(auto op = make_op(data, repeats),
|
||||
NodeValidationFailure,
|
||||
HasSubstr("Tile repeats must have any integer element type, but has"));
|
||||
}
|
||||
|
||||
TEST(type_prop, tile_small_data_rank) {
|
||||
auto param0 = make_shared<op::Parameter>(element::f32, Shape{8, 10});
|
||||
auto param1 = op::Constant::create(element::i64, Shape{3}, {3, 4, 1});
|
||||
auto top = make_shared<op::v0::Tile>(param0, param1);
|
||||
ASSERT_EQ(top->get_element_type(), element::f32);
|
||||
ASSERT_EQ(top->get_shape(), (Shape{3, 32, 10}));
|
||||
TEST_F(TypePropTileTest, exception_if_repeats_shape_is_not_rank_1) {
|
||||
const auto data = make_shared<op::Parameter>(element::f64, Shape{2, 3, 4});
|
||||
const auto repeats = op::Constant::create(element::i16, Shape{3, 1}, {3, 2, 1});
|
||||
|
||||
OV_EXPECT_THROW(auto op = make_op(data, repeats),
|
||||
NodeValidationFailure,
|
||||
HasSubstr("Tile repeats must be of rank 1"));
|
||||
}
|
||||
|
||||
TEST(type_prop, tile_few_repeats) {
|
||||
auto param0 = make_shared<op::Parameter>(element::f32, Shape{6, 8, 10});
|
||||
auto param1 = op::Constant::create(element::i64, Shape{2}, {4, 1});
|
||||
auto top = make_shared<op::v0::Tile>(param0, param1);
|
||||
ASSERT_EQ(top->get_element_type(), element::f32);
|
||||
ASSERT_EQ(top->get_shape(), (Shape{6, 32, 10}));
|
||||
TEST_F(TypePropTileTest, repeats_has_negative_values) {
|
||||
const auto data = make_shared<op::Parameter>(element::i32, PartialShape{-1, 3, 4, {-1, 5}, {4, -1}});
|
||||
const auto repeats = op::Constant::create(element::i8, Shape{5}, {-1, -2, 1, -1, -1});
|
||||
auto op = make_op(data, repeats);
|
||||
|
||||
EXPECT_EQ(op->get_element_type(), element::i32);
|
||||
EXPECT_EQ(op->get_output_partial_shape(0), PartialShape({0, 0, 4, 0, 0}));
|
||||
}
|
||||
|
||||
TEST(type_prop, tile_few_repeats_dyn_input) {
|
||||
auto param0 = make_shared<op::Parameter>(element::f32, PartialShape{6, Dimension(8, 10), 10});
|
||||
auto param1 = op::Constant::create(element::i64, Shape{2}, {4, 1});
|
||||
auto top = make_shared<op::v0::Tile>(param0, param1);
|
||||
ASSERT_EQ(top->get_element_type(), element::f32);
|
||||
ASSERT_EQ(top->get_output_partial_shape(0), (PartialShape{6, Dimension(32, 40), 10}));
|
||||
TEST_F(TypePropTileTest, repeats_are_undefined_and_its_rank_lt_data_rank) {
|
||||
const auto data = make_shared<op::Parameter>(element::f32, Shape{6, 8, 10});
|
||||
const auto repeats = make_shared<op::Parameter>(element::i32, Shape{2});
|
||||
|
||||
const auto op = make_op(data, repeats);
|
||||
|
||||
EXPECT_EQ(op->get_element_type(), element::f32);
|
||||
EXPECT_EQ(op->get_output_partial_shape(0), PartialShape::dynamic(3));
|
||||
}
|
||||
|
||||
TEST(type_prop, tile_out_rank_from_repeats) {
|
||||
auto param0 = make_shared<op::Parameter>(element::f32, Shape{6, 8, 10});
|
||||
auto param1 = make_shared<op::Parameter>(element::i32, Shape{5});
|
||||
auto top = make_shared<op::v0::Tile>(param0, param1);
|
||||
ASSERT_EQ(top->get_element_type(), element::f32);
|
||||
ASSERT_EQ(top->get_output_partial_shape(0).size(), 5);
|
||||
TEST_F(TypePropTileTest, repeats_are_undefined_and_its_rank_gt_data_rank) {
|
||||
const auto data = make_shared<op::Parameter>(element::f32, Shape{6, 8, 10});
|
||||
const auto repeats = make_shared<op::Parameter>(element::i32, Shape{5});
|
||||
|
||||
const auto op = make_op(data, repeats);
|
||||
|
||||
EXPECT_EQ(op->get_element_type(), element::f32);
|
||||
EXPECT_EQ(op->get_output_partial_shape(0), PartialShape::dynamic(5));
|
||||
}
|
||||
|
||||
TEST_F(TypePropTileTest, data_dynamic_rank_repeats_are_undefined) {
|
||||
const auto data = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
|
||||
const auto repeats = make_shared<op::Parameter>(element::i32, Shape{5});
|
||||
|
||||
const auto op = make_op(data, repeats);
|
||||
|
||||
EXPECT_EQ(op->get_element_type(), element::f32);
|
||||
EXPECT_EQ(op->get_output_partial_shape(0), PartialShape::dynamic());
|
||||
}
|
||||
|
||||
TEST_F(TypePropTileTest, data_and_repeats_are_dynamic_rank) {
|
||||
const auto data = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
|
||||
const auto repeats = make_shared<op::Parameter>(element::i32, PartialShape::dynamic());
|
||||
|
||||
const auto op = make_op(data, repeats);
|
||||
|
||||
EXPECT_EQ(op->get_element_type(), element::f32);
|
||||
EXPECT_EQ(op->get_output_partial_shape(0), PartialShape::dynamic());
|
||||
}
|
||||
|
||||
TEST_F(TypePropTileTest, propagate_label_and_dynamic_value_no_repeats) {
|
||||
auto p_shape = PartialShape{{2, 5}, 3};
|
||||
set_shape_labels(p_shape, 1);
|
||||
|
||||
constexpr auto et = element::i64;
|
||||
const auto labeled_param = std::make_shared<op::Parameter>(et, p_shape);
|
||||
const auto labeled_shape_of = std::make_shared<op::ShapeOf>(labeled_param);
|
||||
|
||||
const auto repeats = op::Constant::create(element::i32, Shape{1}, {1});
|
||||
const auto op = make_op(labeled_shape_of, repeats);
|
||||
const auto bc =
|
||||
std::make_shared<op::v3::Broadcast>(std::make_shared<op::Parameter>(ov::element::i32, PartialShape{1}),
|
||||
op,
|
||||
"BIDIRECTIONAL");
|
||||
|
||||
const auto& out_shape = bc->get_output_partial_shape(0);
|
||||
EXPECT_EQ(out_shape, p_shape);
|
||||
EXPECT_THAT(get_shape_labels(out_shape), ElementsAre(1, 2));
|
||||
}
|
||||
|
||||
TEST_F(TypePropTileTest, propagate_label_and_dynamic_value) {
|
||||
auto p_shape = PartialShape{{2, 5}, 3};
|
||||
set_shape_labels(p_shape, 1);
|
||||
|
||||
constexpr auto et = element::i64;
|
||||
const auto labeled_param = std::make_shared<op::Parameter>(et, p_shape);
|
||||
const auto labeled_shape_of = std::make_shared<op::ShapeOf>(labeled_param);
|
||||
|
||||
const auto repeats = op::Constant::create(element::i32, Shape{1}, {2});
|
||||
const auto op = make_op(labeled_shape_of, repeats);
|
||||
const auto bc =
|
||||
std::make_shared<op::v3::Broadcast>(std::make_shared<op::Parameter>(ov::element::i32, PartialShape{1}),
|
||||
op,
|
||||
"BIDIRECTIONAL");
|
||||
|
||||
const auto& out_shape = bc->get_output_partial_shape(0);
|
||||
EXPECT_EQ(out_shape, PartialShape({{2, 5}, 3, {2, 5}, 3}));
|
||||
EXPECT_THAT(get_shape_labels(out_shape), ElementsAre(1, 2, 1, 2));
|
||||
}
|
||||
|
||||
TEST_F(TypePropTileTest, preserve_partial_values_and_labels) {
|
||||
auto shape = PartialShape{1, {1, 2}, {-1, 3}, {2, -1}, -1};
|
||||
set_shape_labels(shape, 20);
|
||||
const auto p_repeats = std::make_shared<op::Parameter>(element::i64, shape);
|
||||
const auto shape_of_repeats = std::make_shared<op::ShapeOf>(p_repeats);
|
||||
|
||||
auto data = op::Constant::create(element::i64, Shape{2, 2, 2, 1, 1}, {1, 2, 3, 4, 5, 6, 7, 8});
|
||||
|
||||
const auto op = make_op(data, shape_of_repeats);
|
||||
|
||||
EXPECT_EQ(op->get_output_partial_shape(0), PartialShape({2, {2, 4}, {-1, 6}, -1, -1}));
|
||||
EXPECT_THAT(get_shape_labels(op->get_output_partial_shape(0)),
|
||||
ElementsAre(ov::no_label, ov::no_label, ov::no_label, 23, 24));
|
||||
}
|
||||
|
||||
using TileTestParam = std::tuple<PartialShape, std::vector<int64_t>, PartialShape>;
|
||||
|
||||
// Parameterized fixture: (input shape, repeats values, expected output shape).
class TileTest : public TypePropTileTest, public WithParamInterface<TileTestParam> {
protected:
    void SetUp() override {
        std::tie(shape_in, repeats_val, exp_shape) = GetParam();
    }

    // Computes the labels expected on the output shape: a label survives only where
    // its dimension is repeated exactly once; new leading dimensions get no label.
    std::vector<size_t> get_exp_labels() const {
        auto labels = get_shape_labels(shape_in);

        if (!labels.empty()) {
            auto repeats = repeats_val;
            // Cast before subtracting: size() is unsigned and the raw difference would
            // wrap around when repeats is longer than labels.
            const int64_t size_diff =
                static_cast<int64_t>(labels.size()) - static_cast<int64_t>(repeats.size());

            if (size_diff >= 0) {
                // Missing repeats are implicitly 1 (leading dimensions are kept).
                repeats.insert(repeats.begin(), size_diff, 1);
            } else {
                // Output rank exceeds input rank; new leading dimensions carry no label.
                labels.insert(labels.begin(), -size_diff, ov::no_label);
            }

            std::transform(labels.begin(),
                           labels.end(),
                           repeats.begin(),
                           labels.begin(),
                           [](const size_t label, const int64_t repeat) {
                               return (label != ov::no_label && repeat == 1) ? label : ov::no_label;
                           });
        }
        return labels;
    }

    PartialShape exp_shape;
    std::vector<int64_t> repeats_val;
};
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
type_prop_static_shape,
|
||||
TileTest,
|
||||
Values(
|
||||
std::make_tuple(PartialShape{1, 2}, std::vector<int64_t>{0, 0}, PartialShape{0, 0}),
|
||||
std::make_tuple(PartialShape{3, 7, 1, 2, 4}, std::vector<int64_t>{2, 1, 7, 1, 2}, PartialShape{6, 7, 7, 2, 8}),
|
||||
std::make_tuple(PartialShape{1, 4, 2}, std::vector<int64_t>(3, 1), PartialShape{1, 4, 2}),
|
||||
std::make_tuple(PartialShape{1, 2, 4}, std::vector<int64_t>{2, 1}, PartialShape{1, 4, 4}),
|
||||
std::make_tuple(PartialShape{3, 6, 7, 1, 2, 4}, std::vector<int64_t>{2, 2}, PartialShape{3, 6, 7, 1, 4, 8}),
|
||||
std::make_tuple(PartialShape{1, 2, 4}, std::vector<int64_t>{2, 1, 1, 1}, PartialShape{2, 1, 2, 4}),
|
||||
std::make_tuple(PartialShape{1, 2, 4}, std::vector<int64_t>{2, 1, 2, 3, 4}, PartialShape{2, 1, 2, 6, 16})),
|
||||
PrintToStringParamName());
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
type_prop_dynamic_shape,
|
||||
TileTest,
|
||||
Values(
|
||||
std::make_tuple(PartialShape{{1, 5}, {2, -1}}, std::vector<int64_t>{0, 0}, PartialShape{0, 0}),
|
||||
std::make_tuple(PartialShape{{3, -1}, {-1, 7}, -1, {2, 3}, {-1, 2}},
|
||||
std::vector<int64_t>{2, 1, 7, 1, 2},
|
||||
PartialShape{{6, -1}, {-1, 7}, -1, {2, 3}, {-1, 4}}),
|
||||
std::make_tuple(PartialShape{1, 4, 2}, std::vector<int64_t>(3, 1), PartialShape{1, 4, 2}),
|
||||
std::make_tuple(PartialShape{3, 6, {7, 9}, 1, {2, 8}, 4},
|
||||
std::vector<int64_t>{2, 2},
|
||||
PartialShape{3, 6, {7, 9}, 1, {4, 16}, 8}),
|
||||
std::make_tuple(PartialShape{-1, -1, -1}, std::vector<int64_t>{2, 1, 2, 3, 4}, PartialShape{2, 1, -1, -1, -1})),
|
||||
PrintToStringParamName());
|
||||
|
||||
TEST_P(TileTest, default_ctor) {
|
||||
constexpr auto dt = element::f16;
|
||||
const auto data = make_shared<op::Parameter>(dt, shape_in);
|
||||
const auto repeats = op::Constant::create(element::i64, Shape{repeats_val.size()}, repeats_val);
|
||||
|
||||
const auto op = make_op();
|
||||
op->set_arguments(OutputVector{data, repeats});
|
||||
op->validate_and_infer_types();
|
||||
|
||||
EXPECT_EQ(op->get_element_type(), dt);
|
||||
EXPECT_EQ(op->get_output_partial_shape(0), exp_shape);
|
||||
}
|
||||
|
||||
TEST_P(TileTest, propagate_shapes_and_labels) {
|
||||
ASSERT_TRUE(shape_in.rank().is_static()) << "Cannot test labels propagation for dynamic rank.";
|
||||
|
||||
constexpr auto dt = element::f32;
|
||||
const auto data = make_shared<op::Parameter>(dt, shape_in);
|
||||
const auto repeats = op::Constant::create(element::i64, Shape{repeats_val.size()}, repeats_val);
|
||||
|
||||
const auto op = make_op(data, repeats);
|
||||
|
||||
EXPECT_EQ(op->get_element_type(), dt);
|
||||
EXPECT_EQ(op->get_output_size(), 1);
|
||||
EXPECT_EQ(op->get_output_partial_shape(0), exp_shape);
|
||||
EXPECT_EQ(get_shape_labels(op->get_output_partial_shape(0)), get_exp_labels());
|
||||
}
|
||||
|
@ -4,10 +4,7 @@
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <openvino/op/ops.hpp>
|
||||
#include <openvino/op/parameter.hpp>
|
||||
#include <utils/shape_inference/shape_inference.hpp>
|
||||
#include <utils/shape_inference/static_shape.hpp>
|
||||
#include "utils.hpp"
|
||||
|
||||
using namespace ov;
|
||||
using namespace ov::intel_cpu;
|
||||
@ -49,3 +46,19 @@ TEST(StaticShapeInferenceTest, TileSmallDataRankTest) {
|
||||
shape_inference(tile.get(), static_input_shapes, static_output_shapes);
|
||||
ASSERT_EQ(static_output_shapes[0], StaticShape({3, 32, 10}));
|
||||
}
|
||||
|
||||
// Static shape inference when the repeats values come from the constant-data map
// rather than a Constant node; data rank (2) is smaller than repeats rank (3).
TEST(StaticShapeInferenceTest, TileSmallDataRankTestRepeatsInConstMap) {
    const auto data = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1});
    const auto repeats_param = std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1});
    const auto tile = std::make_shared<op::v0::Tile>(data, repeats_param);

    int32_t repeats[] = {3, 4, 1};
    const std::map<size_t, std::shared_ptr<ngraph::runtime::HostTensor>> constant_data{
        {1, std::make_shared<HostTensor>(element::i32, Shape{3}, repeats)}};

    // Test Static Shape
    ShapeVector input_shapes{StaticShape{8, 10}, StaticShape{3}};
    ShapeVector output_shapes{StaticShape{}};
    shape_inference(tile.get(), input_shapes, output_shapes, constant_data);

    ASSERT_EQ(output_shapes.front(), StaticShape({3, 32, 10}));
}
|
Loading…
Reference in New Issue
Block a user