Moved core tests from root folder to new API (#19381)
This commit is contained in:
parent a45e5e03c5
commit 39b75fd213
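The hunks below migrate these tests from the deprecated ngraph API (op::Parameter, Function, opsetN:: aliases) to the new OpenVINO 2.0 API (versioned ov::op::vN:: classes, ov::Model, op::util::Variable). As a rough orientation only — this sketch is not part of the commit, and its includes and names are inferred from the new-API lines in the diff — a test written in the new style looks roughly like this:

#include <gtest/gtest.h>

#include "openvino/core/model.hpp"
#include "openvino/op/matmul.hpp"
#include "openvino/op/parameter.hpp"

// Minimal sketch of the new-API style the tests are converted to:
// versioned ov::op::v0::* classes replace the old op::* aliases and
// ov::Model replaces ngraph::Function.
TEST(build_graph_example, new_api_style) {
    auto arg0 = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{7, 3});
    auto arg1 = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{32, 7});
    // MatMul of {32, 7} x {7, 3} -> {32, 3}; same graph the old code built with op::MatMul.
    auto dot = std::make_shared<ov::op::v0::MatMul>(arg1, arg0);
    auto model = std::make_shared<ov::Model>(dot, ov::ParameterVector{arg0, arg1});
    EXPECT_EQ(model->get_parameters().size(), 2);
}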
@@ -4,6 +4,8 @@
#include "common_test_utils/all_close_f.hpp"
#include <gtest/gtest.h>
#include <algorithm>
#include <bitset>
#include <cmath>
@@ -11,9 +13,6 @@
#include <sstream>
#include "common_test_utils/float_util.hpp"
#include "gtest/gtest.h"
#include "ngraph/env_util.hpp"
#include "ngraph/ngraph.hpp"
using namespace std;
@@ -2,15 +2,12 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "openvino/core/deprecated.hpp"
OPENVINO_SUPPRESS_DEPRECATED_START
#include "openvino/core/any.hpp"
OPENVINO_SUPPRESS_DEPRECATED_END
#include <gtest/gtest.h>
#include <string>
#include "gtest/gtest.h"
#include "openvino/core/runtime_attribute.hpp"
using namespace ov;
@@ -2,19 +2,19 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "ngraph/type/bfloat16.hpp"
#include "openvino/core/type/bfloat16.hpp"
#include <gtest/gtest.h>
#include <climits>
#include <random>
#include "common_test_utils/float_util.hpp"
#include "gtest/gtest.h"
#include "ngraph/log.hpp"
#include "ngraph/runtime/aligned_buffer.hpp"
#include "openvino/util/log.hpp"
using namespace std;
using namespace ngraph;
using namespace ov;
template <typename T>
std::string to_hex(T value) {
@@ -140,7 +140,7 @@ TEST(bfloat16, numeric_limits) {
}
TEST(benchmark, bfloat16) {
NGRAPH_SUPPRESS_DEPRECATED_START
OPENVINO_SUPPRESS_DEPRECATED_START
size_t buffer_size = 128 * 3 * 224 * 224;
ngraph::runtime::AlignedBuffer data(buffer_size * sizeof(float), 4096);
float* f = static_cast<float*>(data.get_ptr());
@@ -155,7 +155,7 @@ TEST(benchmark, bfloat16) {
{
ngraph::runtime::AlignedBuffer bf_data(buffer_size * sizeof(bfloat16), 4096);
bfloat16* p = static_cast<bfloat16*>(bf_data.get_ptr());
stopwatch timer;
ngraph::stopwatch timer;
timer.start();
for (size_t i = 0; i < buffer_size; ++i) {
p[i] = bfloat16(f[i]);
@@ -167,7 +167,7 @@ TEST(benchmark, bfloat16) {
{
ngraph::runtime::AlignedBuffer bf_data(buffer_size * sizeof(bfloat16), 4096);
bfloat16* p = static_cast<bfloat16*>(bf_data.get_ptr());
stopwatch timer;
ngraph::stopwatch timer;
timer.start();
for (size_t i = 0; i < buffer_size; ++i) {
p[i] = bfloat16::truncate(f[i]);
@@ -179,7 +179,7 @@ TEST(benchmark, bfloat16) {
{
ngraph::runtime::AlignedBuffer bf_data(buffer_size * sizeof(bfloat16), 4096);
bfloat16* p = static_cast<bfloat16*>(bf_data.get_ptr());
stopwatch timer;
ngraph::stopwatch timer;
timer.start();
for (size_t i = 0; i < buffer_size; ++i) {
p[i] = bfloat16::round_to_nearest(f[i]);
@@ -191,7 +191,7 @@ TEST(benchmark, bfloat16) {
{
ngraph::runtime::AlignedBuffer bf_data(buffer_size * sizeof(bfloat16), 4096);
bfloat16* p = static_cast<bfloat16*>(bf_data.get_ptr());
stopwatch timer;
ngraph::stopwatch timer;
timer.start();
for (size_t i = 0; i < buffer_size; ++i) {
p[i] = bfloat16::round_to_nearest_even(f[i]);
@@ -199,7 +199,7 @@ TEST(benchmark, bfloat16) {
timer.stop();
OPENVINO_INFO << "float to bfloat16 round to nearest even " << timer.get_milliseconds() << "ms";
}
NGRAPH_SUPPRESS_DEPRECATED_END
OPENVINO_SUPPRESS_DEPRECATED_END
}
TEST(bfloat16, assigns) {
@@ -2,36 +2,47 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <gtest/gtest.h>
#include <memory>
#include "common_test_utils/graph_comparator.hpp"
#include "common_test_utils/test_tools.hpp"
#include "common_test_utils/type_prop.hpp"
#include "gtest/gtest.h"
#include "ngraph/builder/autobroadcast.hpp"
#include "ngraph/file_util.hpp"
#include "ngraph/ngraph.hpp"
#include "ngraph/opsets/opset5.hpp"
#include "ngraph/opsets/opset7.hpp"
NGRAPH_SUPPRESS_DEPRECATED_START
#include "ngraph/graph_util.hpp"
#include "openvino/core/except.hpp"
#include "openvino/op/abs.hpp"
#include "openvino/op/acos.hpp"
#include "openvino/op/add.hpp"
#include "openvino/op/assign.hpp"
#include "openvino/op/concat.hpp"
#include "openvino/op/matmul.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/op/read_value.hpp"
#include "openvino/op/relu.hpp"
#include "openvino/op/reshape.hpp"
#include "openvino/op/result.hpp"
#include "openvino/op/split.hpp"
#include "openvino/op/squeeze.hpp"
#include "openvino/op/util/variable.hpp"
using namespace std;
using namespace ngraph;
using namespace ov;
TEST(build_graph, build_simple) {
// Function with 4 parameters
auto arg0 = make_shared<op::Parameter>(element::f32, Shape{7, 3});
auto arg1 = make_shared<op::Parameter>(element::f32, Shape{3});
auto arg2 = make_shared<op::Parameter>(element::f32, Shape{32, 7});
auto arg3 = make_shared<op::Parameter>(element::f32, Shape{32, 7});
auto broadcast_1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto b1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto dot = make_shared<op::MatMul>(arg2, arg0);
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{7, 3});
auto arg1 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{3});
auto arg2 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{32, 7});
auto arg3 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{32, 7});
auto broadcast_1 = ngraph::builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto b1 = ngraph::builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto dot = make_shared<op::v0::MatMul>(arg2, arg0);
ASSERT_EQ(dot->input_value(0).get_node_shared_ptr(), arg2);
ASSERT_EQ(dot->input_value(1).get_node_shared_ptr(), arg0);
auto cluster_0 = make_shared<Function>(dot, ParameterVector{arg0, arg1, arg2, arg3});
auto cluster_0 = make_shared<Model>(dot, ParameterVector{arg0, arg1, arg2, arg3});
ASSERT_EQ(cluster_0->get_output_op(0)->input_value(0).get_node_shared_ptr(), dot);
}
@@ -40,16 +51,16 @@ TEST(build_graph, literal) {
// float scalar from a float
// auto float0 = FloatConstant::make(3.0);
vector<float> float_t{3.0};
auto float0 = make_shared<op::Constant>(element::f32, Shape{1}, float_t);
auto float0 = make_shared<op::v0::Constant>(element::f32, Shape{1}, float_t);
ASSERT_EQ(float0->get_vector<float>(), std::vector<float>{3.0});
ASSERT_EQ(float0->get_element_type(), element::f32);
ASSERT_EQ(float0->get_shape(), Shape{1});
auto d = make_shared<op::MatMul>(float0, float0);
auto d = make_shared<op::v0::MatMul>(float0, float0);
ASSERT_EQ(d->input_values().at(0).get_node_shared_ptr(), float0);
ASSERT_EQ(d->input_values().at(1).get_node_shared_ptr(), float0);
vector<int32_t> int32{3};
auto int32_0 = make_shared<op::Constant>(element::i32, Shape{}, int32);
auto int32_0 = make_shared<op::v0::Constant>(element::i32, Shape{}, int32);
ASSERT_EQ(int32_0->get_vector<int32_t>(), std::vector<int>{3});
ASSERT_EQ(int32_0->get_element_type(), element::i32);
ASSERT_EQ(int32_0->get_shape(), Shape{});
@@ -60,7 +71,7 @@ TEST(build_graph, tensor) {
// auto float0 = FloatConstant::make(3.0);
Shape shape{2, 3};
vector<float> float_t(shape_size(shape), 0);
auto float0 = make_shared<op::Constant>(element::f32, shape, float_t);
auto float0 = make_shared<op::v0::Constant>(element::f32, shape, float_t);
ASSERT_EQ(float0->get_element_type(), element::f32);
ASSERT_EQ(float0->get_shape(), shape);
auto d = make_shared<op::v1::Add>(float0, float0);
@@ -69,7 +80,7 @@ TEST(build_graph, tensor) {
Shape ishape{3, 5};
vector<int32_t> idata(shape_size(ishape), 0);
auto int32_0 = make_shared<op::Constant>(element::i32, ishape, idata);
auto int32_0 = make_shared<op::v0::Constant>(element::i32, ishape, idata);
ASSERT_EQ(int32_0->get_element_type(), element::i32);
ASSERT_EQ(int32_0->get_shape(), ishape);
}
@@ -77,21 +88,21 @@ TEST(build_graph, tensor) {
// Check functions with undeclared parameters
TEST(build_graph, function_undeclared_parameters) {
// Function with 4 parameters
auto arg0 = make_shared<op::Parameter>(element::f32, Shape{7, 3});
auto arg1 = make_shared<op::Parameter>(element::f32, Shape{3});
auto arg2 = make_shared<op::Parameter>(element::f32, Shape{32, 7});
auto arg3 = make_shared<op::Parameter>(element::f32, Shape{32, 7});
auto broadcast_1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto b1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto dot = make_shared<op::MatMul>(arg2, arg0);
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{7, 3});
auto arg1 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{3});
auto arg2 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{32, 7});
auto arg3 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{32, 7});
auto broadcast_1 = ngraph::builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto b1 = ngraph::builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto dot = make_shared<op::v0::MatMul>(arg2, arg0);
ASSERT_EQ(dot->input_values()[0].get_node_shared_ptr(), arg2);
ASSERT_EQ(dot->input_values()[1].get_node_shared_ptr(), arg0);
try {
auto f = make_shared<Function>(dot, ParameterVector{arg0, arg1, arg3});
auto f = make_shared<Model>(dot, ParameterVector{arg0, arg1, arg3});
f->get_ops();
// Should have thrown, so fail if it didn't
FAIL() << "Undeclared parameter not detected.";
} catch (const ngraph_error& error) {
} catch (const ov::Exception& error) {
EXPECT_HAS_SUBSTRING(error.what(), std::string("Model references undeclared parameter"));
} catch (...) {
FAIL() << "Model construction failed for unexpected reason";
@@ -102,13 +113,13 @@ TEST(build_graph, function_undeclared_parameters) {
TEST(build_graph, no_arg_construction) {
// The ops
// Parameters aren't converted yet
auto arg0 = make_shared<op::Parameter>(element::f32, Shape{7});
auto arg1 = make_shared<op::Parameter>(element::f32, Shape{7});
auto arg2 = make_shared<op::Parameter>(element::f32, Shape{7});
auto arg3 = make_shared<op::Parameter>(element::f32, Shape{7});
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{7});
auto arg1 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{7});
auto arg2 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{7});
auto arg3 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{7});
auto add0 = make_shared<op::v1::Add>();
auto abs0 = make_shared<op::Abs>();
auto acos0 = make_shared<op::Acos>();
auto abs0 = make_shared<op::v0::Abs>();
auto acos0 = make_shared<op::v0::Acos>();
auto add1 = make_shared<op::v1::Add>();
add0->set_argument(1, arg0);
add0->set_argument(0, arg1);
@@ -117,39 +128,41 @@ TEST(build_graph, no_arg_construction) {
add1->set_argument(0, acos0);
add1->set_argument(1, abs0);
NodeVector ops{arg0, arg1, add0, abs0, acos0, add1};
validate_nodes_and_infer_types(ops);
OPENVINO_SUPPRESS_DEPRECATED_START
ngraph::validate_nodes_and_infer_types(ops);
OPENVINO_SUPPRESS_DEPRECATED_END
ASSERT_EQ(add1->get_output_shape(0), Shape{7});
}
TEST(build_graph, multi_output_split_dynamic) {
const auto data = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
const auto data = make_shared<ov::op::v0::Parameter>(element::f32, PartialShape::dynamic());
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
const auto split = make_shared<op::v1::Split>(data, axis, 2);
auto abs = make_shared<op::Abs>(split->output(1));
auto abs = make_shared<op::v0::Abs>(split->output(1));
EXPECT_TRUE(abs->get_output_partial_shape(0).same_scheme(PartialShape::dynamic()));
auto new_parameter = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto new_parameter = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
split->input(0).replace_source_output(new_parameter->output(0));
auto f = make_shared<Function>(abs, ParameterVector{new_parameter});
auto f = make_shared<Model>(abs, ParameterVector{new_parameter});
f->validate_nodes_and_infer_types();
EXPECT_EQ(abs->get_shape(), (Shape{2, 2}));
}
TEST(build_graph, function_revalidate_and_infer) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4, 6, 8});
auto pattern = op::Constant::create(element::i64, Shape{6}, {1, 3, 16, 2, 2, 2});
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4, 6, 8});
auto pattern = op::v0::Constant::create(element::i64, Shape{6}, {1, 3, 16, 2, 2, 2});
auto r = make_shared<op::v1::Reshape>(arg, pattern, true);
auto relu = make_shared<op::Relu>(r);
auto f = make_shared<Function>(relu, ParameterVector{arg});
auto relu = make_shared<op::v0::Relu>(r);
auto f = make_shared<Model>(relu, ParameterVector{arg});
EXPECT_EQ(r->get_output_element_type(0), element::f32);
EXPECT_EQ(r->get_output_shape(0), (Shape{1, 3, 16, 2, 2, 2}));
EXPECT_EQ(f->get_output_shape(0), (Shape{1, 3, 16, 2, 2, 2}));
auto new_pattern = op::Constant::create(element::i64, Shape{2}, {32, 12});
auto new_pattern = op::v0::Constant::create(element::i64, Shape{2}, {32, 12});
r->input(1).replace_source_output(new_pattern->output(0));
f->validate_nodes_and_infer_types();
@@ -167,17 +180,17 @@ TEST(build_graph, default_output_checks) {
}
TEST(build_graph, build_graph_with_sink) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<ov::op::v3::ReadValue>(init_const, "v0");
std::vector<shared_ptr<Node>> args = {arg, read};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto assign = make_shared<opset5::Assign>(crop, "v0");
auto assign = make_shared<op::v3::Assign>(crop, "v0");
auto f = make_shared<Function>(ResultVector({res}), SinkVector({assign}), ParameterVector{arg});
auto f = make_shared<Model>(ResultVector({res}), SinkVector({assign}), ParameterVector{arg});
SinkVector sinks = f->get_sinks();
EXPECT_EQ(sinks.size(), 1);
@@ -187,17 +200,17 @@ TEST(build_graph, build_graph_with_sink) {
}
TEST(build_graph, build_graph_with_sink_output_ctor) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<op::v3::ReadValue>(init_const, "v0");
std::vector<shared_ptr<Node>> args = {arg, read};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto assign = make_shared<opset5::Assign>(crop, "v0");
auto assign = make_shared<op::v3::Assign>(crop, "v0");
auto f = make_shared<Function>(OutputVector({pattern->output(0)}), SinkVector({assign}), ParameterVector{arg});
auto f = make_shared<Model>(OutputVector({pattern->output(0)}), SinkVector({assign}), ParameterVector{arg});
SinkVector sinks = f->get_sinks();
EXPECT_EQ(sinks.size(), 1);
@@ -207,17 +220,17 @@ TEST(build_graph, build_graph_with_sink_output_ctor) {
}
TEST(build_graph, build_graph_with_add_sink) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<op::v3::ReadValue>(init_const, "v0");
std::vector<shared_ptr<Node>> args = {arg, read};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto assign = make_shared<opset5::Assign>(crop, "v0");
auto assign = make_shared<op::v3::Assign>(crop, "v0");
auto f = make_shared<Function>(ResultVector({res}), ParameterVector{arg});
auto f = make_shared<Model>(ResultVector({res}), ParameterVector{arg});
NodeVector nodes = f->get_ops();
EXPECT_EQ(nodes.size(), 5);
@@ -233,17 +246,17 @@ TEST(build_graph, build_graph_with_add_sink) {
}
TEST(build_graph, build_graph_with_wrong_remove_sink) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<ov::op::v3::ReadValue>(init_const, "v0");
std::vector<shared_ptr<Node>> args = {arg, read};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto assign = make_shared<opset5::Assign>(crop, "v0");
auto assign = make_shared<op::v3::Assign>(crop, "v0");
auto f = make_shared<Function>(ResultVector({res}), SinkVector({assign}), ParameterVector{arg});
auto f = make_shared<Model>(ResultVector({res}), SinkVector({assign}), ParameterVector{arg});
SinkVector sinks = f->get_sinks();
EXPECT_EQ(sinks.size(), 1);
@@ -256,17 +269,17 @@ TEST(build_graph, build_graph_with_wrong_remove_sink) {
}
TEST(build_graph, build_graph_with_remove_sink) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<op::v3::ReadValue>(init_const, "v0");
std::vector<shared_ptr<Node>> args = {arg, read};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto assign = make_shared<opset5::Assign>(crop, "v0");
auto assign = make_shared<op::v3::Assign>(crop, "v0");
auto f = make_shared<Function>(ResultVector({res}), SinkVector({assign}), ParameterVector{arg});
auto f = make_shared<Model>(ResultVector({res}), SinkVector({assign}), ParameterVector{arg});
pattern->input(1).replace_source_output(arg);
@@ -281,17 +294,17 @@ TEST(build_graph, build_graph_with_remove_sink) {
}
TEST(build_graph, build_graph_with_add_result) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<op::v3::ReadValue>(init_const, "v0");
std::vector<shared_ptr<Node>> args = {arg, read};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto res2 = make_shared<op::Result>(crop);
auto res2 = make_shared<op::v0::Result>(crop);
auto f = make_shared<Function>(ResultVector({res}), ParameterVector{arg});
auto f = make_shared<Model>(ResultVector({res}), ParameterVector{arg});
NodeVector nodes = f->get_ops();
EXPECT_EQ(nodes.size(), 5);
@@ -307,17 +320,17 @@ TEST(build_graph, build_graph_with_add_result) {
}
TEST(build_graph, build_graph_with_remove_result) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<op::v3::ReadValue>(init_const, "v0");
std::vector<shared_ptr<Node>> args = {arg, read};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto res2 = make_shared<op::Result>(crop);
auto res2 = make_shared<op::v0::Result>(crop);
auto f = make_shared<Function>(ResultVector({res, res2}), ParameterVector{arg});
auto f = make_shared<Model>(ResultVector({res, res2}), ParameterVector{arg});
NodeVector nodes = f->get_ops();
EXPECT_EQ(nodes.size(), 8);
@@ -332,18 +345,18 @@ TEST(build_graph, build_graph_with_remove_result) {
}
TEST(build_graph, build_graph_with_add_parameter) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<op::Parameter>(element::f32, Shape{2, 2});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 2});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<op::v3::ReadValue>(init_const, "v0");
std::vector<shared_ptr<Node>> args = {arg, read};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto res2 = make_shared<op::Result>(crop);
auto res2 = make_shared<op::v0::Result>(crop);
auto f = make_shared<Function>(ResultVector({res, res2}), ParameterVector{arg});
auto f = make_shared<Model>(ResultVector({res, res2}), ParameterVector{arg});
NodeVector nodes = f->get_ops();
EXPECT_EQ(nodes.size(), 8);
@@ -361,18 +374,18 @@ TEST(build_graph, build_graph_with_add_parameter) {
}
TEST(build_graph, build_graph_with_remove_parameter) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<op::Parameter>(element::f32, Shape{2, 2});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 2});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<op::v3::ReadValue>(init_const, "v0");
std::vector<shared_ptr<Node>> args = {arg, arg2};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto res2 = make_shared<op::Result>(crop);
auto res2 = make_shared<op::v0::Result>(crop);
auto f = make_shared<Function>(ResultVector({res, res2}), ParameterVector{arg, arg2});
auto f = make_shared<Model>(ResultVector({res, res2}), ParameterVector{arg, arg2});
NodeVector nodes = f->get_ops();
EXPECT_EQ(nodes.size(), 7);
@@ -388,20 +401,20 @@ TEST(build_graph, build_graph_with_remove_parameter) {
}
TEST(build_graph, build_graph_with_remove_parameter_indexing) {
auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<op::Parameter>(element::f32, Shape{2, 2});
auto init_const = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<opset5::ReadValue>(init_const, "v0");
auto assign = make_shared<opset5::Assign>(read, "v0");
auto arg = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 2});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read = make_shared<op::v3::ReadValue>(init_const, "v0");
auto assign = make_shared<op::v3::Assign>(read, "v0");
assign->add_control_dependency(read);
std::vector<shared_ptr<Node>> args = {arg2, arg};
auto pattern = make_shared<op::Concat>(args, 1);
auto res = make_shared<op::Result>(pattern);
const auto axis = op::Constant::create(element::i64, Shape{}, {1});
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto res2 = make_shared<op::Result>(crop);
auto res2 = make_shared<op::v0::Result>(crop);
auto f = make_shared<Function>(ResultVector({res, res2}), ParameterVector{arg2, arg});
auto f = make_shared<Model>(ResultVector({res, res2}), ParameterVector{arg2, arg});
NodeVector nodes = f->get_ops();
EXPECT_EQ(nodes.size(), 7);
@@ -421,144 +434,144 @@ TEST(build_graph, build_graph_with_remove_parameter_indexing) {
TEST(build_graph, build_graph_parameters_autodetection) {
// Function with 4 parameters
using namespace opset7;
auto arg0 = make_shared<Parameter>(element::f32, Shape{7, 3});
auto arg1 = make_shared<Parameter>(element::f32, Shape{3});
auto arg2 = make_shared<Parameter>(element::f32, Shape{32, 7});
auto arg3 = make_shared<Parameter>(element::f32, Shape{32, 7});
auto broadcast_1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto b1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto dot = make_shared<MatMul>(arg2, arg0);
auto arg0 = make_shared<op::v0::Parameter>(element::f32, Shape{7, 3});
auto arg1 = make_shared<op::v0::Parameter>(element::f32, Shape{3});
auto arg2 = make_shared<op::v0::Parameter>(element::f32, Shape{32, 7});
auto arg3 = make_shared<op::v0::Parameter>(element::f32, Shape{32, 7});
auto broadcast_1 = ngraph::builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto b1 = ngraph::builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
auto dot = make_shared<op::v0::MatMul>(arg2, arg0);
auto f = make_shared<Function>(OutputVector{dot});
auto f = make_shared<Model>(OutputVector{dot});
EXPECT_EQ(f->get_parameters().size(), 2);
}
TEST(build_graph, build_graph_parameters_variables_autodetection) {
using namespace opset7;
auto arg = make_shared<Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<Parameter>(element::f32, Shape{2, 2});
auto init_const = Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto arg = make_shared<op::v0::Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<op::v0::Parameter>(element::f32, Shape{2, 2});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto variable = make_shared<Variable>(VariableInfo{PartialShape::dynamic(), element::dynamic, "v0"});
auto read = make_shared<ReadValue>(init_const, variable);
auto assign = make_shared<Assign>(read, variable);
auto variable =
make_shared<op::util::Variable>(op::util::VariableInfo{PartialShape::dynamic(), element::dynamic, "v0"});
auto read = make_shared<op::v6::ReadValue>(init_const, variable);
auto assign = make_shared<op::v6::Assign>(read, variable);
assign->add_control_dependency(read);
std::vector<shared_ptr<Node>> args = {arg2, arg};
auto pattern = make_shared<Concat>(args, 1);
auto res = make_shared<Result>(pattern);
const auto axis = Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<Split>(pattern, axis, 3);
auto res2 = make_shared<Result>(crop);
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto res2 = make_shared<op::v0::Result>(crop);
auto f = make_shared<Function>(OutputVector{res, res2}, SinkVector{assign});
auto f = make_shared<Model>(OutputVector{res, res2}, SinkVector{assign});
NodeVector nodes = f->get_ops();
EXPECT_EQ(nodes.size(), 10);
ParameterVector params = f->get_parameters();
EXPECT_EQ(params.size(), 2);
VariableVector variables = f->get_variables();
op::util::VariableVector variables = f->get_variables();
EXPECT_EQ(variables.size(), 1);
}
TEST(build_graph, build_graph_variables_ctors) {
using namespace opset7;
auto arg = make_shared<Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<Parameter>(element::f32, Shape{2, 2});
auto init_const = Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto arg = make_shared<op::v0::Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<op::v0::Parameter>(element::f32, Shape{2, 2});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto variable = make_shared<Variable>(VariableInfo{PartialShape::dynamic(), element::dynamic, "v0"});
auto read = make_shared<ReadValue>(init_const, variable);
auto assign = make_shared<Assign>(read, variable);
auto variable =
make_shared<op::util::Variable>(op::util::VariableInfo{PartialShape::dynamic(), element::dynamic, "v0"});
auto read = make_shared<op::v6::ReadValue>(init_const, variable);
auto assign = make_shared<op::v6::Assign>(read, variable);
assign->add_control_dependency(read);
std::vector<shared_ptr<Node>> args = {arg2, arg};
auto pattern = make_shared<Concat>(args, 1);
auto res = make_shared<Result>(pattern);
const auto axis = Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<Split>(pattern, axis, 3);
auto res2 = make_shared<Result>(crop);
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto res2 = make_shared<op::v0::Result>(crop);
{
auto f = make_shared<Function>(OutputVector{res, res2},
auto f = make_shared<Model>(OutputVector{res, res2},
SinkVector{assign},
ParameterVector{arg, arg2},
VariableVector{variable});
op::util::VariableVector{variable});
NodeVector nodes = f->get_ops();
EXPECT_EQ(nodes.size(), 10);
ParameterVector params = f->get_parameters();
EXPECT_EQ(params.size(), 2);
VariableVector variables = f->get_variables();
op::util::VariableVector variables = f->get_variables();
EXPECT_EQ(variables.size(), 1);
}
// autodetect variables
{
auto f = make_shared<Function>(OutputVector{res, res2}, SinkVector{assign}, ParameterVector{arg, arg2});
auto f = make_shared<Model>(OutputVector{res, res2}, SinkVector{assign}, ParameterVector{arg, arg2});
NodeVector nodes = f->get_ops();
EXPECT_EQ(nodes.size(), 10);
ParameterVector params = f->get_parameters();
EXPECT_EQ(params.size(), 2);
VariableVector variables = f->get_variables();
op::util::VariableVector variables = f->get_variables();
EXPECT_EQ(variables.size(), 1);
}
}
TEST(build_graph, build_graph_unregistred_variables) {
using namespace opset7;
auto arg = make_shared<Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<Parameter>(element::f32, Shape{2, 2});
auto init_const = Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto arg = make_shared<op::v0::Parameter>(element::f32, Shape{2, 4});
auto arg2 = make_shared<op::v0::Parameter>(element::f32, Shape{2, 2});
auto init_const = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto variable = make_shared<Variable>(VariableInfo{PartialShape::dynamic(), element::dynamic, "v0"});
auto variable_2 = make_shared<Variable>(VariableInfo{PartialShape::dynamic(), element::dynamic, "v1"});
auto read = make_shared<ReadValue>(init_const, variable);
auto read_2 = make_shared<ReadValue>(init_const, variable_2);
auto assign = make_shared<Assign>(read, variable);
auto assign_2 = make_shared<Assign>(read_2, variable_2);
auto variable =
make_shared<op::util::Variable>(op::util::VariableInfo{PartialShape::dynamic(), element::dynamic, "v0"});
auto variable_2 =
make_shared<op::util::Variable>(op::util::VariableInfo{PartialShape::dynamic(), element::dynamic, "v1"});
auto read = make_shared<op::v6::ReadValue>(init_const, variable);
auto read_2 = make_shared<op::v6::ReadValue>(init_const, variable_2);
auto assign = make_shared<op::v6::Assign>(read, variable);
auto assign_2 = make_shared<op::v6::Assign>(read_2, variable_2);
assign->add_control_dependency(read);
std::vector<shared_ptr<Node>> args = {arg2, arg};
auto pattern = make_shared<Concat>(args, 1);
auto res = make_shared<Result>(pattern);
const auto axis = Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<Split>(pattern, axis, 3);
auto res2 = make_shared<Result>(crop);
auto pattern = make_shared<op::v0::Concat>(args, 1);
auto res = make_shared<op::v0::Result>(pattern);
const auto axis = op::v0::Constant::create(element::i64, Shape{}, {1});
auto crop = make_shared<op::v1::Split>(pattern, axis, 3);
auto res2 = make_shared<op::v0::Result>(crop);
EXPECT_ANY_THROW(const auto unused = make_shared<Function>(OutputVector{res, res2},
EXPECT_ANY_THROW(const auto unused = make_shared<Model>(OutputVector{res, res2},
SinkVector{assign, assign_2},
ParameterVector{arg, arg2},
VariableVector{variable}));
op::util::VariableVector{variable}));
}
TEST(build_graph, build_graph_with_sinks_compare) {
shared_ptr<Function> f0, f1;
shared_ptr<Model> f0, f1;
{
auto init_const0 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto init_const1 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<opset5::ReadValue>(init_const0, "v0");
auto read1 = make_shared<opset5::ReadValue>(init_const1, "v1");
auto init_const0 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto init_const1 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<op::v3::ReadValue>(init_const0, "v0");
auto read1 = make_shared<op::v3::ReadValue>(init_const1, "v1");
std::vector<shared_ptr<Node>> args = {read0, read1};
auto add = make_shared<opset5::Add>(read0, read1);
auto assign0 = make_shared<opset5::Assign>(add, "v0");
auto assign1 = make_shared<opset5::Assign>(add, "v1");
auto add = make_shared<op::v1::Add>(read0, read1);
auto assign0 = make_shared<op::v3::Assign>(add, "v0");
auto assign1 = make_shared<op::v3::Assign>(add, "v1");
f0 = make_shared<Function>(ResultVector({}), SinkVector({assign0, assign1}), ParameterVector{});
f0 = make_shared<Model>(ResultVector({}), SinkVector({assign0, assign1}), ParameterVector{});
}
{
auto init_const0 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto init_const1 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<opset5::ReadValue>(init_const0, "v0");
auto read1 = make_shared<opset5::ReadValue>(init_const1, "v1");
auto add = make_shared<opset5::Add>(read0, read1);
auto squeeze = make_shared<opset5::Squeeze>(add);
auto assign0 = make_shared<opset5::Assign>(squeeze, "v0");
auto assign1 = make_shared<opset5::Assign>(add, "v1");
auto init_const0 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto init_const1 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<op::v3::ReadValue>(init_const0, "v0");
auto read1 = make_shared<op::v3::ReadValue>(init_const1, "v1");
auto add = make_shared<op::v1::Add>(read0, read1);
auto squeeze = make_shared<op::v0::Squeeze>(add);
auto assign0 = make_shared<op::v3::Assign>(squeeze, "v0");
auto assign1 = make_shared<op::v3::Assign>(add, "v1");
f1 = make_shared<Function>(ResultVector({}), SinkVector({assign0, assign1}), ParameterVector{});
f1 = make_shared<Model>(ResultVector({}), SinkVector({assign0, assign1}), ParameterVector{});
}
const auto fc = FunctionsComparator::with_default()
.enable(FunctionsComparator::ATTRIBUTES)
@@ -568,41 +581,41 @@ TEST(build_graph, build_graph_with_sinks_compare) {
}
TEST(build_graph, build_graph_with_sinks_compare_reads) {
shared_ptr<Function> f0, f1;
shared_ptr<Model> f0, f1;
{
auto variable0 = make_shared<Variable>(VariableInfo{Shape{2, 2}, element::f32, "v0"});
auto variable1 = make_shared<Variable>(VariableInfo{Shape{2, 2}, element::f32, "v1"});
auto variable0 = make_shared<op::util::Variable>(op::util::VariableInfo{Shape{2, 2}, element::f32, "v0"});
auto variable1 = make_shared<op::util::Variable>(op::util::VariableInfo{Shape{2, 2}, element::f32, "v1"});
auto init_const0 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<opset7::ReadValue>(init_const0, variable0);
auto assign0 = make_shared<opset7::Assign>(read0, variable0);
auto init_const0 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<op::v6::ReadValue>(init_const0, variable0);
auto assign0 = make_shared<op::v6::Assign>(read0, variable0);
auto init_const1 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read1 = make_shared<opset7::ReadValue>(init_const1, variable1);
auto assign1 = make_shared<opset7::Assign>(read1, variable1);
auto init_const1 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read1 = make_shared<op::v6::ReadValue>(init_const1, variable1);
auto assign1 = make_shared<op::v6::Assign>(read1, variable1);
f0 = make_shared<Function>(ResultVector({}),
f0 = make_shared<Model>(ResultVector({}),
SinkVector({assign0, assign1}),
ParameterVector{},
VariableVector{variable0, variable1});
op::util::VariableVector{variable0, variable1});
}
{
auto variable0 = make_shared<Variable>(VariableInfo{Shape{2, 2}, element::f32, "v0"});
auto variable1 = make_shared<Variable>(VariableInfo{Shape{2, 2}, element::f32, "v1"});
auto variable0 = make_shared<op::util::Variable>(op::util::VariableInfo{Shape{2, 2}, element::f32, "v0"});
auto variable1 = make_shared<op::util::Variable>(op::util::VariableInfo{Shape{2, 2}, element::f32, "v1"});
auto init_const0 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<opset7::ReadValue>(init_const0, variable1);
auto assign0 = make_shared<opset7::Assign>(read0, variable0);
auto init_const0 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<op::v6::ReadValue>(init_const0, variable1);
auto assign0 = make_shared<op::v6::Assign>(read0, variable0);
auto init_const1 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read1 = make_shared<opset7::ReadValue>(init_const1, variable0);
auto assign1 = make_shared<opset7::Assign>(read1, variable1);
auto init_const1 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read1 = make_shared<op::v6::ReadValue>(init_const1, variable0);
auto assign1 = make_shared<op::v6::Assign>(read1, variable1);
f1 = make_shared<Function>(ResultVector({}),
f1 = make_shared<Model>(ResultVector({}),
SinkVector({assign0, assign1}),
ParameterVector{},
VariableVector{variable0, variable1});
op::util::VariableVector{variable0, variable1});
}
const auto fc = FunctionsComparator::with_default()
.enable(FunctionsComparator::ATTRIBUTES)
@@ -612,35 +625,35 @@ TEST(build_graph, build_graph_with_sinks_compare_reads) {
}
TEST(build_graph, build_graph_with_sinks_compare_results) {
shared_ptr<Function> f0, f1;
shared_ptr<Model> f0, f1;
{
auto variable0 = make_shared<Variable>(VariableInfo{Shape{2, 2}, element::f32, "v0"});
auto init_const0 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<opset7::ReadValue>(init_const0, variable0);
auto op = make_shared<opset7::Relu>(read0);
auto assign0 = make_shared<opset7::Assign>(read0, variable0);
auto result0 = make_shared<opset7::Result>(assign0);
auto result1 = make_shared<opset7::Result>(op);
auto variable0 = make_shared<op::util::Variable>(op::util::VariableInfo{Shape{2, 2}, element::f32, "v0"});
auto init_const0 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<op::v6::ReadValue>(init_const0, variable0);
auto op = make_shared<op::v0::Relu>(read0);
auto assign0 = make_shared<op::v6::Assign>(read0, variable0);
auto result0 = make_shared<op::v0::Result>(assign0);
auto result1 = make_shared<op::v0::Result>(op);
f0 = make_shared<Function>(ResultVector({result0, result1}),
f0 = make_shared<Model>(ResultVector({result0, result1}),
SinkVector({assign0}),
ParameterVector{},
VariableVector{variable0});
op::util::VariableVector{variable0});
}
{
auto variable0 = make_shared<Variable>(VariableInfo{Shape{2, 2}, element::f32, "v0"});
auto init_const0 = op::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<opset7::ReadValue>(init_const0, variable0);
auto op = make_shared<opset7::Relu>(read0);
auto assign0 = make_shared<opset7::Assign>(read0, variable0);
auto result0 = make_shared<opset7::Result>(assign0);
auto result1 = make_shared<opset7::Result>(op);
auto variable0 = make_shared<op::util::Variable>(op::util::VariableInfo{Shape{2, 2}, element::f32, "v0"});
auto init_const0 = op::v0::Constant::create(element::f32, Shape{2, 2}, {0, 0, 0, 0});
auto read0 = make_shared<op::v6::ReadValue>(init_const0, variable0);
auto op = make_shared<op::v0::Relu>(read0);
auto assign0 = make_shared<op::v6::Assign>(read0, variable0);
auto result0 = make_shared<op::v0::Result>(assign0);
auto result1 = make_shared<op::v0::Result>(op);
f1 = make_shared<Function>(ResultVector({result0, result1}),
f1 = make_shared<Model>(ResultVector({result0, result1}),
SinkVector({assign0}),
ParameterVector{},
VariableVector{variable0});
op::util::VariableVector{variable0});
}
const auto fc = FunctionsComparator::with_default()
.enable(FunctionsComparator::ATTRIBUTES)
@@ -1,387 +0,0 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "gtest/gtest.h"
#include "ngraph/builder/autobroadcast.hpp"
#include "ngraph/ngraph.hpp"
NGRAPH_SUPPRESS_DEPRECATED_START
using namespace std;
using namespace ngraph;
shared_ptr<op::Parameter> getParamFromShape(const Shape& shape) {
return make_shared<op::Parameter>(element::f32, shape);
}
inline const Shape& getShapeFromParam(const shared_ptr<Node>& node) {
return node->get_shape();
}
// input shapes are equal so AutoBroadcast does nothing
TEST(autobroadcast, no_broadcast_equal) {
Shape s2345{2, 3, 4, 5};
auto lhs = getParamFromShape(s2345);
auto rhs = getParamFromShape(s2345);
auto shaped = builder::numpy_broadcast({lhs, rhs});
const shared_ptr<Node>& ab_lhs = shaped.first;
const shared_ptr<Node>& ab_rhs = shaped.second;
EXPECT_EQ(ab_lhs, lhs); // no change
EXPECT_EQ(getShapeFromParam(ab_lhs), s2345);
EXPECT_EQ(ab_rhs, rhs); // no change
EXPECT_EQ(getShapeFromParam(ab_rhs), s2345);
}
// input shapes are incompatable
TEST(autobroadcast, no_broadcast_incompatable) {
Shape s2345{2, 3, 4, 5};
Shape s6789{6, 7, 8, 9};
auto lhs = getParamFromShape(s2345);
auto rhs = getParamFromShape(s6789);
EXPECT_THROW(builder::numpy_broadcast({lhs, rhs}), builder::numpy_autobroadcast_incompatible_shapes);
}
// basic broadcast test
// 1D to 2D
// lhs broadcast to 2,3
TEST(autobroadcast, normal_broadcast_2d) {
Shape s3{3};
Shape s23{2, 3};
auto lhs = getParamFromShape(s3);
auto rhs = getParamFromShape(s23);
auto shaped = builder::numpy_broadcast({lhs, rhs});
const shared_ptr<Node>& ab_lhs = shaped.first;
const shared_ptr<Node>& ab_rhs = shaped.second;
EXPECT_NE(ab_lhs, lhs);
EXPECT_EQ(getShapeFromParam(ab_lhs), s23);
EXPECT_EQ(ab_rhs, rhs); // no change
EXPECT_EQ(getShapeFromParam(ab_rhs), s23);
}
// basic broadcast test
// 2D to 3D
// lhs broadcast to 2,3,4
TEST(autobroadcast, normal_broadcast_3d) {
Shape s34{3, 4};
Shape s234{2, 3, 4};
auto lhs = getParamFromShape(s34);
auto rhs = getParamFromShape(s234);
auto shaped = builder::numpy_broadcast({lhs, rhs});
const shared_ptr<Node>& ab_lhs = shaped.first;
const shared_ptr<Node>& ab_rhs = shaped.second;
EXPECT_NE(ab_lhs, lhs);
EXPECT_EQ(getShapeFromParam(ab_lhs), s234);
EXPECT_EQ(ab_rhs, rhs); // no change
EXPECT_EQ(getShapeFromParam(ab_rhs), s234);
}
// basic broadcast test
// 3D to 4D
// lhs broadcast to 2,3,4,5
TEST(autobroadcast, normal_broadcast_4d) {
Shape s345{3, 4, 5};
Shape s2345{2, 3, 4, 5};
auto lhs = getParamFromShape(s345);
auto rhs = getParamFromShape(s2345);
auto shaped = builder::numpy_broadcast({lhs, rhs});
const shared_ptr<Node>& ab_lhs = shaped.first;
const shared_ptr<Node>& ab_rhs = shaped.second;
EXPECT_NE(ab_lhs, lhs);
EXPECT_EQ(getShapeFromParam(ab_lhs), s2345);
EXPECT_EQ(ab_rhs, rhs); // no change
EXPECT_EQ(getShapeFromParam(ab_rhs), s2345);
}
// basic reshape and broadcast test
// rhs reshape to 2,3,4 then
// rhs broadcast to 2,3,4,5
TEST(autobroadcast, reshape_1x_broadcast) {
Shape s2345{2, 3, 4, 5};
Shape s2341{2, 3, 4, 1};
auto lhs = getParamFromShape(s2345);
auto rhs = getParamFromShape(s2341);
auto shaped = builder::numpy_broadcast({lhs, rhs});
const shared_ptr<Node>& ab_lhs = shaped.first;
const shared_ptr<Node>& ab_rhs = shaped.second;
EXPECT_EQ(ab_lhs, lhs); // no change
EXPECT_EQ(getShapeFromParam(ab_lhs), s2345);
EXPECT_NE(ab_rhs, rhs);
EXPECT_EQ(getShapeFromParam(ab_rhs), s2345);
}
// same as above, but additionally
// lhs reshape to 2,4,5 then
// lhs broadcast to 2,3,4,5
TEST(autobroadcast, reshape_2x_broadcast) {
Shape s2145{2, 1, 4, 5};
Shape s2341{2, 3, 4, 1};
auto lhs = getParamFromShape(s2145);
auto rhs = getParamFromShape(s2341);
auto shaped = builder::numpy_broadcast({lhs, rhs});
const shared_ptr<Node>& ab_lhs = shaped.first;
const shared_ptr<Node>& ab_rhs = shaped.second;
Shape s2345{2, 3, 4, 5};
EXPECT_NE(ab_lhs, lhs);
EXPECT_EQ(getShapeFromParam(ab_lhs), s2345);
EXPECT_NE(ab_rhs, rhs);
EXPECT_EQ(getShapeFromParam(ab_rhs), s2345);
}
// matching singular dimension on axis 2
// should not require reshape of either lhs or rhs
// i.e. this should be the same as normal broadcast casse
// rhs broadcast to 2,3,1,5
TEST(autobroadcast, broadcast_with_dim1) {
Shape s2315{2, 3, 1, 5};
Shape s315{3, 1, 5};
auto lhs = getParamFromShape(s2315);
auto rhs = getParamFromShape(s315);
auto shaped = builder::numpy_broadcast({lhs, rhs});
const shared_ptr<Node>& ab_lhs = shaped.first;
const shared_ptr<Node>& ab_rhs = shaped.second;
EXPECT_EQ(ab_lhs, lhs); // no change
EXPECT_EQ(getShapeFromParam(ab_lhs), s2315);
EXPECT_NE(ab_rhs, rhs);
EXPECT_EQ(getShapeFromParam(ab_rhs), s2315);
}
// reshape only test
// rhs reshape to 1,3,4,5 with no broadcast
TEST(autobroadcast, broadcast_with_leading_dim1) {
Shape s1345{1, 3, 4, 5};
Shape s345{3, 4, 5};
auto lhs = getParamFromShape(s1345);
auto rhs = getParamFromShape(s345);
auto shaped = builder::numpy_broadcast({lhs, rhs});
const shared_ptr<Node>& ab_lhs = shaped.first;
const shared_ptr<Node>& ab_rhs = shaped.second;
EXPECT_EQ(ab_lhs, lhs); // no change
EXPECT_EQ(getShapeFromParam(ab_lhs), s1345);
EXPECT_NE(ab_rhs, rhs);
EXPECT_EQ(getShapeFromParam(ab_rhs), s1345);
}
TEST(autobroadcast, numpy_broadcast_for_matmul_op_2d) {
const Shape lhs{3, 1, 4, 6};
const Shape rhs{6, 5};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);

const OutputVector result = builder::numpy_broadcast_for_matmul_operation(lhs_node, rhs_node);

EXPECT_EQ(result.at(0).get_shape(), (Shape{3, 1, 4, 6}));
EXPECT_EQ(result.at(1).get_shape(), (Shape{3, 1, 6, 5}));
}

TEST(autobroadcast, numpy_broadcast_for_matmul_op_3d) {
const Shape lhs{3, 1, 4, 6};
const Shape rhs{2, 6, 5};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);

const OutputVector result = builder::numpy_broadcast_for_matmul_operation(lhs_node, rhs_node);

EXPECT_EQ(result.at(0).get_shape(), (Shape{3, 2, 4, 6}));
EXPECT_EQ(result.at(1).get_shape(), (Shape{3, 2, 6, 5}));
}

TEST(autobroadcast, numpy_broadcast_for_matmul_op_nop) {
const Shape lhs{4, 6};
const Shape rhs{6, 5};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);

const OutputVector result = builder::numpy_broadcast_for_matmul_operation(lhs_node, rhs_node);

EXPECT_EQ(result.at(0).get_shape(), (Shape{4, 6}));
EXPECT_EQ(result.at(1).get_shape(), (Shape{6, 5}));
}

TEST(autobroadcast, opset1_legacy_broadcast_scalar) {
const Shape lhs{2, 3, 4, 5};
const Shape rhs{};
size_t start_match_axis{3};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);

const Output<Node> result =
builder::opset1::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);

EXPECT_EQ(result.get_shape(), lhs);
}

TEST(autobroadcast, opset1_legacy_broadcast_1elem_tensor) {
const Shape lhs{2, 3, 4, 5};
const Shape rhs{1, 1, 1};
size_t start_match_axis{1};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);

const Output<Node> result =
builder::opset1::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);

EXPECT_EQ(result.get_shape(), lhs);
}

TEST(autobroadcast, opset1_legacy_broadcast_1d) {
const Shape lhs{2, 3, 4, 5};
const Shape rhs{5};
size_t start_match_axis{3};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);

const Output<Node> result =
builder::opset1::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);

EXPECT_EQ(result.get_shape(), lhs);
}

TEST(autobroadcast, opset1_legacy_broadcast_2d) {
const Shape lhs{2, 3, 4, 5};
const Shape rhs{4, 5};
size_t start_match_axis{2};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);

const Output<Node> result =
builder::opset1::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);

EXPECT_EQ(result.get_shape(), lhs);
}

TEST(autobroadcast, opset1_legacy_broadcast_2d_inside) {
const Shape lhs{2, 3, 4, 5};
const Shape rhs{3, 4};
size_t start_match_axis{1};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);

const Output<Node> result =
builder::opset1::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);

EXPECT_EQ(result.get_shape(), lhs);
}

TEST(autobroadcast, opset1_legacy_broadcast_1d_left) {
const Shape lhs{2, 3, 4, 5};
const Shape rhs{2};
size_t start_match_axis{0};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);

const Output<Node> result =
builder::opset1::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);

EXPECT_EQ(result.get_shape(), lhs);
}

TEST(autobroadcast, opset1_legacy_broadcast_identical) {
const Shape lhs{2, 3, 4, 5};
size_t start_match_axis{0};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, lhs);

const Output<Node> result =
builder::opset1::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);

EXPECT_EQ(result.get_shape(), lhs);
}

TEST(autobroadcast, axes_mapping_from_bcast_axes) {
const Shape output_shape{2, 3, 4, 5};
const Shape input_shape{3, 5};
const AxisSet broadcast_axes{0, 2};

auto axes_mapping = builder::opset1::get_axes_mapping_output(output_shape, broadcast_axes);
EXPECT_TRUE(op::is_constant(axes_mapping.get_node()));
Shape axes_mapping_shape = ov::as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), 2);
EXPECT_EQ(axes_mapping_shape, (Shape{1, 3}));
}

TEST(autobroadcast, axes_mapping_from_bcast_axes_scalar) {
const Shape output_shape{2, 3, 4, 5};
const Shape input_shape{};
const AxisSet broadcast_axes{0, 1, 2, 3};

auto axes_mapping = builder::opset1::get_axes_mapping_output(output_shape, broadcast_axes);
EXPECT_TRUE(op::is_constant(axes_mapping.get_node()));
Shape axes_mapping_shape = ov::as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), 0);
EXPECT_EQ(axes_mapping_shape, (Shape{}));
}

TEST(autobroadcast, axes_mapping_from_bcast_axes_identical) {
const Shape output_shape{2, 3, 4, 5};
const Shape input_shape(output_shape);
const AxisSet broadcast_axes{};

auto axes_mapping = builder::opset1::get_axes_mapping_output(output_shape, broadcast_axes);
EXPECT_TRUE(op::is_constant(axes_mapping.get_node()));
Shape axes_mapping_shape = ov::as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), output_shape.size());
EXPECT_EQ(axes_mapping_shape, (Shape{0, 1, 2, 3}));
}

TEST(autobroadcast, axes_mapping_start_match_axis) {
const PartialShape output_shape{2, 3, 4, 5};
const PartialShape input_shape{3, 4};
const std::size_t start_match_axis{1};

auto axes_mapping = builder::opset1::get_axes_mapping_output(output_shape, input_shape, start_match_axis);
EXPECT_TRUE(op::is_constant(axes_mapping.get_node()));
Shape axes_mapping_shape = ov::as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), 2);
EXPECT_EQ(axes_mapping_shape, (Shape{1, 2}));
}

TEST(autobroadcast, axes_mapping_start_match_axis_scalar) {
const PartialShape output_shape{2, 3, 4, 5};
const PartialShape input_shape{};
const std::size_t start_match_axis{4};

auto axes_mapping = builder::opset1::get_axes_mapping_output(output_shape, input_shape, start_match_axis);
EXPECT_TRUE(op::is_constant(axes_mapping.get_node()));
Shape axes_mapping_shape = ov::as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), 0);
EXPECT_EQ(axes_mapping_shape, (Shape{}));
}

TEST(autobroadcast, axes_mapping_start_match_axis_identical) {
const PartialShape output_shape{2, 3, 4, 5};
const PartialShape input_shape{2, 3, 4, 5};
const std::size_t start_match_axis{0};

auto axes_mapping = builder::opset1::get_axes_mapping_output(output_shape, input_shape, start_match_axis);
EXPECT_TRUE(op::is_constant(axes_mapping.get_node()));
Shape axes_mapping_shape = ov::as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), output_shape.rank().get_length());
EXPECT_EQ(axes_mapping_shape, (Shape{0, 1, 2, 3}));
}

@ -2,8 +2,6 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "ngraph/check.hpp"

#include <gtest/gtest.h>

#include "common_test_utils/test_assertions.hpp"
@ -50,44 +48,6 @@ TEST(check, check_with_explanation) {
EXPECT_TRUE(check_failure_thrown);
}

TEST(check, ngraph_check_true_string_info) {
NGRAPH_CHECK(true, "this should not throw");
}

TEST(check, ngraph_check_true_non_string_info) {
NGRAPH_CHECK(true, "this should not throw", 123);
}

TEST(check, ngraph_check_true_no_info) {
NGRAPH_CHECK(true);
}

TEST(check, ngraph_check_false_string_info) {
EXPECT_THROW({ NGRAPH_CHECK(false, "this should throw"); }, ngraph::CheckFailure);
}

TEST(check, ngraph_check_false_non_string_info) {
EXPECT_THROW({ NGRAPH_CHECK(false, "this should throw", 123); }, ngraph::CheckFailure);
}

TEST(check, ngraph_check_false_no_info) {
EXPECT_THROW({ NGRAPH_CHECK(false); }, ngraph::CheckFailure);
}

TEST(check, ngraph_check_with_explanation) {
bool check_failure_thrown = false;

try {
NGRAPH_CHECK(false, "xyzzyxyzzy", 123);
} catch (const ngraph::CheckFailure& e) {
check_failure_thrown = true;
EXPECT_PRED_FORMAT2(testing::IsSubstring, "Check 'false' failed at", e.what());
EXPECT_PRED_FORMAT2(testing::IsSubstring, "xyzzyxyzzy123", e.what());
}

EXPECT_TRUE(check_failure_thrown);
}

TEST(check, ov_throw_exception_check_relative_path_to_source) {
using namespace testing;
const auto path = ov::util::path_join({"src", "core", "tests", "check.cpp"});

@ -2,15 +2,16 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "openvino/op/constant.hpp"

#include <gtest/gtest.h>

#include <memory>

#include "common_test_utils/type_prop.hpp"
#include "ngraph/ngraph.hpp"
#include "ngraph/runtime/host_tensor.hpp"
#include "openvino/core/except.hpp"

using namespace ngraph;
using namespace ov;
using namespace std;

//
@ -20,7 +21,7 @@ using namespace std;
TEST(constant, boolean_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::boolean, shape, input);
ov::op::v0::Constant c(element::boolean, shape, input);
auto v = c.get_vector<char>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -43,7 +44,7 @@ TEST(constant, boolean_string) {

TEST(constant, boolean_string_broadcast) {
Shape shape{4};
op::Constant c(element::boolean, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::boolean, shape, vector<string>{"1"});
auto v = c.get_vector<char>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -60,7 +61,7 @@ TEST(constant, boolean_string_broadcast) {

TEST(constant, boolean_vector) {
Shape shape{4};
op::Constant c(element::boolean, shape, vector<char>{1, 0, 1, 0});
ov::op::v0::Constant c(element::boolean, shape, vector<char>{1, 0, 1, 0});
auto v = c.get_vector<char>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -77,7 +78,7 @@ TEST(constant, boolean_vector) {

TEST(constant, boolean_vector_broadcast) {
Shape shape{4};
op::Constant c(element::boolean, shape, vector<char>{1});
ov::op::v0::Constant c(element::boolean, shape, vector<char>{1});
auto v = c.get_vector<char>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -99,7 +100,7 @@ TEST(constant, boolean_vector_broadcast) {
TEST(constant, float_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::f32, shape, input);
ov::op::v0::Constant c(element::f32, shape, input);
auto v = c.get_vector<float>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -122,7 +123,7 @@ TEST(constant, float_string) {

TEST(constant, float_string_broadcast) {
Shape shape{4};
op::Constant c(element::f32, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::f32, shape, vector<string>{"1"});
auto v = c.get_vector<float>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -139,7 +140,7 @@ TEST(constant, float_string_broadcast) {

TEST(constant, float_vector) {
Shape shape{4};
op::Constant c(element::f32, shape, vector<float>{1, 0, 1, 0});
ov::op::v0::Constant c(element::f32, shape, vector<float>{1, 0, 1, 0});
auto v = c.get_vector<float>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -156,7 +157,7 @@ TEST(constant, float_vector) {

TEST(constant, float_vector_broadcast) {
Shape shape{4};
op::Constant c(element::f32, shape, vector<float>{1});
ov::op::v0::Constant c(element::f32, shape, vector<float>{1});
auto v = c.get_vector<float>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -178,7 +179,7 @@ TEST(constant, float_vector_broadcast) {
TEST(constant, double_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::f64, shape, input);
ov::op::v0::Constant c(element::f64, shape, input);
auto v = c.get_vector<double>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -201,7 +202,7 @@ TEST(constant, double_string) {

TEST(constant, double_string_broadcast) {
Shape shape{4};
op::Constant c(element::f64, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::f64, shape, vector<string>{"1"});
auto v = c.get_vector<double>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -218,7 +219,7 @@ TEST(constant, double_string_broadcast) {

TEST(constant, double_vector) {
Shape shape{4};
op::Constant c(element::f64, shape, vector<double>{1, 0, 1, 0});
ov::op::v0::Constant c(element::f64, shape, vector<double>{1, 0, 1, 0});
auto v = c.get_vector<double>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -235,7 +236,7 @@ TEST(constant, double_vector) {

TEST(constant, double_vector_broadcast) {
Shape shape{4};
op::Constant c(element::f64, shape, vector<double>{1});
ov::op::v0::Constant c(element::f64, shape, vector<double>{1});
auto v = c.get_vector<double>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -257,7 +258,7 @@ TEST(constant, double_vector_broadcast) {
TEST(constant, int4_string) {
Shape shape{3};
std::vector<std::string> input{"1", "0", "-1"};
op::Constant c(element::i4, shape, input);
ov::op::v0::Constant c(element::i4, shape, input);
auto v = c.cast_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -277,7 +278,7 @@ TEST(constant, int4_string) {

TEST(constant, int4_string_broadcast_negative_number) {
Shape shape{3};
op::Constant c(element::i4, shape, vector<string>{"-1"});
ov::op::v0::Constant c(element::i4, shape, vector<string>{"-1"});
auto v = c.cast_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], -1);
@ -293,7 +294,7 @@ TEST(constant, int4_string_broadcast_negative_number) {

TEST(constant, int4_string_broadcast_positive_number) {
Shape shape{3};
op::Constant c(element::i4, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::i4, shape, vector<string>{"1"});
auto v = c.cast_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -309,7 +310,7 @@ TEST(constant, int4_string_broadcast_positive_number) {

TEST(constant, int4_vector_negative_number) {
Shape shape{3};
op::Constant c(element::i4, shape, vector<int8_t>{-1, -2, -1});
ov::op::v0::Constant c(element::i4, shape, vector<int8_t>{-1, -2, -1});
auto v = c.cast_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], int8_t(-1));
@ -323,7 +324,7 @@ TEST(constant, int4_vector_negative_number) {

TEST(constant, int4_vector_positive_number) {
Shape shape{3};
op::Constant c(element::i4, shape, vector<int8_t>{1, 2, 5});
ov::op::v0::Constant c(element::i4, shape, vector<int8_t>{1, 2, 5});
auto v = c.cast_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], int8_t(1));
@ -337,7 +338,7 @@ TEST(constant, int4_vector_positive_number) {

TEST(constant, int4_vector_broadcast_negative_number) {
Shape shape{3};
op::Constant c(element::i4, shape, vector<int8_t>{-1});
ov::op::v0::Constant c(element::i4, shape, vector<int8_t>{-1});
auto v = c.cast_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], int8_t(-1));
@ -351,7 +352,7 @@ TEST(constant, int4_vector_broadcast_negative_number) {

TEST(constant, int4_vector_broadcast_positive_number) {
Shape shape{3};
op::Constant c(element::i4, shape, vector<int8_t>{3});
ov::op::v0::Constant c(element::i4, shape, vector<int8_t>{3});
auto v = c.cast_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], int8_t(3));
@ -365,17 +366,19 @@ TEST(constant, int4_vector_broadcast_positive_number) {

TEST(constant, int4_input_value_validation) {
Shape shape{2};
EXPECT_THROW(op::Constant c(element::i4, shape, 8), ::ngraph::CheckFailure);
EXPECT_THROW(op::Constant c(element::i4, shape, -9), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::i4, shape, 8), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::i4, shape, -9), ::ngraph::CheckFailure);

EXPECT_THROW(op::Constant c(element::i4, shape, std::vector<int>{-9}), ::ngraph::CheckFailure);
EXPECT_THROW(op::Constant c(element::i4, shape, std::vector<int>{8}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::i4, shape, std::vector<int>{-9}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::i4, shape, std::vector<int>{8}), ::ngraph::CheckFailure);

EXPECT_THROW(op::Constant c(element::i4, shape, std::vector<int>{-9, 1}), ::ngraph::CheckFailure);
EXPECT_THROW(op::Constant c(element::i4, shape, std::vector<int>{8, 2}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::i4, shape, std::vector<int>{-9, 1}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::i4, shape, std::vector<int>{8, 2}), ::ngraph::CheckFailure);

EXPECT_THROW(op::Constant c(element::i4, shape, std::vector<std::string>{"-9", "1"}), ::ngraph::CheckFailure);
EXPECT_THROW(op::Constant c(element::i4, shape, std::vector<std::string>{"8", "1"}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::i4, shape, std::vector<std::string>{"-9", "1"}),
::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::i4, shape, std::vector<std::string>{"8", "1"}),
::ngraph::CheckFailure);
}

//
@ -385,7 +388,7 @@ TEST(constant, int4_input_value_validation) {
TEST(constant, int8_string) {
Shape shape{4};
std::vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::i8, shape, input);
ov::op::v0::Constant c(element::i8, shape, input);
auto v = c.get_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -410,7 +413,7 @@ TEST(constant, int8_string) {

TEST(constant, int8_string_broadcast) {
Shape shape{4};
op::Constant c(element::i8, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::i8, shape, vector<string>{"1"});
auto v = c.get_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -429,7 +432,7 @@ TEST(constant, int8_string_broadcast) {

TEST(constant, int8_vector) {
Shape shape{4};
op::Constant c(element::i8, shape, vector<int8_t>{1, 0, 1, 0});
ov::op::v0::Constant c(element::i8, shape, vector<int8_t>{1, 0, 1, 0});
auto v = c.get_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -446,7 +449,7 @@ TEST(constant, int8_vector) {

TEST(constant, int8_vector_broadcast) {
Shape shape{4};
op::Constant c(element::i8, shape, vector<int8_t>{1});
ov::op::v0::Constant c(element::i8, shape, vector<int8_t>{1});
auto v = c.get_vector<int8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -468,7 +471,7 @@ TEST(constant, int8_vector_broadcast) {
TEST(constant, int16_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::i16, shape, input);
ov::op::v0::Constant c(element::i16, shape, input);
auto v = c.get_vector<int16_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -491,7 +494,7 @@ TEST(constant, int16_string) {

TEST(constant, int16_string_broadcast) {
Shape shape{4};
op::Constant c(element::i16, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::i16, shape, vector<string>{"1"});
auto v = c.get_vector<int16_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -508,7 +511,7 @@ TEST(constant, int16_string_broadcast) {

TEST(constant, int16_vector) {
Shape shape{4};
op::Constant c(element::i16, shape, vector<int16_t>{1, 0, 1, 0});
ov::op::v0::Constant c(element::i16, shape, vector<int16_t>{1, 0, 1, 0});
auto v = c.get_vector<int16_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -525,7 +528,7 @@ TEST(constant, int16_vector) {

TEST(constant, int16_vector_broadcast) {
Shape shape{4};
op::Constant c(element::i16, shape, vector<int16_t>{1});
ov::op::v0::Constant c(element::i16, shape, vector<int16_t>{1});
auto v = c.get_vector<int16_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -547,7 +550,7 @@ TEST(constant, int16_vector_broadcast) {
TEST(constant, int32_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::i32, shape, input);
ov::op::v0::Constant c(element::i32, shape, input);
auto v = c.get_vector<int32_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -570,7 +573,7 @@ TEST(constant, int32_string) {

TEST(constant, int32_string_broadcast) {
Shape shape{4};
op::Constant c(element::i32, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::i32, shape, vector<string>{"1"});
auto v = c.get_vector<int32_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -587,7 +590,7 @@ TEST(constant, int32_string_broadcast) {

TEST(constant, int32_vector) {
Shape shape{4};
op::Constant c(element::i32, shape, vector<int32_t>{1, 0, 1, 0});
ov::op::v0::Constant c(element::i32, shape, vector<int32_t>{1, 0, 1, 0});
auto v = c.get_vector<int32_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -604,7 +607,7 @@ TEST(constant, int32_vector) {

TEST(constant, int32_vector_broadcast) {
Shape shape{4};
op::Constant c(element::i32, shape, vector<int32_t>{1});
ov::op::v0::Constant c(element::i32, shape, vector<int32_t>{1});
auto v = c.get_vector<int32_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -626,7 +629,7 @@ TEST(constant, int32_vector_broadcast) {
TEST(constant, int64_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::i64, shape, input);
ov::op::v0::Constant c(element::i64, shape, input);
auto v = c.get_vector<int64_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -649,7 +652,7 @@ TEST(constant, int64_string) {

TEST(constant, int64_string_broadcast) {
Shape shape{4};
op::Constant c(element::i64, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::i64, shape, vector<string>{"1"});
auto v = c.get_vector<int64_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -666,7 +669,7 @@ TEST(constant, int64_string_broadcast) {

TEST(constant, int64_vector) {
Shape shape{4};
op::Constant c(element::i64, shape, vector<int64_t>{1, 0, 1, 0});
ov::op::v0::Constant c(element::i64, shape, vector<int64_t>{1, 0, 1, 0});
auto v = c.get_vector<int64_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -683,7 +686,7 @@ TEST(constant, int64_vector) {

TEST(constant, int64_vector_broadcast) {
Shape shape{4};
op::Constant c(element::i64, shape, vector<int64_t>{1});
ov::op::v0::Constant c(element::i64, shape, vector<int64_t>{1});
auto v = c.get_vector<int64_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -705,7 +708,7 @@ TEST(constant, int64_vector_broadcast) {
TEST(constant, uint1_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::u1, shape, input);
ov::op::v0::Constant c(element::u1, shape, input);
auto v = c.cast_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -725,7 +728,7 @@ TEST(constant, uint1_string) {

TEST(constant, uint1_string_broadcast) {
Shape shape{4};
op::Constant c(element::u1, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::u1, shape, vector<string>{"1"});
auto v = c.cast_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -740,7 +743,7 @@ TEST(constant, uint1_string_broadcast) {
TEST(constant, uint1_vector_less_than_single_byte) {
Shape shape{4};
vector<uint8_t> input{1, 0, 1, 0};
op::Constant c(element::u1, shape, input);
ov::op::v0::Constant c(element::u1, shape, input);
auto v = c.cast_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
for (unsigned i = 0; i != input.size(); ++i) {
@ -754,7 +757,7 @@ TEST(constant, uint1_vector_less_than_single_byte) {
TEST(constant, uint1_vector_bigger_than_single_byte) {
Shape shape{12};
vector<uint8_t> input{1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0};
op::Constant c(element::u1, shape, input);
ov::op::v0::Constant c(element::u1, shape, input);
auto v = c.cast_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
for (unsigned i = 0; i != input.size(); ++i) {
@ -767,7 +770,7 @@ TEST(constant, uint1_vector_bigger_than_single_byte) {

TEST(constant, uint1_vector_broadcast) {
Shape shape{3};
op::Constant c(element::u1, shape, vector<int8_t>{1});
ov::op::v0::Constant c(element::u1, shape, vector<int8_t>{1});
auto v = c.cast_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], int8_t(1));
@ -785,7 +788,7 @@ TEST(constant, uint1_vector_broadcast) {
TEST(constant, uint4_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::u4, shape, input);
ov::op::v0::Constant c(element::u4, shape, input);
auto v = c.cast_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -806,7 +809,7 @@ TEST(constant, uint4_string) {

TEST(constant, uint4_string_broadcast) {
Shape shape{4};
op::Constant c(element::u4, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::u4, shape, vector<string>{"1"});
auto v = c.cast_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -821,7 +824,7 @@ TEST(constant, uint4_string_broadcast) {

TEST(constant, uint4_vector) {
Shape shape{4};
op::Constant c(element::u4, shape, vector<uint8_t>{1, 0, 1, 0});
ov::op::v0::Constant c(element::u4, shape, vector<uint8_t>{1, 0, 1, 0});
auto v = c.cast_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -836,7 +839,7 @@ TEST(constant, uint4_vector) {

TEST(constant, uint4_vector_broadcast) {
Shape shape{3};
op::Constant c(element::u4, shape, vector<uint8_t>{1});
ov::op::v0::Constant c(element::u4, shape, vector<uint8_t>{1});
auto v = c.cast_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], int8_t(1));
@ -852,17 +855,19 @@ TEST(constant, uint4_vector_broadcast) {

TEST(constant, uint4_input_value_validation) {
Shape shape{2};
EXPECT_THROW(op::Constant c(element::u4, shape, 16), ::ngraph::CheckFailure);
EXPECT_THROW(op::Constant c(element::u4, shape, -1), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::u4, shape, 16), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::u4, shape, -1), ::ngraph::CheckFailure);

EXPECT_THROW(op::Constant c(element::u4, shape, std::vector<int>{-1}), ::ngraph::CheckFailure);
EXPECT_THROW(op::Constant c(element::u4, shape, std::vector<int>{16}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::u4, shape, std::vector<int>{-1}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::u4, shape, std::vector<int>{16}), ::ngraph::CheckFailure);

EXPECT_THROW(op::Constant c(element::u4, shape, std::vector<int>{-1, 1}), ::ngraph::CheckFailure);
EXPECT_THROW(op::Constant c(element::u4, shape, std::vector<int>{16, 2}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::u4, shape, std::vector<int>{-1, 1}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::u4, shape, std::vector<int>{16, 2}), ::ngraph::CheckFailure);

EXPECT_THROW(op::Constant c(element::u4, shape, std::vector<std::string>{"-1", "1"}), ::ngraph::CheckFailure);
EXPECT_THROW(op::Constant c(element::u4, shape, std::vector<std::string>{"16", "1"}), ::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::u4, shape, std::vector<std::string>{"-1", "1"}),
::ngraph::CheckFailure);
EXPECT_THROW(ov::op::v0::Constant c(element::u4, shape, std::vector<std::string>{"16", "1"}),
::ngraph::CheckFailure);
}

//
@ -872,7 +877,7 @@ TEST(constant, uint4_input_value_validation) {
TEST(constant, uint8_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::u8, shape, input);
ov::op::v0::Constant c(element::u8, shape, input);
auto v = c.get_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -895,7 +900,7 @@ TEST(constant, uint8_string) {

TEST(constant, uint8_string_broadcast) {
Shape shape{4};
op::Constant c(element::u8, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::u8, shape, vector<string>{"1"});
auto v = c.get_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -912,7 +917,7 @@ TEST(constant, uint8_string_broadcast) {

TEST(constant, uint8_vector) {
Shape shape{4};
op::Constant c(element::u8, shape, vector<uint8_t>{1, 0, 1, 0});
ov::op::v0::Constant c(element::u8, shape, vector<uint8_t>{1, 0, 1, 0});
auto v = c.get_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -929,7 +934,7 @@ TEST(constant, uint8_vector) {

TEST(constant, uint8_vector_broadcast) {
Shape shape{4};
op::Constant c(element::u8, shape, vector<uint8_t>{1});
ov::op::v0::Constant c(element::u8, shape, vector<uint8_t>{1});
auto v = c.get_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -951,7 +956,7 @@ TEST(constant, uint8_vector_broadcast) {
TEST(constant, uint16_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::u16, shape, input);
ov::op::v0::Constant c(element::u16, shape, input);
auto v = c.get_vector<uint16_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -974,7 +979,7 @@ TEST(constant, uint16_string) {

TEST(constant, uint16_string_broadcast) {
Shape shape{4};
op::Constant c(element::u16, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::u16, shape, vector<string>{"1"});
auto v = c.get_vector<uint16_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -991,7 +996,7 @@ TEST(constant, uint16_string_broadcast) {

TEST(constant, uint16_vector) {
Shape shape{4};
op::Constant c(element::u16, shape, vector<uint16_t>{1, 0, 1, 0});
ov::op::v0::Constant c(element::u16, shape, vector<uint16_t>{1, 0, 1, 0});
auto v = c.get_vector<uint16_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1008,7 +1013,7 @@ TEST(constant, uint16_vector) {

TEST(constant, uint16_vector_broadcast) {
Shape shape{4};
op::Constant c(element::u16, shape, vector<uint16_t>{1});
ov::op::v0::Constant c(element::u16, shape, vector<uint16_t>{1});
auto v = c.get_vector<uint16_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1030,7 +1035,7 @@ TEST(constant, uint16_vector_broadcast) {
TEST(constant, uint32_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::u32, shape, input);
ov::op::v0::Constant c(element::u32, shape, input);
auto v = c.get_vector<uint32_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1053,7 +1058,7 @@ TEST(constant, uint32_string) {

TEST(constant, uint32_string_broadcast) {
Shape shape{4};
op::Constant c(element::u32, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::u32, shape, vector<string>{"1"});
auto v = c.get_vector<uint32_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1070,7 +1075,7 @@ TEST(constant, uint32_string_broadcast) {

TEST(constant, uint32_vector) {
Shape shape{4};
op::Constant c(element::u32, shape, vector<uint32_t>{1, 0, 1, 0});
ov::op::v0::Constant c(element::u32, shape, vector<uint32_t>{1, 0, 1, 0});
auto v = c.get_vector<uint32_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1087,7 +1092,7 @@ TEST(constant, uint32_vector) {

TEST(constant, uint32_vector_broadcast) {
Shape shape{4};
op::Constant c(element::u32, shape, vector<uint32_t>{1});
ov::op::v0::Constant c(element::u32, shape, vector<uint32_t>{1});
auto v = c.get_vector<uint32_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1109,7 +1114,7 @@ TEST(constant, uint32_vector_broadcast) {
TEST(constant, uint64_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::u64, shape, input);
ov::op::v0::Constant c(element::u64, shape, input);
auto v = c.get_vector<uint64_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1132,7 +1137,7 @@ TEST(constant, uint64_string) {

TEST(constant, uint64_string_broadcast) {
Shape shape{4};
op::Constant c(element::u64, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::u64, shape, vector<string>{"1"});
auto v = c.get_vector<uint64_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1149,7 +1154,7 @@ TEST(constant, uint64_string_broadcast) {

TEST(constant, uint64_vector) {
Shape shape{4};
op::Constant c(element::u64, shape, vector<uint64_t>{1, 0, 1, 0});
ov::op::v0::Constant c(element::u64, shape, vector<uint64_t>{1, 0, 1, 0});
auto v = c.get_vector<uint64_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1166,7 +1171,7 @@ TEST(constant, uint64_vector) {

TEST(constant, uint64_vector_broadcast) {
Shape shape{4};
op::Constant c(element::u64, shape, vector<uint64_t>{1});
ov::op::v0::Constant c(element::u64, shape, vector<uint64_t>{1});
auto v = c.get_vector<uint64_t>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], 1);
@ -1188,7 +1193,7 @@ TEST(constant, uint64_vector_broadcast) {
TEST(constant, bfloat16_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::bf16, shape, input);
ov::op::v0::Constant c(element::bf16, shape, input);
auto v = c.get_vector<bfloat16>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], bfloat16(1));
@ -1211,7 +1216,7 @@ TEST(constant, bfloat16_string) {

TEST(constant, bfloat16_string_broadcast) {
Shape shape{4};
op::Constant c(element::bf16, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::bf16, shape, vector<string>{"1"});
auto v = c.get_vector<bfloat16>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], bfloat16(1));
@ -1228,7 +1233,7 @@ TEST(constant, bfloat16_string_broadcast) {

TEST(constant, bfloat16_vector) {
Shape shape{4};
op::Constant c(element::bf16, shape, vector<bfloat16>{1, 0, 1, 0});
ov::op::v0::Constant c(element::bf16, shape, vector<bfloat16>{1, 0, 1, 0});
auto v = c.get_vector<bfloat16>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], bfloat16(1));
@ -1245,7 +1250,7 @@ TEST(constant, bfloat16_vector) {

TEST(constant, bfloat16_vector_broadcast) {
Shape shape{4};
op::Constant c(element::bf16, shape, vector<bfloat16>{1});
ov::op::v0::Constant c(element::bf16, shape, vector<bfloat16>{1});
auto v = c.get_vector<bfloat16>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], bfloat16(1));
@ -1267,7 +1272,7 @@ TEST(constant, bfloat16_vector_broadcast) {
TEST(constant, float16_string) {
Shape shape{4};
vector<string> input{"1", "0", "1", "0"};
op::Constant c(element::f16, shape, input);
ov::op::v0::Constant c(element::f16, shape, input);
auto v = c.get_vector<float16>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], float16(1));
@ -1290,7 +1295,7 @@ TEST(constant, float16_string) {

TEST(constant, float16_string_broadcast) {
Shape shape{4};
op::Constant c(element::f16, shape, vector<string>{"1"});
ov::op::v0::Constant c(element::f16, shape, vector<string>{"1"});
auto v = c.get_vector<float16>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], float16(1));
@ -1307,7 +1312,7 @@ TEST(constant, float16_string_broadcast) {

TEST(constant, float16_vector) {
Shape shape{4};
op::Constant c(element::f16, shape, vector<float16>{1, 0, 1, 0});
ov::op::v0::Constant c(element::f16, shape, vector<float16>{1, 0, 1, 0});
auto v = c.get_vector<float16>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], float16(1));
@ -1324,7 +1329,7 @@ TEST(constant, float16_vector) {

TEST(constant, float16_vector_broadcast) {
Shape shape{4};
op::Constant c(element::f16, shape, vector<float16>{1});
ov::op::v0::Constant c(element::f16, shape, vector<float16>{1});
auto v = c.get_vector<float16>();
ASSERT_EQ(v.size(), shape_size(shape));
EXPECT_EQ(v[0], float16(1));
@ -1341,8 +1346,8 @@ TEST(constant, float16_vector_broadcast) {

TEST(constant, shared_data) {
Shape shape{100, 200};
auto c1 = make_shared<op::Constant>(element::f16, shape, vector<float16>{123});
auto c2 = static_pointer_cast<op::Constant>(c1->clone_with_new_inputs({}));
auto c1 = make_shared<ov::op::v0::Constant>(element::f16, shape, vector<float16>{123});
auto c2 = static_pointer_cast<ov::op::v0::Constant>(c1->clone_with_new_inputs({}));
const int16_t* p1 = c1->get_data_ptr<int16_t>();
const int16_t* p2 = c2->get_data_ptr<int16_t>();
EXPECT_EQ(p1, p2);
@ -1352,7 +1357,7 @@ template <typename T1, typename T2>
::testing::AssertionResult test_convert() {
Shape shape{5};
vector<T1> expected{1, 2, 3, 4, 5};
auto c1 = make_shared<op::Constant>(ov::element::from<T2>(), shape, expected);
auto c1 = make_shared<ov::op::v0::Constant>(ov::element::from<T2>(), shape, expected);
vector<T1> actual = c1->template cast_vector<T1>();
::testing::AssertionResult rc =
(actual == expected ? ::testing::AssertionSuccess() : ::testing::AssertionFailure());
@ -1522,7 +1527,7 @@ template <typename T1, typename T2>
::testing::AssertionResult test_uniform_ctor() {
Shape shape{5};
vector<T1> expected{3, 3, 3, 3, 3};
auto c1 = make_shared<op::Constant>(ov::element::from<T2>(), shape, 3);
auto c1 = make_shared<ov::op::v0::Constant>(ov::element::from<T2>(), shape, 3);
vector<T1> actual = c1->template cast_vector<T1>();
::testing::AssertionResult rc =
(actual == expected ? ::testing::AssertionSuccess() : ::testing::AssertionFailure());
@ -1689,32 +1694,30 @@ TEST(constant, construct_uniform) {
}

TEST(constant, bad_get_data_ptr) {
op::Constant c(element::f32, Shape{}, vector<float>{1.0});
ov::op::v0::Constant c(element::f32, Shape{}, vector<float>{1.0});
EXPECT_EQ(*c.get_data_ptr<element::Type_t::f32>(), 1.0);
try {
c.get_data_ptr<element::Type_t::f64>();
FAIL() << "Bad type not detected.";
} catch (const CheckFailure& error) {
} catch (const AssertFailure& error) {
EXPECT_HAS_SUBSTRING(error.what(), std::string("get_data_ptr"));
}
try {
c.get_data_ptr<element::Type_t::i32>();
FAIL() << "Bad type not detected.";
} catch (const CheckFailure& error) {
} catch (const AssertFailure& error) {
EXPECT_HAS_SUBSTRING(error.what(), std::string("get_data_ptr"));
}
}

OPENVINO_SUPPRESS_DEPRECATED_START

TEST(constant, hold_host_tensor) {
TEST(constant, hold_tensor) {
Shape shape{4};
void* hostDataPtr = nullptr;
std::shared_ptr<op::Constant> constOp;
std::shared_ptr<ov::op::v0::Constant> constOp;
{
auto tensor = std::make_shared<runtime::HostTensor>(element::f32, Shape{1, 2, 3, 3});
hostDataPtr = tensor->get_data_ptr();
constOp = std::make_shared<op::Constant>(tensor);
auto tensor = ov::Tensor(element::f32, Shape{1, 2, 3, 3});
hostDataPtr = tensor.data();
constOp = std::make_shared<ov::op::v0::Constant>(tensor);
}
const void* constDataPtr = constOp->get_data_ptr();
ASSERT_EQ(constDataPtr, hostDataPtr);
@ -1727,12 +1730,14 @@ TEST(constant, lazy_bitwise_identical) {
auto shape = Shape{10, 1000, 1000};
auto type = element::i32;
auto byte_size = shape_size(shape) * sizeof(int32_t);
OPENVINO_SUPPRESS_DEPRECATED_START
auto aligned_weights_buffer = std::make_shared<ngraph::runtime::AlignedBuffer>(byte_size);
std::memset(aligned_weights_buffer->get_ptr<char>(), 1, byte_size);
auto weights = std::make_shared<ngraph::runtime::SharedBuffer<std::shared_ptr<ngraph::runtime::AlignedBuffer>>>(
aligned_weights_buffer->get_ptr<char>(),
aligned_weights_buffer->size(),
aligned_weights_buffer);
OPENVINO_SUPPRESS_DEPRECATED_END

using namespace std::chrono;
auto create_constant = [&]() {
@ -1778,20 +1783,3 @@ TEST(constant, lazy_bitwise_identical) {
// '10' times is guaranteed to be faster here (typical value is ~200'000)
EXPECT_GT(bitwise_check_count_only, bitwise_check_count * 10);
}

// Disabled just because of long execution time. Enable for nightly builds in future
TEST(constant, DISABLED_nightly_huge_size_4GB) {
uint64_t start = 1llu << 32;
uint64_t s = start + 5;
std::vector<uint8_t> data(s);
for (uint64_t i = start; i < s; i++) {
data[i] = static_cast<uint8_t>(i - start + 42);
}
Shape shape{static_cast<Shape::size_type>(s)};
op::Constant c(element::u8, shape, data.data());
auto v = c.get_vector<uint8_t>();
ASSERT_EQ(v.size(), shape_size(shape));
for (uint64_t i = start; i < s; i++) {
EXPECT_EQ(v[i], i - start + 42) << i << " failed";
}
}

@ -2,6 +2,8 @@
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>

#include <algorithm>
#include <cstdio>
#include <iostream>
@ -11,19 +13,15 @@
#include "common_test_utils/all_close.hpp"
#include "common_test_utils/ndarray.hpp"
#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/file_util.hpp"
#include "ngraph/graph_util.hpp"
#include "ngraph/log.hpp"
#include "ngraph/ngraph.hpp"
#include "ngraph/op/batch_norm.hpp"
#include "ngraph/op/parameter.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/visualize_tree.hpp"
#include "ngraph/pattern/matcher.hpp"
#include "ngraph/util.hpp"
#include "openvino/core/graph_util.hpp"
#include "openvino/core/model.hpp"
#include "openvino/op/abs.hpp"
#include "openvino/op/add.hpp"
#include "openvino/op/multiply.hpp"
#include "openvino/op/op.hpp"
#include "openvino/pass/pattern/matcher.hpp"

using namespace ngraph;
using namespace ov;
using namespace std;

class ControlDependencyOp : public ov::op::Op {
@ -83,74 +81,74 @@ public:
}

TEST(control_dependencies, cdep_ops) {
auto A = make_shared<op::Parameter>(element::f32, Shape{});
auto B = make_shared<op::Parameter>(element::f32, Shape{});
auto absn = make_shared<op::Abs>(A);
auto A = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto B = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto absn = make_shared<op::v0::Abs>(A);
auto cdop = make_shared<ControlDependencyOp>(OutputVector{A}, std::set<std::shared_ptr<Node>>{absn});

auto f = make_shared<Function>(cdop, ParameterVector{A, B});
auto f = make_shared<Model>(cdop, ParameterVector{A, B});
test_ordered_ops(f, NodeVector{absn});
}

TEST(control_dependencies, two_cdep_ops) {
auto A = make_shared<op::Parameter>(element::f32, Shape{});
auto B = make_shared<op::Parameter>(element::f32, Shape{});
auto absn = make_shared<op::Abs>(A);
auto C = make_shared<op::Parameter>(element::f32, Shape{});
auto absn_c = make_shared<op::Abs>(C);
auto A = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto B = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto absn = make_shared<op::v0::Abs>(A);
auto C = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto absn_c = make_shared<op::v0::Abs>(C);
auto cdop = make_shared<ControlDependencyOp>(OutputVector{A}, std::set<std::shared_ptr<Node>>{absn, absn_c});

auto f = make_shared<Function>(cdop, ParameterVector{A, B, C});
auto f = make_shared<Model>(cdop, ParameterVector{A, B, C});
test_ordered_ops(f, NodeVector{absn, absn_c});
}

TEST(control_dependencies, two_cdep_ops_op_on_top) {
auto A = make_shared<op::Parameter>(element::f32, Shape{});
auto absn = make_shared<op::Abs>(A);
auto B = make_shared<op::Parameter>(element::f32, Shape{});
auto absn_b = make_shared<op::Abs>(B);
auto A = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto absn = make_shared<op::v0::Abs>(A);
auto B = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto absn_b = make_shared<ov::op::v0::Abs>(B);
auto cdop = make_shared<ControlDependencyOp>(OutputVector{A}, std::set<std::shared_ptr<Node>>{absn, absn_b});
auto absn_cdop = make_shared<op::Abs>(cdop);
auto absn_cdop = make_shared<ov::op::v0::Abs>(cdop);

auto f = make_shared<Function>(absn_cdop, ParameterVector{A, B});
auto f = make_shared<Model>(absn_cdop, ParameterVector{A, B});
test_ordered_ops(f, NodeVector{absn, absn_b});
}

TEST(control_dependencies, clone_function_cdop) {
auto A = make_shared<op::Parameter>(element::f32, Shape{});
auto absn = make_shared<op::Abs>(A);
auto A = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto absn = make_shared<ov::op::v0::Abs>(A);
auto cdop = make_shared<ControlDependencyOp>(OutputVector{A}, std::set<std::shared_ptr<Node>>{absn});

auto f = make_shared<Function>(cdop, ParameterVector{A});
auto f = make_shared<Model>(cdop, ParameterVector{A});
test_ordered_ops(f, NodeVector{absn});
auto clone = f->clone();
auto matcher = std::make_shared<pattern::Matcher>(cdop);
auto matcher = std::make_shared<pass::pattern::Matcher>(cdop);
auto cdop_clone = clone->get_results().at(0)->input_value(0).get_node_shared_ptr();
ASSERT_TRUE(matcher->match(cdop_clone));
auto cloned_deps = cdop_clone->get_control_dependencies();
ASSERT_EQ(cloned_deps.size(), 1);
auto cloned_abs = *begin(cloned_deps);
ASSERT_TRUE(is_type<op::Abs>(cloned_abs));
ASSERT_TRUE(is_type<ov::op::v0::Abs>(cloned_abs));
}

TEST(control_dependencies, clone_function_cdop_abs) {
auto A = make_shared<op::Parameter>(element::f32, Shape{});
auto absn = make_shared<op::Abs>(A);
auto B = make_shared<op::Parameter>(element::f32, Shape{});
auto absn_b = make_shared<op::Abs>(B);
auto A = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto absn = make_shared<ov::op::v0::Abs>(A);
auto B = make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
auto absn_b = make_shared<ov::op::v0::Abs>(B);
auto cdop = make_shared<ControlDependencyOp>(OutputVector{A}, std::set<std::shared_ptr<Node>>{absn, absn_b});
auto absn_cdop = make_shared<op::Abs>(cdop);
auto absn_cdop = make_shared<ov::op::v0::Abs>(cdop);

auto f = make_shared<Function>(absn_cdop, ParameterVector{A, B});
auto f = make_shared<Model>(absn_cdop, ParameterVector{A, B});
auto clone = f->clone();
auto matcher = std::make_shared<pattern::Matcher>(cdop);
auto matcher = std::make_shared<pass::pattern::Matcher>(cdop);
auto cdop_clone =
clone->get_results().at(0)->input_value(0).get_node_shared_ptr()->input_value(0).get_node_shared_ptr();
ASSERT_TRUE(matcher->match(cdop_clone));
auto cloned_deps = cdop_clone->get_control_dependencies();
ASSERT_EQ(cloned_deps.size(), 2);
for (auto ccdep : cloned_deps) {
ASSERT_TRUE(is_type<op::Abs>(ccdep));
ASSERT_TRUE(is_type<ov::op::v0::Abs>(ccdep));
}
}

@ -161,8 +159,8 @@ static size_t count_control_dependencies(const shared_ptr<Node>& node, const sha

TEST(control_dependencies, replace_node) {
Shape shape{2, 2};
auto A = make_shared<op::Parameter>(element::f32, shape);
auto B = make_shared<op::Parameter>(element::f32, shape);
auto A = make_shared<ov::op::v0::Parameter>(element::f32, shape);
auto B = make_shared<ov::op::v0::Parameter>(element::f32, shape);
auto MUL_AB = make_shared<op::v1::Multiply>(A, B);
auto MUL_BA = make_shared<op::v1::Multiply>(B, A);
auto ADD = make_shared<op::v1::Add>(A, B);
@ -170,7 +168,7 @@ TEST(control_dependencies, replace_node) {
ADD->add_control_dependency(MUL_AB);
ASSERT_TRUE(1 == count_control_dependencies(ADD, MUL_AB));
ASSERT_TRUE(0 == count_control_dependencies(ADD, MUL_BA));
replace_node(MUL_AB, MUL_BA);
ov::replace_node(MUL_AB, MUL_BA);
ASSERT_TRUE(0 == count_control_dependencies(ADD, MUL_AB));
ASSERT_TRUE(1 == count_control_dependencies(ADD, MUL_BA));
}

@ -4,14 +4,14 @@

#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "openvino/op/constant.hpp"

using namespace ngraph;
using namespace ov;
using namespace std;

TEST(convert_u1_to_string, convert_u1_to_string) {
vector<uint8_t> values{171, 16};
auto constant = make_shared<op::Constant>(element::u1, Shape{12}, &values[0]);
auto constant = make_shared<ov::op::v0::Constant>(element::u1, Shape{12}, &values[0]);

vector<string> ref{"1", "0", "1", "0", "1", "0", "1", "1", "0", "0", "0", "1"};
for (size_t i = 0; i < 12; ++i) {

@ -2,30 +2,31 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "openvino/core/coordinate.hpp"

#include <memory>
#include <ngraph/coordinate_transform.hpp>
#include <numeric>
#include <string>

#include "common_test_utils/ndarray.hpp"
#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/coordinate_transform.hpp"

using namespace std;
using namespace ngraph;
NGRAPH_SUPPRESS_DEPRECATED_START
using namespace ov;

OPENVINO_SUPPRESS_DEPRECATED_START
TEST(coordinate, shape0d) {
auto ct = CoordinateTransform({});
auto ct = ngraph::CoordinateTransform({});
ASSERT_EQ(shape_size(ct.get_target_shape()), 1);
auto it = ct.begin();
EXPECT_EQ(*it++, Coordinate({}));
EXPECT_EQ(*it++, ov::Coordinate({}));
EXPECT_TRUE(it == ct.end());
}

TEST(coordinate, shape1d) {
auto ct = CoordinateTransform({3});
auto ct = ngraph::CoordinateTransform({3});
ASSERT_EQ(shape_size(ct.get_target_shape()), 3);
auto it = ct.begin();
EXPECT_EQ(*it++, Coordinate({0}));
@ -35,7 +36,7 @@ TEST(coordinate, shape1d) {
}

TEST(coordinate, shape2d) {
auto ct = CoordinateTransform({2, 3});
auto ct = ngraph::CoordinateTransform({2, 3});
ASSERT_EQ(shape_size(ct.get_target_shape()), 6);
auto it = ct.begin();
EXPECT_EQ(*it++, Coordinate({0, 0}));
@ -48,7 +49,7 @@ TEST(coordinate, shape2d) {
}

TEST(coordinate, shape3d) {
auto ct = CoordinateTransform({2, 3, 4});
auto ct = ngraph::CoordinateTransform({2, 3, 4});
ASSERT_EQ(shape_size(ct.get_target_shape()), 24);
auto it = ct.begin();
EXPECT_EQ(*it++, Coordinate({0, 0, 0}));
@ -79,22 +80,12 @@ TEST(coordinate, shape3d) {
}

TEST(coordinate, zero_sized_axis) {
auto ct = CoordinateTransform({2, 0, 4});
auto ct = ngraph::CoordinateTransform({2, 0, 4});
ASSERT_EQ(shape_size(ct.get_target_shape()), 0);
auto it = ct.begin();
EXPECT_TRUE(it == ct.end());
}

TEST(DISABLED_coordinate, random) {
auto ct = CoordinateTransform({2, 3, 4});
ASSERT_EQ(shape_size(ct.get_target_shape()), 24);
auto it = ct.begin();
it += 5;
EXPECT_EQ(*it, Coordinate({0, 1, 1}));
it += -2;
EXPECT_EQ(*it, Coordinate({0, 1, 1}));
}

TEST(coordinate, corner) {
Shape source_shape{10, 10};
Coordinate source_start_corner = Coordinate{3, 3};
@ -106,7 +97,7 @@ TEST(coordinate, corner) {
CoordinateDiff target_padding_above = CoordinateDiff(source_shape.size(), 0);
Strides source_dilation_strides = Strides(source_shape.size(), 1);

auto ct = CoordinateTransform(source_shape,
auto ct = ngraph::CoordinateTransform(source_shape,
source_start_corner,
source_end_corner,
source_strides,
@ -140,7 +131,7 @@ TEST(coordinate, strides) {
CoordinateDiff target_padding_above = CoordinateDiff(source_shape.size(), 0);
Strides source_dilation_strides = Strides(source_shape.size(), 1);

auto ct = CoordinateTransform(source_shape,
auto ct = ngraph::CoordinateTransform(source_shape,
source_start_corner,
source_end_corner,
source_strides,
@ -184,7 +175,7 @@ TEST(coordinate, axis_order) {
CoordinateDiff target_padding_above = CoordinateDiff(source_shape.size(), 0);
Strides source_dilation_strides = Strides(source_shape.size(), 1);

auto ct = CoordinateTransform(source_shape,
auto ct = ngraph::CoordinateTransform(source_shape,
source_start_corner,
source_end_corner,
source_strides,
@ -221,94 +212,3 @@ TEST(coordinate, axis_order) {
EXPECT_EQ(ct.to_source_coordinate(*it++), Coordinate({2, 1, 3}));
EXPECT_TRUE(it == ct.end());
}

TEST(DISABLED_coordinate, padding) {
Shape source_shape{10, 10};
Coordinate source_start_corner = Coordinate{0, 0};
Coordinate source_end_corner{source_shape};
Strides source_strides = Strides(source_shape.size(), 1);
AxisVector source_axis_order(source_shape.size());
iota(source_axis_order.begin(), source_axis_order.end(), 0);
CoordinateDiff target_padding_below = CoordinateDiff(source_shape.size(), 0);
CoordinateDiff target_padding_above = CoordinateDiff(source_shape.size(), 0);
Strides source_dilation_strides = Strides(source_shape.size(), 1);

auto ct = CoordinateTransform(source_shape,
source_start_corner,
source_end_corner,
|
||||
source_strides,
|
||||
source_axis_order,
|
||||
target_padding_below,
|
||||
target_padding_above,
|
||||
source_dilation_strides);
|
||||
|
||||
// for (const Coordinate& c : ct)
|
||||
// {
|
||||
// cout << c << ", " << ct.to_source_coordinate(c) << endl;
|
||||
// }
|
||||
|
||||
ASSERT_EQ(shape_size(ct.get_target_shape()), 24);
|
||||
auto it = ct.begin();
|
||||
|
||||
EXPECT_TRUE(it == ct.end());
|
||||
}
|
||||
|
||||
TEST(DISABLED_coordinate, dilation) {
|
||||
Shape source_shape{10, 10};
|
||||
Coordinate source_start_corner = Coordinate{0, 0};
|
||||
Coordinate source_end_corner{source_shape};
|
||||
Strides source_strides = Strides(source_shape.size(), 1);
|
||||
AxisVector source_axis_order(source_shape.size());
|
||||
iota(source_axis_order.begin(), source_axis_order.end(), 0);
|
||||
CoordinateDiff target_padding_below = CoordinateDiff(source_shape.size(), 0);
|
||||
CoordinateDiff target_padding_above = CoordinateDiff(source_shape.size(), 0);
|
||||
Strides source_dilation_strides = Strides(source_shape.size(), 1);
|
||||
|
||||
auto ct = CoordinateTransform(source_shape,
|
||||
source_start_corner,
|
||||
source_end_corner,
|
||||
source_strides,
|
||||
source_axis_order,
|
||||
target_padding_below,
|
||||
target_padding_above,
|
||||
source_dilation_strides);
|
||||
|
||||
// for (const Coordinate& c : ct)
|
||||
// {
|
||||
// cout << ct.to_source_coordinate(c) << endl;
|
||||
// }
|
||||
|
||||
ASSERT_EQ(shape_size(ct.get_target_shape()), 24);
|
||||
auto it = ct.begin();
|
||||
|
||||
EXPECT_TRUE(it == ct.end());
|
||||
}
|
||||
|
||||
TEST(benchmark, coordinate) {
|
||||
Shape source_shape{128, 3, 2000, 1000};
|
||||
Coordinate source_start_corner = Coordinate{0, 0, 0, 0};
|
||||
Coordinate source_end_corner{source_shape};
|
||||
Strides source_strides = Strides(source_shape.size(), 1);
|
||||
AxisVector source_axis_order(source_shape.size());
|
||||
iota(source_axis_order.begin(), source_axis_order.end(), 0);
|
||||
CoordinateDiff target_padding_below = CoordinateDiff(source_shape.size(), 0);
|
||||
CoordinateDiff target_padding_above = CoordinateDiff(source_shape.size(), 0);
|
||||
Strides source_dilation_strides = Strides(source_shape.size(), 1);
|
||||
|
||||
stopwatch timer;
|
||||
timer.start();
|
||||
auto ct = CoordinateTransform(source_shape,
|
||||
source_start_corner,
|
||||
source_end_corner,
|
||||
source_strides,
|
||||
source_axis_order,
|
||||
target_padding_below,
|
||||
target_padding_above,
|
||||
source_dilation_strides);
|
||||
|
||||
for (const Coordinate& c : ct) {
|
||||
(void)c;
|
||||
}
|
||||
timer.stop();
|
||||
cout << "time: " << timer.get_milliseconds() << endl;
|
||||
}
|
||||
|
@ -2,17 +2,18 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "ngraph/coordinate_range.hpp"

#include <algorithm>
#include <ngraph/coordinate_range.hpp>
#include <numeric>
#include <utility>

#include "gtest/gtest.h"
#include "openvino/core/coordinate.hpp"

using namespace ngraph;
using namespace ngraph::coordinates;
using namespace ov;
using Index = size_t;
using ExpectedOutput = std::vector<std::pair<Index, Coordinate>>;
using ExpectedOutput = std::vector<std::pair<Index, ov::Coordinate>>;

///
///

@ -24,7 +25,7 @@ TEST(coordinate_range, slice_range_shape0d) {
const Shape s;
const Coordinate start_corner(s.size());

auto slice_range = slice(s, start_corner, s);
auto slice_range = ngraph::coordinates::slice(s, start_corner, s);
auto it = slice_range.begin();
EXPECT_EQ(it, begin(slice_range));
EXPECT_FALSE(it == slice_range.end());
@ -41,7 +42,7 @@ TEST(coordinate_range, slice_range_shape1d) {
|
||||
ASSERT_EQ(expected.size(), shape_size(s)) << "check expected data";
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto slice_range : slice(s, start_corner, s)) {
|
||||
for (auto slice_range : ngraph::coordinates::slice(s, start_corner, s)) {
|
||||
auto index = slice_range.begin_index;
|
||||
for (size_t i = 0; i < slice_range.element_number; index += slice_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
@ -64,7 +65,7 @@ TEST(coordinate_range, slice_range_shape2d) {
|
||||
ASSERT_EQ(expected.size(), shape_size(s)) << "check expected data";
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto slice_range : slice(s, start_corner, s)) {
|
||||
for (auto slice_range : ngraph::coordinates::slice(s, start_corner, s)) {
|
||||
auto index = slice_range.begin_index;
|
||||
for (size_t i = 0; i < slice_range.element_number; index += slice_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
@ -91,7 +92,7 @@ TEST(coordinate_range, slice_range_shape3d) {
|
||||
ASSERT_EQ(expected.size(), shape_size(s)) << "check expected data";
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto slice_range : slice(s, start_corner, s)) {
|
||||
for (auto slice_range : ngraph::coordinates::slice(s, start_corner, s)) {
|
||||
auto index = slice_range.begin_index;
|
||||
for (size_t i = 0; i < slice_range.element_number; index += slice_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
@ -105,7 +106,7 @@ TEST(coordinate_range, slice_range_zero_sized_axis) {
|
||||
const Shape s{2, 0, 4};
|
||||
const Coordinate start_corner(s.size());
|
||||
|
||||
auto slice_range = slice(s, start_corner, s);
|
||||
auto slice_range = ngraph::coordinates::slice(s, start_corner, s);
|
||||
auto it = slice_range.begin();
|
||||
EXPECT_TRUE(it == slice_range.end()) << "Expect empty range";
|
||||
}
|
||||
@ -115,10 +116,10 @@ TEST(coordinate_range, slice_range_zero_sized_axis) {
|
||||
///
|
||||
TEST(coordinate_range, slice_range_input_validataion) {
|
||||
const Shape s{10, 10, 10};
|
||||
EXPECT_THROW(slice(s, {1}, {1}), std::domain_error);
|
||||
EXPECT_THROW(slice(s, s, {1}), std::domain_error);
|
||||
EXPECT_THROW(slice(s, {1}, s), std::domain_error);
|
||||
EXPECT_THROW(slice(s, s, s, {}), std::domain_error);
|
||||
EXPECT_THROW(ngraph::coordinates::slice(s, {1}, {1}), std::domain_error);
|
||||
EXPECT_THROW(ngraph::coordinates::slice(s, s, {1}), std::domain_error);
|
||||
EXPECT_THROW(ngraph::coordinates::slice(s, {1}, s), std::domain_error);
|
||||
EXPECT_THROW(ngraph::coordinates::slice(s, s, s, {}), std::domain_error);
|
||||
}
|
||||
|
||||
namespace {
|
||||
@ -164,7 +165,7 @@ TEST(coordinate_range, slice_range_corner) {
|
||||
<< "check epxected data";
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto slice_range : slice(s, source_start_corner, source_end_corner)) {
|
||||
for (auto slice_range : ngraph::coordinates::slice(s, source_start_corner, source_end_corner)) {
|
||||
auto index = slice_range.begin_index;
|
||||
for (size_t i = 0; i < slice_range.element_number; index += slice_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
@ -194,7 +195,7 @@ TEST(coordinate_range, slice_range_strides) {
|
||||
<< "check epxected data";
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto slice_range : slice(s, source_start_corner, source_end_corner, source_strides)) {
|
||||
for (auto slice_range : ngraph::coordinates::slice(s, source_start_corner, source_end_corner, source_strides)) {
|
||||
auto index = slice_range.begin_index;
|
||||
for (size_t i = 0; i < slice_range.element_number; index += slice_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
@ -215,7 +216,7 @@ TEST(coordinate_range, reverse_range_shape0d) {
|
||||
const Shape s;
|
||||
const AxisSet reverset_axis{};
|
||||
|
||||
auto reverse_range = reverse(s, reverset_axis);
|
||||
auto reverse_range = ngraph::coordinates::reverse(s, reverset_axis);
|
||||
auto it = reverse_range.begin();
|
||||
EXPECT_EQ(it, begin(reverse_range));
|
||||
auto v = *it;  // if it is not end it has to be dereferenceable;
|
||||
@ -231,9 +232,9 @@ TEST(coordinate_range, reverse_range_shape1d) {
|
||||
EXPECT_EQ(expected.size(), shape_size(s)) << "check expected data";
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto reverse_range : reverse(s, reverset_axis)) {
|
||||
for (auto reverse_range : ngraph::coordinates::reverse(s, reverset_axis)) {
|
||||
auto index = reverse_range.begin_index;
|
||||
ASSERT_EQ(reverse_range.direction, Direction::forward);
|
||||
ASSERT_EQ(reverse_range.direction, ngraph::coordinates::Direction::forward);
|
||||
for (size_t i = 0; i < reverse_range.element_number; index += reverse_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
++expected_val;
|
||||
@ -256,9 +257,9 @@ TEST(coordinate_range, reverse_range_shape2d) {
|
||||
EXPECT_EQ(expected.size(), shape_size(s)) << "check expected data";
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto reverse_range : reverse(s, reverset_axis)) {
|
||||
for (auto reverse_range : ngraph::coordinates::reverse(s, reverset_axis)) {
|
||||
auto index = reverse_range.begin_index;
|
||||
ASSERT_EQ(reverse_range.direction, Direction::forward);
|
||||
ASSERT_EQ(reverse_range.direction, ngraph::coordinates::Direction::forward);
|
||||
for (size_t i = 0; i < reverse_range.element_number; index += reverse_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
++expected_val;
|
||||
@ -285,9 +286,9 @@ TEST(coordinate_range, reverse_range_shape3d) {
|
||||
EXPECT_EQ(expected.size(), shape_size(s)) << "check expected data";
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto reverse_range : reverse(s, reverset_axis)) {
|
||||
for (auto reverse_range : ngraph::coordinates::reverse(s, reverset_axis)) {
|
||||
auto index = reverse_range.begin_index;
|
||||
ASSERT_EQ(reverse_range.direction, Direction::forward);
|
||||
ASSERT_EQ(reverse_range.direction, ngraph::coordinates::Direction::forward);
|
||||
for (size_t i = 0; i < reverse_range.element_number; index += reverse_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
++expected_val;
|
||||
@ -301,7 +302,7 @@ TEST(coordinate_range, reverse_range_shape3d) {
|
||||
TEST(coordinate_range, reverse_range_zero_sized_axis) {
|
||||
const Shape s{2, 0, 4};
|
||||
|
||||
auto reverse_range = reverse(s, {});
|
||||
auto reverse_range = ngraph::coordinates::reverse(s, {});
|
||||
auto it = reverse_range.begin();
|
||||
EXPECT_TRUE(it == reverse_range.end()) << "Expect empty range";
|
||||
}
|
||||
@ -311,7 +312,7 @@ TEST(coordinate_range, reverse_range_zero_sized_axis) {
|
||||
///
|
||||
TEST(coordinate_range, reverse_range_input_validataion) {
|
||||
const Shape s{10, 10, 10};
|
||||
EXPECT_THROW(reverse(s, {10}), std::domain_error);
|
||||
EXPECT_THROW(ngraph::coordinates::reverse(s, {10}), std::domain_error);
|
||||
}
|
||||
|
||||
TEST(coordinate_range, reverse_range_2d) {
|
||||
@ -325,9 +326,9 @@ TEST(coordinate_range, reverse_range_2d) {
|
||||
{29, {2, 9}}, {28, {2, 8}}, {27, {2, 7}}, {26, {2, 6}}, {25, {2, 5}}, {24, {2, 4}}, {23, {2, 3}}, {22, {2, 2}}, {21, {2, 1}}, {20, {2, 0}}};
|
||||
// clang-format on
|
||||
auto expected_val = begin(expected);
|
||||
for (auto reverse_range : reverse(s, reverset_axis)) {
|
||||
for (auto reverse_range : ngraph::coordinates::reverse(s, reverset_axis)) {
|
||||
auto index = reverse_range.begin_index;
|
||||
ASSERT_EQ(reverse_range.direction, Direction::reverse);
|
||||
ASSERT_EQ(reverse_range.direction, ngraph::coordinates::Direction::reverse);
|
||||
for (size_t i = 0; i < reverse_range.element_number; index -= reverse_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
++expected_val;
|
||||
@ -358,9 +359,9 @@ TEST(coordinate_range, reverse_1_range_3d) {
|
||||
// clang-format on
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto reverse_range : reverse(s, reverset_axis)) {
|
||||
for (auto reverse_range : ngraph::coordinates::reverse(s, reverset_axis)) {
|
||||
auto index = reverse_range.begin_index;
|
||||
ASSERT_EQ(reverse_range.direction, Direction::forward);
|
||||
ASSERT_EQ(reverse_range.direction, ngraph::coordinates::Direction::forward);
|
||||
for (size_t i = 0; i < reverse_range.element_number; index += reverse_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
++expected_val;
|
||||
@ -391,9 +392,9 @@ TEST(coordinate_range, reverse_2_range_3d) {
|
||||
// clang-format on
|
||||
|
||||
auto expected_val = begin(expected);
|
||||
for (auto reverse_range : reverse(s, reverset_axis)) {
|
||||
for (auto reverse_range : ngraph::coordinates::reverse(s, reverset_axis)) {
|
||||
auto index = reverse_range.begin_index;
|
||||
ASSERT_EQ(reverse_range.direction, Direction::reverse);
|
||||
ASSERT_EQ(reverse_range.direction, ngraph::coordinates::Direction::reverse);
|
||||
for (size_t i = 0; i < reverse_range.element_number; index -= reverse_range.step, ++i) {
|
||||
EXPECT_EQ(index, expected_val->first);
|
||||
++expected_val;
|
||||
|
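The slice and reverse ranges in this file are always consumed with the same begin_index / element_number / step walk. A compact sketch of that consumption pattern, mirroring the loops in the tests above (the function name and the flat-index vector are illustrative):

#include <cstddef>
#include <vector>

#include "ngraph/coordinate_range.hpp"
#include "openvino/core/coordinate.hpp"
#include "openvino/core/shape.hpp"

// Collect the flat indices visited by a slice over the full extent of `s`,
// walking each returned range the same way the tests do.
std::vector<size_t> collect_slice_indices(const ov::Shape& s) {
    std::vector<size_t> indices;
    const ov::Coordinate start_corner(s.size());
    for (auto range : ngraph::coordinates::slice(s, start_corner, s)) {
        auto index = range.begin_index;
        for (size_t i = 0; i < range.element_number; index += range.step, ++i) {
            indices.push_back(index);
        }
    }
    return indices;
}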
@ -2,24 +2,61 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
|
||||
#include "common_test_utils/ndarray.hpp"
|
||||
#include "common_test_utils/test_tools.hpp"
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
#include "ngraph/opsets/opset5.hpp"
|
||||
#include "openvino/opsets/opset8.hpp"
|
||||
#include "openvino/core/shape.hpp"
|
||||
#include "openvino/op/abs.hpp"
|
||||
#include "openvino/op/acos.hpp"
|
||||
#include "openvino/op/add.hpp"
|
||||
#include "openvino/op/asin.hpp"
|
||||
#include "openvino/op/atan.hpp"
|
||||
#include "openvino/op/broadcast.hpp"
|
||||
#include "openvino/op/ceiling.hpp"
|
||||
#include "openvino/op/concat.hpp"
|
||||
#include "openvino/op/constant.hpp"
|
||||
#include "openvino/op/convert.hpp"
|
||||
#include "openvino/op/cos.hpp"
|
||||
#include "openvino/op/cosh.hpp"
|
||||
#include "openvino/op/divide.hpp"
|
||||
#include "openvino/op/equal.hpp"
|
||||
#include "openvino/op/exp.hpp"
|
||||
#include "openvino/op/floor.hpp"
|
||||
#include "openvino/op/greater.hpp"
|
||||
#include "openvino/op/greater_eq.hpp"
|
||||
#include "openvino/op/less.hpp"
|
||||
#include "openvino/op/less_eq.hpp"
|
||||
#include "openvino/op/log.hpp"
|
||||
#include "openvino/op/loop.hpp"
|
||||
#include "openvino/op/maximum.hpp"
|
||||
#include "openvino/op/minimum.hpp"
|
||||
#include "openvino/op/multiply.hpp"
|
||||
#include "openvino/op/negative.hpp"
|
||||
#include "openvino/op/not_equal.hpp"
|
||||
#include "openvino/op/parameter.hpp"
|
||||
#include "openvino/op/power.hpp"
|
||||
#include "openvino/op/random_uniform.hpp"
|
||||
#include "openvino/op/reduce_sum.hpp"
|
||||
#include "openvino/op/reshape.hpp"
|
||||
#include "openvino/op/select.hpp"
|
||||
#include "openvino/op/sign.hpp"
|
||||
#include "openvino/op/sin.hpp"
|
||||
#include "openvino/op/sinh.hpp"
|
||||
#include "openvino/op/strided_slice.hpp"
|
||||
#include "openvino/op/subtract.hpp"
|
||||
#include "openvino/op/tan.hpp"
|
||||
#include "openvino/op/tanh.hpp"
|
||||
|
||||
using namespace std;
using namespace ngraph;
using namespace ov;

template <typename OP>
bool check_unary() {
Shape shape{1};
auto arg0 = make_shared<op::Parameter>(element::f32, shape);
OutputVector new_args{make_shared<op::Parameter>(element::f32, shape)};
ov::Shape shape{1};
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
OutputVector new_args{make_shared<ov::op::v0::Parameter>(element::f32, shape)};

auto node = make_shared<OP>(arg0);
auto new_node = node->copy_with_new_inputs(new_args);

@ -30,10 +67,10 @@ bool check_unary() {
template <typename OP>
bool check_binary() {
Shape shape{1};
auto arg0 = make_shared<op::Parameter>(element::f32, shape);
auto arg1 = make_shared<op::Parameter>(element::f32, shape);
OutputVector new_args{make_shared<op::Parameter>(element::f32, shape),
make_shared<op::Parameter>(element::f32, shape)};
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
auto arg1 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
OutputVector new_args{make_shared<ov::op::v0::Parameter>(element::f32, shape),
make_shared<ov::op::v0::Parameter>(element::f32, shape)};

auto node = make_shared<OP>(arg0, arg1);
auto new_node = node->copy_with_new_inputs(new_args);

@ -42,38 +79,38 @@
}
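Spelled out completely under the new namespaces, the unary clone check that the hunks above migrate piece by piece would look roughly like this. This is a sketch assembled from the fragments shown, not a verbatim copy of the final file; check_unary_sketch is a made-up name:

#include <memory>

#include "openvino/op/parameter.hpp"

template <typename OP>
bool check_unary_sketch() {
    ov::Shape shape{1};
    auto arg0 = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, shape);
    ov::OutputVector new_args{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, shape)};

    auto node = std::make_shared<OP>(arg0);
    auto new_node = node->copy_with_new_inputs(new_args);

    // The clone must exist and must point at the freshly created inputs.
    return new_node != nullptr && new_args == new_node->input_values();
}

// Usage, as in the tests below:
//   ASSERT_TRUE(check_unary_sketch<ov::op::v0::Abs>());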
TEST(copy, abs) {
|
||||
ASSERT_TRUE(check_unary<op::Abs>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Abs>());
|
||||
}
|
||||
|
||||
TEST(copy, acos) {
|
||||
ASSERT_TRUE(check_unary<op::Acos>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Acos>());
|
||||
}
|
||||
|
||||
TEST(copy, add) {
|
||||
ASSERT_TRUE(check_binary<op::v1::Add>());
|
||||
ASSERT_TRUE(check_binary<ov::op::v1::Add>());
|
||||
}
|
||||
|
||||
TEST(copy, asin) {
|
||||
ASSERT_TRUE(check_unary<op::Asin>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Asin>());
|
||||
}
|
||||
|
||||
TEST(copy, atan) {
|
||||
ASSERT_TRUE(check_unary<op::Atan>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Atan>());
|
||||
}
|
||||
|
||||
TEST(copy, broadcast) {
|
||||
Shape shape{1, 3};
|
||||
Shape new_shape{4, 1, 3};
|
||||
AxisSet axes{1, 2};
|
||||
auto arg0 = make_shared<op::Parameter>(element::f32, shape);
|
||||
OutputVector new_args{make_shared<op::Parameter>(element::f32, shape),
|
||||
op::Constant::create(element::u64, Shape{new_shape.size()}, new_shape),
|
||||
op::Constant::create(element::i64, Shape{axes.size()}, axes.to_vector())};
|
||||
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
|
||||
OutputVector new_args{make_shared<ov::op::v0::Parameter>(element::f32, shape),
|
||||
ov::op::v0::Constant::create(element::u64, Shape{new_shape.size()}, new_shape),
|
||||
ov::op::v0::Constant::create(element::i64, Shape{axes.size()}, axes.to_vector())};
|
||||
|
||||
auto node =
|
||||
make_shared<op::v1::Broadcast>(arg0,
|
||||
op::Constant::create(element::u64, Shape{new_shape.size()}, new_shape),
|
||||
op::Constant::create(element::i64, Shape{axes.size()}, axes.to_vector()));
|
||||
auto node = make_shared<op::v1::Broadcast>(
|
||||
arg0,
|
||||
ov::op::v0::Constant::create(element::u64, Shape{new_shape.size()}, new_shape),
|
||||
ov::op::v0::Constant::create(element::i64, Shape{axes.size()}, axes.to_vector()));
|
||||
auto new_node = node->copy_with_new_inputs(new_args);
|
||||
auto node_cast = ov::as_type_ptr<op::v1::Broadcast>(new_node);
|
||||
ASSERT_NE(node_cast, nullptr);
|
||||
@ -88,19 +125,19 @@ TEST(copy, broadcast) {
|
||||
}
|
||||
|
||||
TEST(copy, ceiling) {
|
||||
ASSERT_TRUE(check_unary<op::Ceiling>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Ceiling>());
|
||||
}
|
||||
|
||||
TEST(copy, concat) {
|
||||
Shape shape{1};
|
||||
auto arg0 = make_shared<op::Parameter>(element::f32, shape);
|
||||
auto arg1 = make_shared<op::Parameter>(element::f32, shape);
|
||||
OutputVector new_args{make_shared<op::Parameter>(element::f32, shape),
|
||||
make_shared<op::Parameter>(element::f32, shape)};
|
||||
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
|
||||
auto arg1 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
|
||||
OutputVector new_args{make_shared<ov::op::v0::Parameter>(element::f32, shape),
|
||||
make_shared<ov::op::v0::Parameter>(element::f32, shape)};
|
||||
int64_t axis = 0;
|
||||
auto node = make_shared<op::Concat>(NodeVector{arg0, arg1}, axis);
|
||||
auto node = make_shared<ov::op::v0::Concat>(NodeVector{arg0, arg1}, axis);
|
||||
auto new_node = node->clone_with_new_inputs(new_args);
|
||||
auto node_cast = ov::as_type_ptr<op::Concat>(new_node);
|
||||
auto node_cast = ov::as_type_ptr<ov::op::v0::Concat>(new_node);
|
||||
ASSERT_NE(node_cast, nullptr);
|
||||
|
||||
ASSERT_TRUE(nullptr != new_node);
|
||||
@ -112,9 +149,9 @@ TEST(copy, constant) {
|
||||
Shape shape{};
|
||||
vector<float> c{2.4f};
|
||||
auto& et = element::f32;
|
||||
auto node = op::Constant::create(et, shape, c);
|
||||
auto node = ov::op::v0::Constant::create(et, shape, c);
|
||||
auto new_node = node->clone_with_new_inputs(OutputVector{});
|
||||
auto node_cast = ov::as_type_ptr<op::Constant>(new_node);
|
||||
auto node_cast = ov::as_type_ptr<ov::op::v0::Constant>(new_node);
|
||||
ASSERT_NE(node_cast, nullptr);
|
||||
ASSERT_TRUE(nullptr != new_node);
|
||||
ASSERT_TRUE(OutputVector{} == new_node->input_values());
|
||||
@ -126,12 +163,12 @@ TEST(copy, constant) {
|
||||
TEST(copy, convert) {
|
||||
Shape shape;
|
||||
auto& et = element::f64;
|
||||
auto arg0 = make_shared<op::Parameter>(element::f32, shape);
|
||||
OutputVector new_args{make_shared<op::Parameter>(element::f32, shape)};
|
||||
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
|
||||
OutputVector new_args{make_shared<ov::op::v0::Parameter>(element::f32, shape)};
|
||||
|
||||
auto node = make_shared<op::Convert>(arg0, et);
|
||||
auto node = make_shared<op::v0::Convert>(arg0, et);
|
||||
auto new_node = node->clone_with_new_inputs(new_args);
|
||||
auto node_cast = ov::as_type_ptr<op::Convert>(new_node);
|
||||
auto node_cast = ov::as_type_ptr<op::v0::Convert>(new_node);
|
||||
ASSERT_NE(node_cast, nullptr);
|
||||
|
||||
ASSERT_TRUE(nullptr != new_node);
|
||||
@ -140,11 +177,11 @@ TEST(copy, convert) {
|
||||
}
|
||||
|
||||
TEST(copy, cos) {
|
||||
ASSERT_TRUE(check_unary<op::Cos>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Cos>());
|
||||
}
|
||||
|
||||
TEST(copy, cosh) {
|
||||
ASSERT_TRUE(check_unary<op::Cosh>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Cosh>());
|
||||
}
|
||||
|
||||
TEST(copy, divide) {
|
||||
@ -156,11 +193,11 @@ TEST(copy, equal) {
|
||||
}
|
||||
|
||||
TEST(copy, exp) {
|
||||
ASSERT_TRUE(check_unary<op::Exp>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Exp>());
|
||||
}
|
||||
|
||||
TEST(copy, floor) {
|
||||
ASSERT_TRUE(check_unary<op::Floor>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Floor>());
|
||||
}
|
||||
|
||||
TEST(copy, greater_eq) {
|
||||
@ -180,7 +217,7 @@ TEST(copy, less) {
|
||||
}
|
||||
|
||||
TEST(copy, log) {
|
||||
ASSERT_TRUE(check_unary<op::Log>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Log>());
|
||||
}
|
||||
|
||||
TEST(copy, maximum) {
|
||||
@ -196,7 +233,7 @@ TEST(copy, multiply) {
|
||||
}
|
||||
|
||||
TEST(copy, negative) {
|
||||
ASSERT_TRUE(check_unary<op::Negative>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Negative>());
|
||||
}
|
||||
|
||||
TEST(copy, not_equal) {
|
||||
@ -205,9 +242,9 @@ TEST(copy, not_equal) {
|
||||
|
||||
TEST(copy, parameter) {
|
||||
Shape shape{1};
|
||||
auto node = make_shared<op::Parameter>(element::f32, shape);
|
||||
auto node = make_shared<ov::op::v0::Parameter>(element::f32, shape);
|
||||
auto new_node = node->clone_with_new_inputs({});
|
||||
auto node_cast = ov::as_type_ptr<op::Parameter>(new_node);
|
||||
auto node_cast = ov::as_type_ptr<ov::op::v0::Parameter>(new_node);
|
||||
ASSERT_NE(node_cast, nullptr);
|
||||
|
||||
ASSERT_TRUE(nullptr != new_node);
|
||||
@ -222,12 +259,12 @@ TEST(copy, power) {
|
||||
TEST(copy, reduce_sum) {
|
||||
Shape shape{4, 3};
|
||||
AxisSet axes{1};
|
||||
auto arg0 = make_shared<op::Parameter>(element::f32, shape);
|
||||
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
|
||||
|
||||
auto axes_node = op::Constant::create(element::i64, {axes.size()}, axes.to_vector());
|
||||
auto axes_node = ov::op::v0::Constant::create(element::i64, {axes.size()}, axes.to_vector());
|
||||
auto node = make_shared<op::v1::ReduceSum>(arg0, axes_node, true);
|
||||
OutputVector new_args{make_shared<op::Parameter>(element::f32, shape),
|
||||
op::Constant::create(element::i64, {axes.size()}, axes.to_vector())};
|
||||
OutputVector new_args{make_shared<ov::op::v0::Parameter>(element::f32, shape),
|
||||
ov::op::v0::Constant::create(element::i64, {axes.size()}, axes.to_vector())};
|
||||
auto new_node = node->clone_with_new_inputs(new_args);
|
||||
auto node_cast = ov::as_type_ptr<op::v1::ReduceSum>(new_node);
|
||||
ASSERT_NE(node_cast, nullptr);
|
||||
@ -242,11 +279,11 @@ TEST(copy, reshape) {
|
||||
Shape shape_in{2, 3, 4};
|
||||
Shape shape_out{6, 4};
|
||||
|
||||
auto arg0 = make_shared<op::Parameter>(element::f32, shape_in);
|
||||
OutputVector new_args{make_shared<op::Parameter>(element::f32, shape_in),
|
||||
op::Constant::create(element::u64, {shape_out.size()}, shape_out)};
|
||||
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, shape_in);
|
||||
OutputVector new_args{make_shared<ov::op::v0::Parameter>(element::f32, shape_in),
|
||||
ov::op::v0::Constant::create(element::u64, {shape_out.size()}, shape_out)};
|
||||
|
||||
auto shape_pattern = op::Constant::create(element::u64, {shape_out.size()}, shape_out);
|
||||
auto shape_pattern = ov::op::v0::Constant::create(element::u64, {shape_out.size()}, shape_out);
|
||||
auto node = make_shared<op::v1::Reshape>(arg0, shape_pattern, false);
|
||||
auto new_node = node->clone_with_new_inputs(new_args);
|
||||
auto node_cast = ov::as_type_ptr<op::v1::Reshape>(new_node);
|
||||
@ -259,12 +296,12 @@ TEST(copy, reshape) {
|
||||
|
||||
TEST(copy, select) {
|
||||
Shape shape{1};
|
||||
auto arg0 = make_shared<op::Parameter>(element::boolean, shape);
|
||||
auto arg1 = make_shared<op::Parameter>(element::f32, shape);
|
||||
auto arg2 = make_shared<op::Parameter>(element::f32, shape);
|
||||
OutputVector new_args{make_shared<op::Parameter>(element::boolean, shape),
|
||||
make_shared<op::Parameter>(element::f32, shape),
|
||||
make_shared<op::Parameter>(element::f32, shape)};
|
||||
auto arg0 = make_shared<ov::op::v0::Parameter>(element::boolean, shape);
|
||||
auto arg1 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
|
||||
auto arg2 = make_shared<ov::op::v0::Parameter>(element::f32, shape);
|
||||
OutputVector new_args{make_shared<ov::op::v0::Parameter>(element::boolean, shape),
|
||||
make_shared<ov::op::v0::Parameter>(element::f32, shape),
|
||||
make_shared<ov::op::v0::Parameter>(element::f32, shape)};
|
||||
|
||||
auto node = make_shared<op::v1::Select>(arg0, arg1, arg2);
|
||||
auto new_node = node->clone_with_new_inputs(new_args);
|
||||
@ -276,15 +313,15 @@ TEST(copy, select) {
|
||||
}
|
||||
|
||||
TEST(copy, sign) {
|
||||
ASSERT_TRUE(check_unary<op::Sign>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Sign>());
|
||||
}
|
||||
|
||||
TEST(copy, sin) {
|
||||
ASSERT_TRUE(check_unary<op::Sin>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Sin>());
|
||||
}
|
||||
|
||||
TEST(copy, sinh) {
|
||||
ASSERT_TRUE(check_unary<op::Sinh>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Sinh>());
|
||||
}
|
||||
|
||||
TEST(copy, strided_slice) {
|
||||
@ -293,15 +330,15 @@ TEST(copy, strided_slice) {
|
||||
Coordinate upper{2, 3, 4};
|
||||
Strides strides{1, 1, 1};
|
||||
|
||||
auto arg0 = make_shared<op::Parameter>(element::f32, shape_in);
|
||||
OutputVector new_args{make_shared<op::Parameter>(element::f32, shape_in),
|
||||
op::Constant::create(element::u64, {lower.size()}, lower),
|
||||
op::Constant::create(element::u64, {upper.size()}, upper),
|
||||
op::Constant::create(element::i64, {strides.size()}, strides)};
|
||||
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, shape_in);
|
||||
OutputVector new_args{make_shared<ov::op::v0::Parameter>(element::f32, shape_in),
|
||||
ov::op::v0::Constant::create(element::u64, {lower.size()}, lower),
|
||||
ov::op::v0::Constant::create(element::u64, {upper.size()}, upper),
|
||||
ov::op::v0::Constant::create(element::i64, {strides.size()}, strides)};
|
||||
|
||||
auto begin_node = op::Constant::create(element::i64, {lower.size()}, lower);
|
||||
auto end_node = op::Constant::create(element::i64, {upper.size()}, upper);
|
||||
auto strides_node = op::Constant::create(element::i64, {strides.size()}, strides);
|
||||
auto begin_node = ov::op::v0::Constant::create(element::i64, {lower.size()}, lower);
|
||||
auto end_node = ov::op::v0::Constant::create(element::i64, {upper.size()}, upper);
|
||||
auto strides_node = ov::op::v0::Constant::create(element::i64, {strides.size()}, strides);
|
||||
auto node = make_shared<op::v1::StridedSlice>(arg0,
|
||||
begin_node,
|
||||
end_node,
|
||||
@ -334,38 +371,38 @@ TEST(copy, subtract) {
|
||||
}
|
||||
|
||||
TEST(copy, tan) {
|
||||
ASSERT_TRUE(check_unary<op::Tan>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Tan>());
|
||||
}
|
||||
|
||||
TEST(copy, tanh) {
|
||||
ASSERT_TRUE(check_unary<op::Tanh>());
|
||||
ASSERT_TRUE(check_unary<op::v0::Tanh>());
|
||||
}
|
||||
|
||||
TEST(copy, loop) {
|
||||
// That which we iterate over
|
||||
auto X = make_shared<opset5::Parameter>(element::f32, Shape{32, 1, 10});
|
||||
auto Y = make_shared<opset5::Parameter>(element::f32, Shape{32, 1, 10});
|
||||
auto M = make_shared<opset5::Parameter>(element::f32, Shape{32, 1, 10});
|
||||
auto X = make_shared<op::v0::Parameter>(element::f32, Shape{32, 1, 10});
|
||||
auto Y = make_shared<op::v0::Parameter>(element::f32, Shape{32, 1, 10});
|
||||
auto M = make_shared<op::v0::Parameter>(element::f32, Shape{32, 1, 10});
|
||||
|
||||
// Set up the cell body, a function from (Xi, Yi) -> (Zo)
|
||||
// Body parameters
|
||||
auto current_iteration = make_shared<opset5::Parameter>(element::i64, Shape{});
|
||||
auto Xi = make_shared<opset5::Parameter>(element::f32, PartialShape::dynamic());
|
||||
auto Yi = make_shared<opset5::Parameter>(element::f32, PartialShape::dynamic());
|
||||
auto M_body = make_shared<opset5::Parameter>(element::f32, PartialShape::dynamic());
|
||||
auto body_condition = std::make_shared<ngraph::opset5::Constant>(ngraph::element::boolean, ngraph::Shape{}, true);
|
||||
auto current_iteration = make_shared<op::v0::Parameter>(element::i64, Shape{});
|
||||
auto Xi = make_shared<op::v0::Parameter>(element::f32, PartialShape::dynamic());
|
||||
auto Yi = make_shared<op::v0::Parameter>(element::f32, PartialShape::dynamic());
|
||||
auto M_body = make_shared<op::v0::Parameter>(element::f32, PartialShape::dynamic());
|
||||
auto body_condition = std::make_shared<ov::op::v0::Constant>(ov::element::boolean, ov::Shape{}, true);
|
||||
|
||||
auto trip_count = std::make_shared<ngraph::opset5::Constant>(ngraph::element::i64, ngraph::Shape{}, 10);
|
||||
auto exec_condition = std::make_shared<ngraph::opset5::Constant>(ngraph::element::boolean, ngraph::Shape{}, true);
|
||||
auto trip_count = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, 10);
|
||||
auto exec_condition = std::make_shared<ov::op::v0::Constant>(ov::element::boolean, ov::Shape{}, true);
|
||||
// Body
|
||||
auto sum = make_shared<ngraph::opset5::Add>(Xi, Yi);
|
||||
auto Zo = make_shared<ngraph::opset5::Multiply>(sum, M_body);
|
||||
auto body = make_shared<ngraph::Function>(OutputVector{Zo, body_condition},
|
||||
ParameterVector{Xi, current_iteration, Yi, M_body});
|
||||
auto sum = make_shared<ov::op::v1::Add>(Xi, Yi);
|
||||
auto Zo = make_shared<ov::op::v1::Multiply>(sum, M_body);
|
||||
auto body =
|
||||
make_shared<ov::Model>(OutputVector{Zo, body_condition}, ParameterVector{Xi, current_iteration, Yi, M_body});
|
||||
|
||||
auto loop = make_shared<opset5::Loop>(trip_count, exec_condition);
|
||||
auto loop = make_shared<op::v5::Loop>(trip_count, exec_condition);
|
||||
loop->set_function(body);
|
||||
loop->set_special_body_ports(ngraph::opset5::Loop::SpecialBodyPorts{1, 1});
|
||||
loop->set_special_body_ports(ov::op::v5::Loop::SpecialBodyPorts{1, 1});
|
||||
|
||||
loop->set_invariant_input(Xi, X);
|
||||
loop->set_invariant_input(Yi, Y);
|
||||
@ -379,13 +416,13 @@ TEST(copy, loop) {
|
||||
auto out2 = loop->get_concatenated_slices(Zo, 0, 1, 1, -1, 1);
|
||||
loop->validate_and_infer_types();
|
||||
// That which we iterate over
|
||||
auto X_new = make_shared<opset5::Parameter>(element::f32, Shape{3, 2, 5});
|
||||
auto Y_new = make_shared<opset5::Parameter>(element::f32, Shape{3, 2, 5});
|
||||
auto M_new = make_shared<opset5::Parameter>(element::f32, Shape{3, 2, 5});
|
||||
auto X_new = make_shared<op::v0::Parameter>(element::f32, Shape{3, 2, 5});
|
||||
auto Y_new = make_shared<op::v0::Parameter>(element::f32, Shape{3, 2, 5});
|
||||
auto M_new = make_shared<op::v0::Parameter>(element::f32, Shape{3, 2, 5});
|
||||
OutputVector new_args = {trip_count, exec_condition, X_new, Y_new, M_new};
|
||||
auto loop_copy = loop->clone_with_new_inputs(new_args);
|
||||
|
||||
auto node_cast = std::dynamic_pointer_cast<opset5::Loop>(loop_copy);
|
||||
auto node_cast = std::dynamic_pointer_cast<op::v5::Loop>(loop_copy);
|
||||
ASSERT_NE(node_cast, nullptr);
|
||||
ASSERT_TRUE(nullptr != loop_copy);
|
||||
EXPECT_EQ(loop->get_num_iterations(), node_cast->get_num_iterations());
|
||||
@ -408,11 +445,11 @@ TEST(copy, random_uniform) {
|
||||
auto shape = std::vector<int64_t>{1, 2, 3};
|
||||
float min = 0., max = 1.;
|
||||
|
||||
const auto min_val_param = make_shared<op::Parameter>(element::f32, Shape{1});
|
||||
const auto max_val_param = make_shared<op::Parameter>(element::f32, Shape{1});
|
||||
auto out_shape = make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{1, 2, 3});
|
||||
const auto min_val_param = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
|
||||
const auto max_val_param = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto out_shape = make_shared<ov::op::v0::Constant>(element::i64, Shape{3}, std::vector<int64_t>{1, 2, 3});
|
||||
auto ru =
|
||||
std::make_shared<ov::opset8::RandomUniform>(out_shape, min_val_param, max_val_param, element::f32, 150, 10);
|
||||
std::make_shared<ov::op::v8::RandomUniform>(out_shape, min_val_param, max_val_param, element::f32, 150, 10);
|
||||
|
||||
// Call `evaluate` to update m_state
|
||||
auto outputs = ov::TensorVector{{element::i64, out_shape->get_shape(), shape.data()}};
|
||||
@ -421,12 +458,12 @@ TEST(copy, random_uniform) {
|
||||
{element::f32, min_val_param->get_shape(), &min},
|
||||
{element::f32, max_val_param->get_shape(), &max}});
|
||||
|
||||
auto out_shape_c = make_shared<op::Constant>(element::i64, Shape{4}, std::vector<int64_t>{4, 3, 2, 1});
|
||||
const auto min_val_param_c = make_shared<op::Parameter>(element::f32, Shape{1});
|
||||
const auto max_val_param_c = make_shared<op::Parameter>(element::f32, Shape{1});
|
||||
auto out_shape_c = make_shared<ov::op::v0::Constant>(element::i64, Shape{4}, std::vector<int64_t>{4, 3, 2, 1});
|
||||
const auto min_val_param_c = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
|
||||
const auto max_val_param_c = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
|
||||
OutputVector new_args{out_shape_c, min_val_param_c, max_val_param_c};
|
||||
auto new_ru = ru->clone_with_new_inputs(new_args);
|
||||
auto node_cast = ov::as_type_ptr<ov::opset8::RandomUniform>(new_ru);
|
||||
auto node_cast = ov::as_type_ptr<ov::op::v8::RandomUniform>(new_ru);
|
||||
ASSERT_NE(node_cast, nullptr);
|
||||
|
||||
ASSERT_TRUE(nullptr != new_ru);
|
||||
|
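The random_uniform test above also demonstrates the new ov::Tensor-based evaluate() call. A minimal, self-contained sketch of that calling convention on a simpler op (Abs); all buffers and names here are chosen only for illustration:

#include <memory>

#include "openvino/core/shape.hpp"
#include "openvino/op/abs.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/runtime/tensor.hpp"

void evaluate_sketch() {
    auto arg = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{3});
    auto abs = std::make_shared<ov::op::v0::Abs>(arg);

    float input_data[3] = {-1.0f, 2.0f, -3.0f};
    float output_data[3] = {};

    // Tensors wrap pre-allocated host buffers; evaluate() fills the outputs in place.
    ov::TensorVector outputs{{ov::element::f32, ov::Shape{3}, output_data}};
    ov::TensorVector inputs{{ov::element::f32, ov::Shape{3}, input_data}};
    abs->evaluate(outputs, inputs);
    // output_data is expected to hold {1.0f, 2.0f, 3.0f} afterwards.
}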
@ -7,18 +7,16 @@
#include <iostream>
#include <list>
#include <memory>
#include <ngraph/pattern/op/wrap_type.hpp>
#include <ngraph/rt_info.hpp>

#include "gtest/gtest.h"
#include "ngraph/graph_util.hpp"
#include "ngraph/log.hpp"
#include "ngraph/ngraph.hpp"
#include "ngraph/opsets/opset3.hpp"
#include "ngraph/pass/graph_rewrite.hpp"
#include "ngraph/pass/manager.hpp"
#include "openvino/core/graph_util.hpp"
#include "openvino/core/node.hpp"
#include "openvino/core/rt_info.hpp"
#include "openvino/core/runtime_attribute.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/op/relu.hpp"

using namespace ngraph;
using namespace ov;
using namespace std;
class TestAttributeNoCopyable : public ov::RuntimeAttribute {
|
||||
@ -71,19 +69,19 @@ public:
|
||||
return rt_info.count(TestAttributeMergable::get_type_info_static());
|
||||
}
|
||||
|
||||
ov::Any merge(const ngraph::NodeVector& nodes) const override {
|
||||
ov::Any merge(const ov::NodeVector& nodes) const override {
|
||||
return {TestAttributeMergable()};
|
||||
}
|
||||
};
|
||||
|
||||
TEST(copy_runtime_info, node_to_node_1) {
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto a = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
|
||||
|
||||
TestAttributeCopyable::set(a);
|
||||
TestAttributeNoCopyable::set(b);
|
||||
|
||||
copy_runtime_info(a, b);
|
||||
ov::copy_runtime_info(a, b);
|
||||
|
||||
ASSERT_TRUE(TestAttributeCopyable::exists_in(a));
|
||||
ASSERT_TRUE(TestAttributeCopyable::exists_in(b));
|
||||
@ -96,8 +94,8 @@ TEST(copy_runtime_info, node_to_node_1) {
|
||||
}
|
||||
|
||||
TEST(copy_runtime_info, node_to_node_2) {
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
|
||||
TestAttributeCopyable::set(a);
|
||||
TestAttributeNoCopyable::set(a);
|
||||
@ -112,9 +110,9 @@ TEST(copy_runtime_info, node_to_node_2) {
|
||||
}
|
||||
|
||||
TEST(copy_runtime_info, node_to_nodes) {
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto c = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto c = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
|
||||
TestAttributeCopyable::set(a);
|
||||
TestAttributeNoCopyable::set(b);
|
||||
@ -132,9 +130,9 @@ TEST(copy_runtime_info, node_to_nodes) {
|
||||
}
|
||||
|
||||
TEST(copy_runtime_info, nodes_to_node_1) {
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto c = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto c = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
|
||||
TestAttributeCopyable::set(a);
|
||||
TestAttributeNoCopyable::set(a);
|
||||
@ -149,9 +147,9 @@ TEST(copy_runtime_info, nodes_to_node_1) {
|
||||
}
|
||||
|
||||
TEST(copy_runtime_info, nodes_to_node_2) {
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto c = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto c = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
|
||||
TestAttributeMergable::set(a);
|
||||
TestAttributeMergable::set(b);
|
||||
@ -164,8 +162,8 @@ TEST(copy_runtime_info, nodes_to_node_2) {
|
||||
}
|
||||
|
||||
TEST(copy_runtime_info, nodes_to_node_3) {
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
|
||||
TestAttributeCopyable::set(a);
|
||||
TestAttributeNoCopyable::set(b);
|
||||
@ -177,10 +175,10 @@ TEST(copy_runtime_info, nodes_to_node_3) {
|
||||
}
|
||||
|
||||
TEST(copy_runtime_info, replace_output_update_name) {
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Relu>(a);
|
||||
auto c = make_shared<opset3::Relu>(b);
|
||||
auto d = make_shared<opset3::Relu>(c);
|
||||
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<op::v0::Relu>(a);
|
||||
auto c = make_shared<op::v0::Relu>(b);
|
||||
auto d = make_shared<op::v0::Relu>(c);
|
||||
|
||||
TestAttributeMergable::set(b);
|
||||
TestAttributeMergable::set(c);
|
||||
|
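The attribute classes above all follow the same recipe: derive from ov::RuntimeAttribute, register RTTI, stash an instance in a node's rt_info map, and let ov::copy_runtime_info propagate it. A condensed sketch of that recipe; the attribute name and the two static helpers are made up for the example:

#include <memory>

#include "openvino/core/rt_info.hpp"
#include "openvino/core/runtime_attribute.hpp"
#include "openvino/op/parameter.hpp"

class ExampleAttribute : public ov::RuntimeAttribute {
public:
    OPENVINO_RTTI("example_attribute", "0");

    static void set(const std::shared_ptr<ov::Node>& node) {
        node->get_rt_info()[ExampleAttribute::get_type_info_static()] = ExampleAttribute{};
    }

    static bool exists_in(const std::shared_ptr<ov::Node>& node) {
        return node->get_rt_info().count(ExampleAttribute::get_type_info_static());
    }
};

void copy_rt_info_sketch() {
    auto a = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1});
    auto b = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1});

    ExampleAttribute::set(a);
    ov::copy_runtime_info(a, b);
    // Copyable attributes are expected to appear on both nodes afterwards,
    // mirroring TEST(copy_runtime_info, node_to_node_1).
}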
@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "ngraph/type/element_type.hpp"
#include "openvino/core/type/element_type.hpp"

#include <map>
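element_type.cpp exercises the ov::element::Type wrapper that the include swap above points at. A hedged stand-alone sketch of the basics of that class, with output values noted in comments:

#include <iostream>

#include "openvino/core/type/element_type.hpp"

int main() {
    ov::element::Type t = ov::element::f32;

    std::cout << t << '\n';              // streams as "f32"
    std::cout << t.size() << '\n';       // 4 bytes per element
    std::cout << t.bitwidth() << '\n';   // 32
    std::cout << std::boolalpha << t.is_real() << '\n';  // true

    // Types compare directly against the predefined constants.
    const bool same = (t == ov::element::f32);
    return same ? 0 : 1;
}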
@ -2,118 +2,113 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "ngraph/file_util.hpp"
#include "openvino/util/file_util.hpp"

#include <gtest/gtest.h>

#include <random>
#include <sstream>
#include <string>
#include <vector>

#include "gtest/gtest.h"
#include "openvino/util/file_util.hpp"

NGRAPH_SUPPRESS_DEPRECATED_START

using namespace std;
using namespace ngraph;
using namespace ov;
TEST(file_util, path_join) {
|
||||
{
|
||||
string s1 = "";
|
||||
string s2 = "";
|
||||
|
||||
EXPECT_STREQ("", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("", ov::util::path_join({s1, s2}).c_str());
|
||||
}
|
||||
{
|
||||
string s1 = "";
|
||||
string s2 = "/test1/test2";
|
||||
|
||||
EXPECT_STREQ("/test1/test2", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("/test1/test2", ov::util::path_join({s1, s2}).c_str());
|
||||
}
|
||||
{
|
||||
string s1 = "";
|
||||
string s2 = "/test1/test2/";
|
||||
|
||||
EXPECT_STREQ("/test1/test2/", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("/test1/test2/", ov::util::path_join({s1, s2}).c_str());
|
||||
}
|
||||
{
|
||||
string s1 = "";
|
||||
string s2 = "test1/test2";
|
||||
|
||||
EXPECT_STREQ("test1/test2", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("test1/test2", ov::util::path_join({s1, s2}).c_str());
|
||||
}
|
||||
|
||||
{
|
||||
string s1 = "/x1/x2";
|
||||
string s2 = "";
|
||||
|
||||
EXPECT_STREQ("/x1/x2", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("/x1/x2", ov::util::path_join({s1, s2}).c_str());
|
||||
}
|
||||
{
|
||||
string s1 = "/x1/x2/";
|
||||
string s2 = "/";
|
||||
|
||||
EXPECT_STREQ("/", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("/", ov::util::path_join({s1, s2}).c_str());
|
||||
}
|
||||
{
|
||||
string s1 = "/x1/x2";
|
||||
string s2 = "/test1/test2";
|
||||
|
||||
EXPECT_STREQ("/test1/test2", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("/test1/test2", ov::util::path_join({s1, s2}).c_str());
|
||||
}
|
||||
{
|
||||
string s1 = "/x1/x2/";
|
||||
string s2 = "test1/test2";
|
||||
|
||||
EXPECT_STREQ("/x1/x2/test1/test2", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("/x1/x2/test1/test2", ov::util::path_join({s1, s2}).c_str());
|
||||
}
|
||||
{
|
||||
string s1 = "/x1/x2";
|
||||
string s2 = "test1/test2";
|
||||
|
||||
#ifndef _WIN32
|
||||
EXPECT_STREQ("/x1/x2/test1/test2", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("/x1/x2/test1/test2", ov::util::path_join({s1, s2}).c_str());
|
||||
#else
|
||||
EXPECT_STREQ("/x1/x2\\test1/test2", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("/x1/x2\\test1/test2", ov::util::path_join({s1, s2}).c_str());
|
||||
#endif
|
||||
}
|
||||
{
|
||||
string s1 = "/";
|
||||
string s2 = "test1/test2";
|
||||
|
||||
EXPECT_STREQ("/test1/test2", file_util::path_join(s1, s2).c_str());
|
||||
EXPECT_STREQ("/test1/test2", ov::util::path_join({s1, s2}).c_str());
|
||||
}
|
||||
}
|
||||
|
||||
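The assertions above pin down the behaviour of the new ov::util::path_join overload, which takes an initializer list of segments instead of two separate strings. A short usage sketch:

#include <iostream>
#include <string>

#include "openvino/util/file_util.hpp"

int main() {
    const std::string base = "/x1/x2";
    const std::string leaf = "test1/test2";

    // Relative second segment: appended with the platform separator.
    std::cout << ov::util::path_join({base, leaf}) << '\n';

    // Absolute second segment: replaces the first one entirely.
    std::cout << ov::util::path_join({base, "/test1/test2"}) << '\n';  // "/test1/test2"
    return 0;
}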
TEST(file_util, sanitize_path) {
|
||||
{
|
||||
string path = "../../tensor.data";
|
||||
EXPECT_STREQ("tensor.data", file_util::sanitize_path(path).c_str());
|
||||
EXPECT_STREQ("tensor.data", ov::util::sanitize_path(path).c_str());
|
||||
}
|
||||
{
|
||||
string path = "/../tensor.data";
|
||||
EXPECT_STREQ("tensor.data", file_util::sanitize_path(path).c_str());
|
||||
EXPECT_STREQ("tensor.data", ov::util::sanitize_path(path).c_str());
|
||||
}
|
||||
{
|
||||
string path = "..";
|
||||
EXPECT_STREQ("", file_util::sanitize_path(path).c_str());
|
||||
EXPECT_STREQ("", ov::util::sanitize_path(path).c_str());
|
||||
}
|
||||
{
|
||||
string path = "workspace/data/tensor.data";
|
||||
EXPECT_STREQ("workspace/data/tensor.data", file_util::sanitize_path(path).c_str());
|
||||
EXPECT_STREQ("workspace/data/tensor.data", ov::util::sanitize_path(path).c_str());
|
||||
}
|
||||
{
|
||||
string path = "..\\..\\tensor.data";
|
||||
EXPECT_STREQ("tensor.data", file_util::sanitize_path(path).c_str());
|
||||
EXPECT_STREQ("tensor.data", ov::util::sanitize_path(path).c_str());
|
||||
}
|
||||
{
|
||||
string path = "C:\\workspace\\tensor.data";
|
||||
EXPECT_STREQ("workspace\\tensor.data", file_util::sanitize_path(path).c_str());
|
||||
EXPECT_STREQ("workspace\\tensor.data", ov::util::sanitize_path(path).c_str());
|
||||
}
|
||||
}
|
||||
|
||||
NGRAPH_SUPPRESS_DEPRECATED_END
|
||||
|
||||
using namespace testing;
|
||||
|
||||
class TrimFileTest : public Test {
|
||||
|
@ -2,17 +2,17 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "ngraph/type/float16.hpp"
#include "openvino/core/type/float16.hpp"

#include <gtest/gtest.h>

#include <climits>
#include <random>

#include "common_test_utils/float_util.hpp"
#include "gtest/gtest.h"
#include "ngraph/runtime/aligned_buffer.hpp"

using namespace std;
using namespace ngraph;
using namespace ov;

TEST(float16, conversions) {
float16 f16;
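float16.cpp checks round-trips through the 16-bit type now taken from openvino/core/type/float16.hpp. A minimal conversion sketch; the values are chosen here only to show the API:

#include <iostream>

#include "openvino/core/type/float16.hpp"

int main() {
    // Construct from float and convert back; 1.5 is exactly representable.
    ov::float16 half(1.5f);
    float restored = static_cast<float>(half);
    std::cout << restored << '\n';  // 1.5

    // Bit-level construction: sign 0, exponent 01111, mantissa 1000000000 -> 1.5.
    std::cout << static_cast<float>(ov::float16::from_bits(0x3E00)) << '\n';
    return 0;
}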
@ -2,12 +2,12 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "ngraph/op/util/framework_node.hpp"
#include "openvino/op/util/framework_node.hpp"

#include <gtest/gtest.h>

#include <vector>

#include "gtest/gtest.h"

TEST(framework_node, attrs) {
ov::op::util::FrameworkNodeAttrs attrs;
@ -4,71 +4,72 @@

#include <gtest/gtest.h>

#include <common_test_utils/ngraph_test_utils.hpp>
#include <ngraph/opsets/opset3.hpp>
#include <ngraph/pass/graph_rewrite.hpp>
#include <ngraph/pass/manager.hpp>

NGRAPH_SUPPRESS_DEPRECATED_START
#include "common_test_utils/ngraph_test_utils.hpp"
#include "openvino/core/rtti.hpp"
#include "openvino/op/constant.hpp"
#include "openvino/op/divide.hpp"
#include "openvino/op/op.hpp"
#include "openvino/op/relu.hpp"
#include "openvino/op/result.hpp"
#include "openvino/op/tanh.hpp"
#include "openvino/pass/pattern/op/label.hpp"

using namespace ::testing;
using namespace std;
using namespace ngraph;
using namespace ov;
using namespace ov::pass;

class TestPass : public ngraph::pass::MatcherPass {
class TestPass : public ov::pass::MatcherPass {
public:
NGRAPH_RTTI_DECLARATION;
OPENVINO_RTTI("TestPass");
TestPass() : MatcherPass() {
auto divide =
std::make_shared<ngraph::pattern::op::Label>(element::f32, Shape{}, pattern::has_class<opset3::Divide>());
ngraph::graph_rewrite_callback callback = [this](pattern::Matcher& m) {
auto divide = std::make_shared<ov::pass::pattern::op::Label>(element::f32,
Shape{},
pattern::has_class<ov::op::v1::Divide>());
ov::graph_rewrite_callback callback = [this](pattern::Matcher& m) {
if (transformation_callback(m.get_match_root())) {
auto relu = std::make_shared<ngraph::opset3::Relu>(m.get_match_root()->input_value(0));
ngraph::replace_node(m.get_match_root(), relu);
auto relu = std::make_shared<ov::op::v0::Relu>(m.get_match_root()->input_value(0));
ov::replace_node(m.get_match_root(), relu);
return true;
}
return false;
};

auto m = std::make_shared<ngraph::pattern::Matcher>(divide, "TestMatcher");
auto m = std::make_shared<ov::pass::pattern::Matcher>(divide, "TestMatcher");
this->register_matcher(m, callback);
}
};
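Once rewritten against ov::pass::MatcherPass, a pass like TestPass is driven exactly as the later tests do: register it (directly or under a GraphRewrite anchor) with ov::pass::Manager and run the manager over an ov::Model. A condensed sketch of that driving code, reusing TestPass, get_model() and get_callback() defined in this file:

#include "openvino/pass/graph_rewrite.hpp"
#include "openvino/pass/manager.hpp"

// Assumes TestPass, get_model() and get_callback() from this file; see the
// tests below for the assertions that normally follow the run.
void run_test_pass_sketch() {
    auto model = get_model();

    ov::pass::Manager manager;
    manager.register_pass<TestPass>();
    manager.get_pass_config()->set_callback(get_callback());
    manager.run_passes(model);
}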
class GatherNodesPass : public ngraph::pass::MatcherPass {
|
||||
class GatherNodesPass : public ov::pass::MatcherPass {
|
||||
public:
|
||||
NGRAPH_RTTI_DECLARATION;
|
||||
OPENVINO_RTTI("GatherNodesPass");
|
||||
GatherNodesPass(NodeVector& order) : MatcherPass() {
|
||||
ngraph::matcher_pass_callback callback = [&order](pattern::Matcher& m) {
|
||||
ov::matcher_pass_callback callback = [&order](pattern::Matcher& m) {
|
||||
order.push_back(m.get_match_root());
|
||||
return false;
|
||||
};
|
||||
|
||||
auto m = std::make_shared<ngraph::pattern::Matcher>(ngraph::pattern::any_input(), "GatherNodesPass");
|
||||
auto m = std::make_shared<ov::pass::pattern::Matcher>(ov::pass::pattern::any_input(), "GatherNodesPass");
|
||||
this->register_matcher(m, callback);
|
||||
}
|
||||
};
|
||||
|
||||
class Anchor : public ngraph::pass::GraphRewrite {
|
||||
class Anchor : public ov::pass::GraphRewrite {
|
||||
public:
|
||||
NGRAPH_RTTI_DECLARATION;
|
||||
OPENVINO_RTTI("Anchor");
|
||||
Anchor() : GraphRewrite() {}
|
||||
};
|
||||
|
||||
NGRAPH_RTTI_DEFINITION(TestPass, "TestPass");
|
||||
NGRAPH_RTTI_DEFINITION(Anchor, "Anchor");
|
||||
NGRAPH_RTTI_DEFINITION(GatherNodesPass, "GatherNodesPass");
|
||||
|
||||
std::shared_ptr<Function> get_function() {
|
||||
auto data = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::f32, ngraph::Shape{3, 1, 2});
|
||||
auto divide_constant = ngraph::opset3::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.5});
|
||||
auto divide = std::make_shared<ngraph::opset3::Divide>(data, divide_constant);
|
||||
return std::make_shared<ngraph::Function>(ngraph::NodeVector{divide}, ngraph::ParameterVector{data});
|
||||
std::shared_ptr<Model> get_model() {
|
||||
auto data = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{3, 1, 2});
|
||||
auto divide_constant = ov::op::v0::Constant::create(ov::element::f32, ov::Shape{1}, {1.5});
|
||||
auto divide = std::make_shared<ov::op::v1::Divide>(data, divide_constant);
|
||||
return std::make_shared<ov::Model>(ov::NodeVector{divide}, ov::ParameterVector{data});
|
||||
}
|
||||
|
||||
ngraph::pass::param_callback get_callback() {
|
||||
ov::pass::param_callback get_callback() {
|
||||
return [](const std::shared_ptr<const Node>& node) -> bool {
|
||||
if (std::dynamic_pointer_cast<const opset3::Divide>(node)) {
|
||||
if (std::dynamic_pointer_cast<const op::v1::Divide>(node)) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
@ -77,10 +78,10 @@ ngraph::pass::param_callback get_callback() {
|
||||
}
|
||||
|
||||
TEST(GraphRewriteOrderTest, MatcherPass) {
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
NodeVector order;
|
||||
ngraph::pass::Manager m;
|
||||
ov::pass::Manager m;
|
||||
auto pass = m.register_pass<pass::GraphRewrite>();
|
||||
pass->add_matcher<GatherNodesPass>(order);
|
||||
m.run_passes(f);
|
||||
@ -89,10 +90,10 @@ TEST(GraphRewriteOrderTest, MatcherPass) {
|
||||
}
|
||||
|
||||
TEST(BackwardGraphRewriteOrderTest, MatcherPass) {
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
NodeVector order;
|
||||
ngraph::pass::Manager m;
|
||||
ov::pass::Manager m;
|
||||
auto pass = m.register_pass<pass::BackwardGraphRewrite>();
|
||||
pass->add_matcher<GatherNodesPass>(order);
|
||||
m.run_passes(f);
|
||||
@ -103,28 +104,28 @@ TEST(BackwardGraphRewriteOrderTest, MatcherPass) {
|
||||
}
|
||||
|
||||
TEST(GraphRewriteTest, MatcherPassCallback) {
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
Anchor anchor;
|
||||
anchor.add_matcher<TestPass>()->set_callback(get_callback());
|
||||
anchor.run_on_model(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
TEST(GraphRewriteTest, GraphRewriteCallback) {
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
Anchor anchor;
|
||||
anchor.add_matcher<TestPass>();
|
||||
anchor.set_callback(get_callback());
|
||||
anchor.run_on_model(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
TEST(GraphRewriteTest, ManagerCallbackDeprecated) {
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
pass::Manager manager;
|
||||
auto anchor = manager.register_pass<Anchor>();
|
||||
@ -132,11 +133,11 @@ TEST(GraphRewriteTest, ManagerCallbackDeprecated) {
|
||||
manager.get_pass_config()->set_callback(get_callback());
|
||||
manager.run_passes(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
TEST(GraphRewriteTest, ManagerCallback) {
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
pass::Manager manager;
|
||||
auto anchor = manager.register_pass<Anchor>();
|
||||
@ -145,129 +146,127 @@ TEST(GraphRewriteTest, ManagerCallback) {
|
||||
pass_config->set_callback(get_callback());
|
||||
manager.run_passes(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
TEST(GraphRewriteTest, ManagerCallback2) {
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
pass::Manager manager;
|
||||
auto anchor = manager.register_pass<TestPass>();
|
||||
manager.get_pass_config()->set_callback(get_callback());
|
||||
manager.run_passes(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
class PrivateDivide : public ngraph::opset3::Divide {
class PrivateDivide : public ov::op::v1::Divide {
public:
NGRAPH_RTTI_DECLARATION;
using ngraph::opset3::Divide::Divide;
OPENVINO_OP("PrivateDivide", "test_opset", ov::op::v1::Divide);
using ov::op::v1::Divide::Divide;
};

NGRAPH_RTTI_DEFINITION(PrivateDivide, "PrivateDivide", ngraph::opset3::Divide);

std::shared_ptr<Function> get_derived_function() {
auto data = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::f32, ngraph::Shape{3, 1, 2});
auto divide_constant = ngraph::opset3::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.5});
std::shared_ptr<Model> get_derived_model() {
auto data = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{3, 1, 2});
auto divide_constant = ov::op::v0::Constant::create(ov::element::f32, ov::Shape{1}, {1.5});
auto divide = std::make_shared<PrivateDivide>(data, divide_constant);
return std::make_shared<ngraph::Function>(ngraph::NodeVector{divide}, ngraph::ParameterVector{data});
return std::make_shared<ov::Model>(ov::NodeVector{divide}, ov::ParameterVector{data});
}

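For reference, the OPENVINO_OP macro used above folds together what NGRAPH_RTTI_DECLARATION and NGRAPH_RTTI_DEFINITION previously expressed in two places: the type name, an opset string and the parent type all sit in the class body. A minimal sketch under that assumption (MyDivide and "my_opset" are illustrative names, not part of this change):

#include "openvino/op/divide.hpp"

// Derived op that keeps Divide semantics but carries its own type_info,
// so passes can tell it apart from the stock ov::op::v1::Divide.
class MyDivide : public ov::op::v1::Divide {
public:
    OPENVINO_OP("MyDivide", "my_opset", ov::op::v1::Divide);
    using ov::op::v1::Divide::Divide;
};

// MyDivide::get_type_info_static().name identifies the derived type ("MyDivide"),
// while the declared parent keeps it castable to Divide, which is why the
// type-based matchers in the derived-model tests below still catch it.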
TEST(GraphRewriteTest, MatcherPassCallbackDerived) {
|
||||
auto f = get_derived_function();
|
||||
auto f = get_derived_model();
|
||||
|
||||
Anchor anchor;
|
||||
anchor.add_matcher<TestPass>()->set_callback(get_callback());
|
||||
anchor.run_on_model(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
class TypeBasedTestPass : public ngraph::pass::MatcherPass {
|
||||
class TypeBasedTestPass : public ov::pass::MatcherPass {
|
||||
public:
|
||||
TypeBasedTestPass() : MatcherPass() {
|
||||
auto divide = std::make_shared<ngraph::opset3::Divide>(std::make_shared<ngraph::pattern::op::Label>(),
|
||||
std::make_shared<ngraph::pattern::op::Label>());
|
||||
// element::f32, Shape{}, pattern::has_class<opset3::Divide>());
|
||||
ngraph::graph_rewrite_callback callback = [this](pattern::Matcher& m) {
|
||||
auto divide = std::make_shared<ov::op::v1::Divide>(std::make_shared<ov::pass::pattern::op::Label>(),
|
||||
std::make_shared<ov::pass::pattern::op::Label>());
|
||||
// element::f32, Shape{}, pattern::has_class<op::v1::Divide>());
|
||||
ov::graph_rewrite_callback callback = [this](pattern::Matcher& m) {
|
||||
if (transformation_callback(m.get_match_root())) {
|
||||
auto relu = std::make_shared<ngraph::opset3::Relu>(m.get_match_root()->input_value(0));
|
||||
ngraph::replace_node(m.get_match_root(), relu);
|
||||
auto relu = std::make_shared<ov::op::v0::Relu>(m.get_match_root()->input_value(0));
|
||||
ov::replace_node(m.get_match_root(), relu);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
auto m = std::make_shared<ngraph::pattern::Matcher>(divide, "TestMatcher");
|
||||
auto m = std::make_shared<ov::pass::pattern::Matcher>(divide, "TestMatcher");
|
||||
this->register_matcher(m, callback);
|
||||
}
|
||||
};
|
||||
|
||||
class TypeBasedTestPassDerived : public ngraph::pass::MatcherPass {
|
||||
class TypeBasedTestPassDerived : public ov::pass::MatcherPass {
|
||||
public:
|
||||
TypeBasedTestPassDerived() : MatcherPass() {
|
||||
auto divide = std::make_shared<PrivateDivide>(std::make_shared<ngraph::pattern::op::Label>(),
|
||||
std::make_shared<ngraph::pattern::op::Label>());
|
||||
ngraph::graph_rewrite_callback callback = [this](pattern::Matcher& m) {
|
||||
auto divide = std::make_shared<PrivateDivide>(std::make_shared<ov::pass::pattern::op::Label>(),
|
||||
std::make_shared<ov::pass::pattern::op::Label>());
|
||||
ov::graph_rewrite_callback callback = [this](pattern::Matcher& m) {
|
||||
if (transformation_callback(m.get_match_root())) {
|
||||
auto tanh = std::make_shared<ngraph::opset3::Tanh>(m.get_match_root()->input_value(0));
|
||||
ngraph::replace_node(m.get_match_root(), tanh);
|
||||
auto tanh = std::make_shared<ov::op::v0::Tanh>(m.get_match_root()->input_value(0));
|
||||
ov::replace_node(m.get_match_root(), tanh);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
auto m = std::make_shared<ngraph::pattern::Matcher>(divide, "TestMatcher");
|
||||
auto m = std::make_shared<ov::pass::pattern::Matcher>(divide, "TestMatcher");
|
||||
this->register_matcher(m, callback);
|
||||
}
|
||||
};
|
||||
|
||||
TEST(GraphRewriteTest, TypeBasedMatcherPassCallback) {
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
Anchor anchor;
|
||||
anchor.add_matcher<TypeBasedTestPass>()->set_callback(get_callback());
|
||||
anchor.run_on_model(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
TEST(GraphRewriteTest, TypeBasedMatcherPassCallbackDerived) {
|
||||
auto f = get_derived_function();
|
||||
auto f = get_derived_model();
|
||||
|
||||
Anchor anchor;
|
||||
anchor.add_matcher<TypeBasedTestPass>()->set_callback(get_callback());
|
||||
anchor.run_on_model(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
TEST(GraphRewriteTest, TypeBasedMatcherPassOrder1) {
|
||||
auto f = get_derived_function();
|
||||
auto f = get_derived_model();
|
||||
|
||||
Anchor anchor;
|
||||
anchor.add_matcher<TypeBasedTestPass>()->set_callback(get_callback());
|
||||
anchor.add_matcher<TypeBasedTestPassDerived>()->set_callback(get_callback());
|
||||
anchor.run_on_model(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
TEST(GraphRewriteTest, TypeBasedMatcherPassOrder2) {
|
||||
auto f = get_derived_function();
|
||||
auto f = get_derived_model();
|
||||
|
||||
Anchor anchor;
|
||||
anchor.add_matcher<TypeBasedTestPassDerived>()->set_callback(get_callback());
|
||||
anchor.add_matcher<TypeBasedTestPass>()->set_callback(get_callback());
|
||||
anchor.run_on_model(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Tanh>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Tanh>(f), 1);
|
||||
}
|
||||
|
||||
TEST(PassConfigTest, Test1) {
|
||||
{
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
pass::Manager manager;
|
||||
manager.register_pass<TestPass>();
|
||||
@ -277,11 +276,11 @@ TEST(PassConfigTest, Test1) {
|
||||
|
||||
manager.run_passes(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
{
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
pass::Manager manager;
|
||||
manager.register_pass<TestPass>();
|
||||
@ -291,13 +290,13 @@ TEST(PassConfigTest, Test1) {
|
||||
|
||||
manager.run_passes(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
{
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
auto pass_config = std::make_shared<ngraph::pass::PassConfig>();
|
||||
auto pass_config = std::make_shared<ov::pass::PassConfig>();
|
||||
pass::Manager manager(pass_config);
|
||||
|
||||
manager.register_pass<TestPass>();
|
||||
@ -306,11 +305,11 @@ TEST(PassConfigTest, Test1) {
|
||||
|
||||
manager.run_passes(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
{
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
pass::Manager manager;
|
||||
auto anchor = manager.register_pass<Anchor>();
|
||||
@ -321,11 +320,11 @@ TEST(PassConfigTest, Test1) {
|
||||
|
||||
manager.run_passes(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
{
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
pass::Manager manager;
|
||||
auto anchor = manager.register_pass<Anchor>();
|
||||
@ -336,7 +335,7 @@ TEST(PassConfigTest, Test1) {
|
||||
|
||||
manager.run_passes(f);
|
||||
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
{
|
||||
@ -348,7 +347,7 @@ TEST(PassConfigTest, Test1) {
|
||||
}
|
||||
|
||||
{
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
pass::Manager manager;
|
||||
manager.register_pass<TestPass>();
|
||||
@ -358,15 +357,15 @@ TEST(PassConfigTest, Test1) {
|
||||
|
||||
pass_config->disable<TestPass>();
|
||||
manager.run_passes(f);
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 0);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 0);
|
||||
|
||||
pass_config->enable<TestPass>();
|
||||
manager.run_passes(f);
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
|
||||
{
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
|
||||
pass::Manager manager;
|
||||
auto anchor = manager.register_pass<Anchor>();
|
||||
@ -377,26 +376,26 @@ TEST(PassConfigTest, Test1) {
|
||||
|
||||
pass_config->disable<TestPass>();
|
||||
manager.run_passes(f);
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 0);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 0);
|
||||
|
||||
pass_config->enable<TestPass>();
|
||||
manager.run_passes(f);
|
||||
ASSERT_EQ(count_ops_of_type<opset3::Relu>(f), 1);
|
||||
ASSERT_EQ(count_ops_of_type<op::v0::Relu>(f), 1);
|
||||
}
|
||||
}
|
||||
|
||||
class CheckConsumers : public ngraph::pass::MatcherPass {
|
||||
class CheckConsumers : public ov::pass::MatcherPass {
|
||||
public:
|
||||
NGRAPH_RTTI_DECLARATION;
|
||||
CheckConsumers() {
|
||||
ngraph::matcher_pass_callback callback = [](pattern::Matcher& m) -> bool {
|
||||
ov::matcher_pass_callback callback = [](pattern::Matcher& m) -> bool {
|
||||
auto node = m.get_match_root();
|
||||
auto consumers = [](Node* node) {
|
||||
int64_t cnt{0};
|
||||
for (auto output : node->outputs()) {
|
||||
cnt += output.get_target_inputs().size();
|
||||
}
|
||||
if (ov::as_type<op::Parameter>(node) || ov::as_type<op::Result>(node)) {
|
||||
if (ov::as_type<op::v0::Parameter>(node) || ov::as_type<op::v0::Result>(node)) {
|
||||
cnt += 1;
|
||||
}
|
||||
return cnt;
|
||||
@ -429,7 +428,7 @@ public:
|
||||
return false;
|
||||
};
|
||||
|
||||
auto m = std::make_shared<ngraph::pattern::Matcher>(ngraph::pattern::any_input(), "CheckConsumers");
|
||||
auto m = std::make_shared<ov::pass::pattern::Matcher>(ov::pass::pattern::any_input(), "CheckConsumers");
|
||||
this->register_matcher(m, callback);
|
||||
}
|
||||
};
|
||||
@ -437,7 +436,7 @@ public:
|
||||
NGRAPH_RTTI_DEFINITION(CheckConsumers, "CheckConsumers");
|
||||
|
||||
TEST(GraphRewriteTest, nodes_use_count) {
|
||||
auto f = get_function();
|
||||
auto f = get_model();
|
||||
pass::Manager m;
|
||||
m.register_pass<CheckConsumers>();
|
||||
ASSERT_NO_THROW(m.run_passes(f));
|
||||
|
@ -2,21 +2,23 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
|
||||
NGRAPH_SUPPRESS_DEPRECATED_START
|
||||
#include "openvino/core/shape.hpp"
|
||||
#include "openvino/core/type/element_type.hpp"
|
||||
#include "openvino/op/add.hpp"
|
||||
#include "openvino/op/parameter.hpp"
|
||||
|
||||
using namespace std;
|
||||
using namespace ngraph;
|
||||
using namespace ov;
|
||||
|
||||
TEST(input_output, param_tensor) {
|
||||
// Params have no arguments, so we can check that the value becomes a tensor output
|
||||
auto& et = element::f32;
|
||||
Shape shape{2, 4};
|
||||
auto param = make_shared<op::Parameter>(et, shape);
|
||||
auto& et = ov::element::f32;
|
||||
ov::Shape shape{2, 4};
|
||||
auto param = make_shared<ov::op::v0::Parameter>(et, shape);
|
||||
|
||||
ASSERT_EQ(param->get_output_size(), 1);
|
||||
ASSERT_EQ(et, param->get_element_type());
|
||||
@ -24,8 +26,8 @@ TEST(input_output, param_tensor) {
|
||||
}
|
||||
|
||||
TEST(input_output, simple_output) {
|
||||
auto param_0 = make_shared<op::Parameter>(element::f32, Shape{2, 4});
|
||||
auto param_1 = make_shared<op::Parameter>(element::f32, Shape{2, 4});
|
||||
auto param_0 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
|
||||
auto param_1 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 4});
|
||||
auto add = make_shared<op::v1::Add>(param_0, param_1);
|
||||
|
||||
// Sort the ops
|
||||
|
@ -2,17 +2,18 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "common_test_utils/test_tools.hpp"
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
#include "openvino/op/constant.hpp"
|
||||
#include "openvino/runtime/tensor.hpp"
|
||||
|
||||
using namespace ngraph;
|
||||
using namespace ov;
|
||||
using namespace std;
|
||||
|
||||
TEST(int4, convert_i4_to_string) {
|
||||
vector<uint8_t> values{171, 16};
|
||||
auto constant = make_shared<op::Constant>(element::i4, Shape{3}, &values[0]);
|
||||
auto constant = make_shared<ov::op::v0::Constant>(element::i4, Shape{3}, &values[0]);
|
||||
|
||||
vector<string> ref{"-6", "-5", "1"};
|
||||
for (size_t i = 0; i < 3; ++i) {
|
||||
@ -22,7 +23,7 @@ TEST(int4, convert_i4_to_string) {
|
||||
|
||||
TEST(int4, tensor_or_constant_size) {
|
||||
vector<uint8_t> values{171, 16};
|
||||
auto constant = make_shared<op::Constant>(element::i4, Shape{3}, &values[0]);
|
||||
auto constant = make_shared<ov::op::v0::Constant>(element::i4, Shape{3}, &values[0]);
|
||||
EXPECT_EQ(2, constant->get_byte_size());
|
||||
|
||||
ov::Tensor runtime_tensor(ov::element::i4, ov::Shape{3});
|
||||
|
@ -4,14 +4,13 @@
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/interval.hpp"
|
||||
#include "openvino/core/interval.hpp"
|
||||
|
||||
using namespace std;
|
||||
using namespace ngraph;
|
||||
using ::testing::Return;
|
||||
using namespace ov;
|
||||
|
||||
TEST(intervals, size) {
|
||||
EXPECT_TRUE(Interval().size() > 0);
|
||||
EXPECT_TRUE(ov::Interval().size() > 0);
|
||||
EXPECT_TRUE(Interval(2).size() == 1);
|
||||
EXPECT_TRUE(Interval(1, 5).size() == 5);
|
||||
EXPECT_TRUE(Interval(3, 2).size() == 0);
|
||||
|
@ -6,8 +6,6 @@
|
||||
#include <iostream>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/log.hpp"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
|
||||
using namespace std;
|
||||
|
||||
|
@ -2,56 +2,52 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <cstdio>
|
||||
#include <iostream>
|
||||
#include <list>
|
||||
#include <memory>
|
||||
#include <ngraph/pattern/op/wrap_type.hpp>
|
||||
#include <ngraph/rt_info.hpp>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/graph_util.hpp"
|
||||
#include "ngraph/log.hpp"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
#include "ngraph/opsets/opset3.hpp"
|
||||
#include "ngraph/pass/graph_rewrite.hpp"
|
||||
#include "ngraph/pass/manager.hpp"
|
||||
#include "openvino/core/rt_info.hpp"
|
||||
#include "openvino/op/relu.hpp"
|
||||
#include "openvino/pass/graph_rewrite.hpp"
|
||||
#include "openvino/pass/pattern/op/wrap_type.hpp"
|
||||
|
||||
using namespace ngraph;
|
||||
using namespace ov;
|
||||
using namespace ov::pass;
|
||||
using namespace std;
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
|
||||
class TestMatcherPass : public pass::MatcherPass {
class TestMatcherPass : public ov::pass::MatcherPass {
public:
TestMatcherPass() {
auto m_relu1 = ngraph::pattern::wrap_type<ngraph::opset3::Relu>(pattern::consumers_count(1));
auto m_relu2 = ngraph::pattern::wrap_type<ngraph::opset3::Relu>({m_relu1});
auto m_relu1 = ov::pass::pattern::wrap_type<ov::op::v0::Relu>(pattern::consumers_count(1));
auto m_relu2 = ov::pass::pattern::wrap_type<ov::op::v0::Relu>({m_relu1});

ngraph::graph_rewrite_callback callback = [=](pattern::Matcher& m) {
ov::graph_rewrite_callback callback = [=](pattern::Matcher& m) {
// Map that helps to connect labels with matched outputs
auto& node_to_output = m.get_pattern_value_map();

// Create new Relu operation and register it for additional execution
auto new_relu = register_new_node<ngraph::opset3::Relu>(
node_to_output.at(m_relu1).get_node_shared_ptr()->input_value(0));
auto new_relu =
register_new_node<ov::op::v0::Relu>(node_to_output.at(m_relu1).get_node_shared_ptr()->input_value(0));

// Copy runtime info attributes to newly created operation
ngraph::copy_runtime_info(m.get_matched_nodes(), new_relu);
ov::copy_runtime_info(m.get_matched_nodes(), new_relu);

// Save last Relu name to new Relu operation
new_relu->set_friendly_name(m.get_match_root()->get_friendly_name());

// Replace Relu->Relu with Relu
ngraph::replace_node(m.get_match_root(), new_relu);
ov::replace_node(m.get_match_root(), new_relu);

// Return true as the root node was changed
return true;
};

// Register pattern with the last Relu operation as a pattern root node
auto m = std::make_shared<ngraph::pattern::Matcher>(m_relu2, "ReluReluFusion");
auto m = std::make_shared<ov::pass::pattern::Matcher>(m_relu2, "ReluReluFusion");
// Register Matcher
this->register_matcher(m, callback);
}

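The register_new_node call above is what lets a GraphRewrite-driven run keep folding a chain: the freshly created Relu is queued for matching again, so Relu->Relu->Relu collapses down to a single Relu in one pass. A small usage sketch of the class above under a pass manager (the model construction mirrors the tests that follow; nothing here is part of the diff itself):

// Parameter -> Relu -> Relu, to be collapsed by the fusion above.
auto p = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1});
auto r1 = std::make_shared<ov::op::v0::Relu>(p);
auto r2 = std::make_shared<ov::op::v0::Relu>(r1);
auto model = std::make_shared<ov::Model>(ov::NodeVector{r2}, ov::ParameterVector{p});

ov::pass::Manager manager;
auto rewrite = manager.register_pass<ov::pass::GraphRewrite>();
rewrite->add_matcher<TestMatcherPass>();
manager.run_passes(model);
// Expected result: a single Relu between the Parameter and the Result.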
@ -60,10 +56,10 @@ public:
|
||||
TEST(pattern, matcher_pass) {
|
||||
{
|
||||
TestMatcherPass test_matcher;
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Relu>(a);
|
||||
auto c = make_shared<opset3::Relu>(b);
|
||||
auto f = std::make_shared<Function>(ngraph::NodeVector{c}, ParameterVector{a});
|
||||
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<op::v0::Relu>(a);
|
||||
auto c = make_shared<op::v0::Relu>(b);
|
||||
auto f = std::make_shared<Model>(ov::NodeVector{c}, ParameterVector{a});
|
||||
|
||||
ASSERT_TRUE(test_matcher.get_matcher()->match(c->output(0)));
|
||||
ASSERT_TRUE(test_matcher.get_matcher()->get_matched_nodes().size() == 2);
|
||||
@ -78,22 +74,22 @@ TEST(pattern, matcher_pass) {
|
||||
|
||||
{
|
||||
TestMatcherPass test_matcher;
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Relu>(a);
|
||||
auto c = make_shared<opset3::Relu>(b);
|
||||
auto f = std::make_shared<Function>(ngraph::NodeVector{b, c}, ParameterVector{a});
|
||||
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<op::v0::Relu>(a);
|
||||
auto c = make_shared<op::v0::Relu>(b);
|
||||
auto f = std::make_shared<Model>(ov::NodeVector{b, c}, ParameterVector{a});
|
||||
|
||||
ASSERT_FALSE(test_matcher.get_matcher()->match(c->output(0)));
|
||||
}
|
||||
|
||||
{
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
{
|
||||
auto a = make_shared<opset3::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<opset3::Relu>(a);
|
||||
auto c = make_shared<opset3::Relu>(b);
|
||||
auto d = make_shared<opset3::Relu>(c);
|
||||
f = std::make_shared<Function>(ngraph::NodeVector{d}, ParameterVector{a});
|
||||
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto b = make_shared<op::v0::Relu>(a);
|
||||
auto c = make_shared<op::v0::Relu>(b);
|
||||
auto d = make_shared<op::v0::Relu>(c);
|
||||
f = std::make_shared<Model>(ov::NodeVector{d}, ParameterVector{a});
|
||||
}
|
||||
|
||||
pass::GraphRewrite pass;
|
||||
|
@ -1,37 +0,0 @@
|
||||
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "misc.hpp"

FILE* port_open(const char* command, const char* type) {
#ifdef _WIN32
return _popen(command, type);
#elif defined(__linux) || defined(__EMSCRIPTEN__) || defined(__APPLE__)
return popen(command, type);
#endif
}

int port_close(FILE* stream) {
#ifdef _WIN32
return _pclose(stream);
#elif defined(__linux) || defined(__EMSCRIPTEN__) || defined(__APPLE__)
return pclose(stream);
#endif
}

int set_environment(const char* name, const char* value, int overwrite) {
#ifdef _WIN32
return _putenv_s(name, value);
#elif defined(__linux) || defined(__EMSCRIPTEN__) || defined(__APPLE__)
return setenv(name, value, overwrite);
#endif
}

int unset_environment(const char* name) {
#ifdef _WIN32
return _putenv_s(name, "");
#elif defined(__linux) || defined(__EMSCRIPTEN__) || defined(__APPLE__)
return unsetenv(name);
#endif
}
@ -1,12 +0,0 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once
#include <stdio.h>
#include <stdlib.h>

FILE* port_open(const char* command, const char* type);
int port_close(FILE* stream);
int set_environment(const char* name, const char* value, int overwrite);
int unset_environment(const char* name);
@ -533,12 +533,12 @@ TEST(model, multiple_inputs_outputs_model_from_const_model) {
|
||||
TEST(model, parameter_result_function) {
|
||||
std::shared_ptr<ov::Model> function = nullptr;
|
||||
{
|
||||
auto param = std::make_shared<ov::opset8::Parameter>(ov::element::f16, ngraph::Shape({1, 3, 24, 24}));
|
||||
auto param = std::make_shared<ov::opset8::Parameter>(ov::element::f16, ov::Shape({1, 3, 24, 24}));
|
||||
param->set_friendly_name("param");
|
||||
param->output(0).get_tensor().set_names({"data"});
|
||||
auto result = std::make_shared<ov::opset8::Result>(param);
|
||||
result->set_friendly_name("result");
|
||||
function = std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, ngraph::ParameterVector{param});
|
||||
function = std::make_shared<ov::Model>(ov::ResultVector{result}, ov::ParameterVector{param});
|
||||
function->set_friendly_name("ParamResult");
|
||||
}
|
||||
|
||||
@ -562,12 +562,12 @@ TEST(model, constant_result_function) {
|
||||
std::shared_ptr<ov::Node> constant = nullptr;
|
||||
|
||||
{
|
||||
constant = std::make_shared<ov::opset8::Constant>(ov::element::f32, ngraph::Shape({1, 3, 24, 24}));
|
||||
constant = std::make_shared<ov::opset8::Constant>(ov::element::f32, ov::Shape({1, 3, 24, 24}));
|
||||
constant->set_friendly_name("constant");
|
||||
constant->output(0).get_tensor().set_names({"data"});
|
||||
auto result = std::make_shared<ov::opset8::Result>(constant);
|
||||
result->set_friendly_name("result");
|
||||
function = std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, ngraph::ParameterVector{});
|
||||
function = std::make_shared<ov::Model>(ov::ResultVector{result}, ov::ParameterVector{});
|
||||
function->set_friendly_name("ConstResult");
|
||||
}
|
||||
|
||||
@ -585,7 +585,7 @@ TEST(model, constant_result_function) {
|
||||
}
|
||||
|
||||
TEST(model_reshape, ReshapedDynamicShapeLayout) {
std::shared_ptr<ov::Model> ngraph;
std::shared_ptr<ov::Model> model;
{
ov::PartialShape shape({-1, 3, 22, 22});
ov::element::Type type(ov::element::Type_t::f32);
@ -594,21 +594,21 @@
auto relu = std::make_shared<ov::op::v0::Relu>(param);

ov::ParameterVector params = {param};
ngraph = std::make_shared<ov::Model>(relu, params);
model = std::make_shared<ov::Model>(relu, params);
}

EXPECT_TRUE(ngraph->input().get_partial_shape().is_dynamic());
EXPECT_TRUE(model->input().get_partial_shape().is_dynamic());

std::map<std::string, ov::PartialShape> new_shape;
new_shape["tensor"] = ov::Shape{1, 3, 22, 22};
EXPECT_NO_THROW(ngraph->reshape(new_shape));
EXPECT_NO_THROW(model->reshape(new_shape));

EXPECT_FALSE(ngraph->input().get_partial_shape().is_dynamic());
EXPECT_FALSE(ngraph->get_parameters().front()->get_partial_shape().is_dynamic());
EXPECT_FALSE(model->input().get_partial_shape().is_dynamic());
EXPECT_FALSE(model->get_parameters().front()->get_partial_shape().is_dynamic());
}

TEST(model_reshape, ReshapeBatchReLU) {
|
||||
std::shared_ptr<ov::Model> ngraph;
|
||||
std::shared_ptr<ov::Model> model;
|
||||
{
|
||||
ov::PartialShape shape({1, 3, 22, 22});
|
||||
ov::element::Type type(ov::element::Type_t::f32);
|
||||
@ -620,24 +620,24 @@ TEST(model_reshape, ReshapeBatchReLU) {
|
||||
ov::ParameterVector params = {param};
|
||||
ov::ResultVector results = {result};
|
||||
|
||||
ngraph = std::make_shared<ov::Model>(results, params);
|
||||
model = std::make_shared<ov::Model>(results, params);
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
|
||||
{
|
||||
std::map<std::string, ov::PartialShape> new_shape;
|
||||
new_shape["tensor2"] = ov::PartialShape{2, 3, 22, 22};
|
||||
EXPECT_NO_THROW(ngraph->reshape(new_shape));
|
||||
EXPECT_NO_THROW(model->reshape(new_shape));
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
}
|
||||
|
||||
TEST(model_reshape, ReshapeSpatialReLU) {
|
||||
std::shared_ptr<ov::Model> ngraph;
|
||||
std::shared_ptr<ov::Model> model;
|
||||
{
|
||||
ov::PartialShape shape({1, 3, 22, 22});
|
||||
ov::element::Type type(ov::element::Type_t::f32);
|
||||
@ -649,24 +649,24 @@ TEST(model_reshape, ReshapeSpatialReLU) {
|
||||
ov::ParameterVector params = {param};
|
||||
ov::ResultVector results = {result};
|
||||
|
||||
ngraph = std::make_shared<ov::Model>(results, params);
|
||||
model = std::make_shared<ov::Model>(results, params);
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
|
||||
{
|
||||
std::map<std::string, ov::PartialShape> new_shape;
|
||||
new_shape["tensor"] = ov::PartialShape{1, 3, 25, 25};
|
||||
EXPECT_NO_THROW(ngraph->reshape(new_shape));
|
||||
EXPECT_NO_THROW(model->reshape(new_shape));
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 25, 25}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({1, 3, 25, 25}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 25, 25}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({1, 3, 25, 25}));
|
||||
}
|
||||
|
||||
TEST(model_reshape, ReshapeSpatialReLUWithoutReplaceParameter) {
|
||||
std::shared_ptr<ov::Model> ngraph;
|
||||
std::shared_ptr<ov::Model> model;
|
||||
{
|
||||
ov::PartialShape shape({1, 3, 22, 22});
|
||||
ov::element::Type type(ov::element::Type_t::f32);
|
||||
@ -677,24 +677,24 @@ TEST(model_reshape, ReshapeSpatialReLUWithoutReplaceParameter) {
|
||||
ov::ParameterVector params = {param};
|
||||
ov::ResultVector results = {result};
|
||||
|
||||
ngraph = std::make_shared<ov::Model>(results, params);
|
||||
model = std::make_shared<ov::Model>(results, params);
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
|
||||
{
|
||||
ngraph->get_parameters()[0]->set_partial_shape({1, 3, 25, 25});
|
||||
ngraph->validate_nodes_and_infer_types();
|
||||
model->get_parameters()[0]->set_partial_shape({1, 3, 25, 25});
|
||||
model->validate_nodes_and_infer_types();
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->input().get_partial_shape(), ov::Shape({1, 3, 25, 25}));
|
||||
EXPECT_EQ(ngraph->output().get_partial_shape(), ov::Shape({1, 3, 25, 25}));
|
||||
EXPECT_EQ(model->input().get_partial_shape(), ov::Shape({1, 3, 25, 25}));
|
||||
EXPECT_EQ(model->output().get_partial_shape(), ov::Shape({1, 3, 25, 25}));
|
||||
}
|
||||
|
||||
TEST(model_reshape, ReshapeSpatialReLUStaticToDynamic) {
|
||||
const ov::PartialShape refShape{1, 3, ov::Dimension::dynamic(), 25};
|
||||
std::shared_ptr<ov::Model> ngraph;
|
||||
std::shared_ptr<ov::Model> model;
|
||||
{
|
||||
ov::PartialShape shape({1, 3, 22, 22});
|
||||
ov::element::Type type(ov::element::Type_t::f32);
|
||||
@ -706,27 +706,27 @@ TEST(model_reshape, ReshapeSpatialReLUStaticToDynamic) {
|
||||
ov::ParameterVector params = {param};
|
||||
ov::ResultVector results = {result};
|
||||
|
||||
ngraph = std::make_shared<ov::Model>(results, params);
|
||||
model = std::make_shared<ov::Model>(results, params);
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
|
||||
{
|
||||
std::map<std::string, ov::PartialShape> new_shape;
|
||||
new_shape["tensor"] = refShape;
|
||||
EXPECT_NO_THROW(ngraph->reshape(new_shape));
|
||||
EXPECT_NO_THROW(model->reshape(new_shape));
|
||||
}
|
||||
|
||||
EXPECT_TRUE(ngraph->input(0).get_partial_shape().is_dynamic());
|
||||
EXPECT_TRUE(ngraph->output(0).get_partial_shape().is_dynamic());
|
||||
EXPECT_EQ(ngraph->input(0).get_partial_shape(), refShape);
|
||||
EXPECT_EQ(ngraph->output(0).get_partial_shape(), refShape);
|
||||
EXPECT_TRUE(model->input(0).get_partial_shape().is_dynamic());
|
||||
EXPECT_TRUE(model->output(0).get_partial_shape().is_dynamic());
|
||||
EXPECT_EQ(model->input(0).get_partial_shape(), refShape);
|
||||
EXPECT_EQ(model->output(0).get_partial_shape(), refShape);
|
||||
}
|
||||
|
||||
TEST(model_reshape, ReshapeSpatialReLUStaticToFullyDynamic) {
|
||||
const ov::PartialShape refShape = ov::PartialShape::dynamic();
|
||||
std::shared_ptr<ov::Model> ngraph;
|
||||
std::shared_ptr<ov::Model> model;
|
||||
{
|
||||
ov::PartialShape shape({1, 3, 22, 22});
|
||||
ov::element::Type type(ov::element::Type_t::f32);
|
||||
@ -738,27 +738,27 @@ TEST(model_reshape, ReshapeSpatialReLUStaticToFullyDynamic) {
|
||||
ov::ParameterVector params = {param};
|
||||
ov::ResultVector results = {result};
|
||||
|
||||
ngraph = std::make_shared<ov::Model>(results, params);
|
||||
model = std::make_shared<ov::Model>(results, params);
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
|
||||
{
|
||||
std::map<std::string, ov::PartialShape> new_shape;
|
||||
new_shape["tensor"] = refShape;
|
||||
EXPECT_NO_THROW(ngraph->reshape(new_shape));
|
||||
EXPECT_NO_THROW(model->reshape(new_shape));
|
||||
}
|
||||
|
||||
EXPECT_TRUE(ngraph->input().get_partial_shape().is_dynamic());
|
||||
EXPECT_TRUE(ngraph->output().get_partial_shape().is_dynamic());
|
||||
EXPECT_EQ(ngraph->input().get_partial_shape(), refShape);
|
||||
EXPECT_EQ(ngraph->output().get_partial_shape(), refShape);
|
||||
EXPECT_TRUE(model->input().get_partial_shape().is_dynamic());
|
||||
EXPECT_TRUE(model->output().get_partial_shape().is_dynamic());
|
||||
EXPECT_EQ(model->input().get_partial_shape(), refShape);
|
||||
EXPECT_EQ(model->output().get_partial_shape(), refShape);
|
||||
}
|
||||
|
||||
TEST(model_reshape, ReshapeSpatialReLUDynamicToDynamic) {
|
||||
const ov::PartialShape refShape{1, 3, ov::Dimension::dynamic(), 25};
|
||||
std::shared_ptr<ov::Model> ngraph;
|
||||
std::shared_ptr<ov::Model> model;
|
||||
{
|
||||
ov::PartialShape shape({1, 3, 22, ov::Dimension::dynamic()});
|
||||
ov::element::Type type(ov::element::Type_t::f32);
|
||||
@ -770,22 +770,22 @@ TEST(model_reshape, ReshapeSpatialReLUDynamicToDynamic) {
|
||||
ov::ParameterVector params = {param};
|
||||
ov::ResultVector results = {result};
|
||||
|
||||
ngraph = std::make_shared<ov::Model>(results, params);
|
||||
model = std::make_shared<ov::Model>(results, params);
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->input().get_partial_shape(), ov::PartialShape({1, 3, 22, ov::Dimension::dynamic()}));
|
||||
EXPECT_EQ(ngraph->output().get_partial_shape(), ov::PartialShape({1, 3, 22, ov::Dimension::dynamic()}));
|
||||
EXPECT_EQ(model->input().get_partial_shape(), ov::PartialShape({1, 3, 22, ov::Dimension::dynamic()}));
|
||||
EXPECT_EQ(model->output().get_partial_shape(), ov::PartialShape({1, 3, 22, ov::Dimension::dynamic()}));
|
||||
|
||||
{
|
||||
std::map<std::string, ov::PartialShape> new_shape;
|
||||
new_shape["tensor"] = refShape;
|
||||
EXPECT_NO_THROW(ngraph->reshape(new_shape));
|
||||
EXPECT_NO_THROW(model->reshape(new_shape));
|
||||
}
|
||||
|
||||
EXPECT_TRUE(ngraph->input().get_partial_shape().is_dynamic());
|
||||
EXPECT_TRUE(ngraph->output().get_partial_shape().is_dynamic());
|
||||
EXPECT_EQ(ngraph->input().get_partial_shape(), refShape);
|
||||
EXPECT_EQ(ngraph->output().get_partial_shape(), refShape);
|
||||
EXPECT_TRUE(model->input().get_partial_shape().is_dynamic());
|
||||
EXPECT_TRUE(model->output().get_partial_shape().is_dynamic());
|
||||
EXPECT_EQ(model->input().get_partial_shape(), refShape);
|
||||
EXPECT_EQ(model->output().get_partial_shape(), refShape);
|
||||
}
|
||||
|
||||
TEST(model_reshape, TestInvalidReshape) {
|
||||
@ -840,7 +840,7 @@ TEST(model_reshape, TestReshapeWithInvalidShapesForTheSameTensor) {
|
||||
}
|
||||
|
||||
TEST(model_reshape, ReshapeBatchReLUByIndex) {
|
||||
std::shared_ptr<ov::Model> ngraph;
|
||||
std::shared_ptr<ov::Model> model;
|
||||
ov::Output<ov::Node> port;
|
||||
{
|
||||
ov::PartialShape shape({1, 3, 22, 22});
|
||||
@ -854,24 +854,24 @@ TEST(model_reshape, ReshapeBatchReLUByIndex) {
|
||||
ov::ParameterVector params = {param};
|
||||
ov::ResultVector results = {result};
|
||||
|
||||
ngraph = std::make_shared<ov::Model>(results, params);
|
||||
model = std::make_shared<ov::Model>(results, params);
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
|
||||
{
|
||||
std::map<size_t, ov::PartialShape> new_shape;
|
||||
new_shape[0] = ov::PartialShape{2, 3, 22, 22};
|
||||
EXPECT_NO_THROW(ngraph->reshape(new_shape));
|
||||
EXPECT_NO_THROW(model->reshape(new_shape));
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
}
|
||||
|
||||
TEST(model_reshape, ReshapeBatchReLUByPort) {
|
||||
std::shared_ptr<ov::Model> ngraph;
|
||||
std::shared_ptr<ov::Model> model;
|
||||
ov::Output<ov::Node> port;
|
||||
{
|
||||
ov::PartialShape shape({1, 3, 22, 22});
|
||||
@ -885,24 +885,24 @@ TEST(model_reshape, ReshapeBatchReLUByPort) {
|
||||
ov::ParameterVector params = {param};
|
||||
ov::ResultVector results = {result};
|
||||
|
||||
ngraph = std::make_shared<ov::Model>(results, params);
|
||||
model = std::make_shared<ov::Model>(results, params);
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
|
||||
{
|
||||
std::map<ov::Output<ov::Node>, ov::PartialShape> new_shape;
|
||||
new_shape[port] = ov::PartialShape{2, 3, 22, 22};
|
||||
EXPECT_NO_THROW(ngraph->reshape(new_shape));
|
||||
EXPECT_NO_THROW(model->reshape(new_shape));
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
}
|
||||
|
||||
TEST(model_reshape, ReshapeBatchReLUWithOneInput) {
|
||||
std::shared_ptr<ov::Model> ngraph;
|
||||
std::shared_ptr<ov::Model> model;
|
||||
ov::Output<ov::Node> port;
|
||||
{
|
||||
ov::PartialShape shape({1, 3, 22, 22});
|
||||
@ -916,20 +916,20 @@ TEST(model_reshape, ReshapeBatchReLUWithOneInput) {
|
||||
ov::ParameterVector params = {param};
|
||||
ov::ResultVector results = {result};
|
||||
|
||||
ngraph = std::make_shared<ov::Model>(results, params);
|
||||
model = std::make_shared<ov::Model>(results, params);
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({1, 3, 22, 22}));
|
||||
|
||||
{
|
||||
ov::PartialShape new_shape;
|
||||
new_shape = ov::PartialShape{2, 3, 22, 22};
|
||||
EXPECT_NO_THROW(ngraph->reshape(new_shape));
|
||||
EXPECT_NO_THROW(model->reshape(new_shape));
|
||||
}
|
||||
|
||||
EXPECT_EQ(ngraph->get_parameters()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(ngraph->get_results()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_parameters()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
EXPECT_EQ(model->get_results()[0]->get_shape(), ov::Shape({2, 3, 22, 22}));
|
||||
}
|
||||
|
||||
TEST(model_reshape, IncoreectReshapeBatchWithMultipleInputs) {
|
||||
|
@ -2,22 +2,22 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <memory>
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
#include "openvino/op/add.hpp"
|
||||
#include "openvino/op/parameter.hpp"
|
||||
|
||||
NGRAPH_SUPPRESS_DEPRECATED_START
|
||||
|
||||
using namespace ngraph;
|
||||
using namespace ov;
|
||||
using namespace std;
|
||||
|
||||
TEST(node_input_output, input_create) {
|
||||
auto x = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto x = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto add = make_shared<op::v1::Add>(x, y);
|
||||
|
||||
auto add_in_0 = add->input(0);
|
||||
@ -41,8 +41,8 @@ TEST(node_input_output, input_create) {
|
||||
}
|
||||
|
||||
TEST(node_input_output, input_create_const) {
|
||||
auto x = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto x = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto add = make_shared<const op::v1::Add>(x, y);
|
||||
|
||||
auto add_in_0 = add->input(0);
|
||||
@ -66,8 +66,8 @@ TEST(node_input_output, input_create_const) {
|
||||
}
|
||||
|
||||
TEST(node_input_output, output_create) {
|
||||
auto x = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto x = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto add = make_shared<op::v1::Add>(x, y);
|
||||
|
||||
auto add_out_0 = add->output(0);
|
||||
@ -88,8 +88,8 @@ TEST(node_input_output, output_create) {
|
||||
}
|
||||
|
||||
TEST(node_input_output, output_create_const) {
|
||||
auto x = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto x = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto add = make_shared<const op::v1::Add>(x, y);
|
||||
|
||||
auto add_out_0 = add->output(0);
|
||||
@ -105,8 +105,8 @@ TEST(node_input_output, output_create_const) {
|
||||
}
|
||||
|
||||
TEST(node_input_output, output_rt_info) {
|
||||
auto x = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto x = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto y = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 2, 3, 4});
|
||||
auto add = make_shared<op::v1::Add>(x, y);
|
||||
auto add_const = make_shared<const op::v1::Add>(x, y);
|
||||
|
||||
@ -121,9 +121,9 @@ TEST(node_input_output, output_rt_info) {
|
||||
}
|
||||
|
||||
TEST(node_input_output, input_set_argument) {
|
||||
auto x = make_shared<op::Parameter>(element::f32, Shape{1});
|
||||
auto y = make_shared<op::Parameter>(element::f32, Shape{2});
|
||||
auto z = make_shared<op::Parameter>(element::f32, Shape{3});
|
||||
auto x = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
|
||||
auto y = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2});
|
||||
auto z = make_shared<ov::op::v0::Parameter>(element::f32, Shape{3});
|
||||
|
||||
auto add = make_shared<op::v1::Add>(x, y);
|
||||
|
||||
|
@ -2,44 +2,43 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <memory>
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
#include <thread>
|
||||
#include <vector>
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/graph_util.hpp"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
#include "ngraph/opsets/opset.hpp"
|
||||
|
||||
NGRAPH_SUPPRESS_DEPRECATED_START
|
||||
#include "openvino/op/add.hpp"
|
||||
#include "openvino/op/parameter.hpp"
|
||||
#include "openvino/op/util/op_types.hpp"
|
||||
#include "openvino/opsets/opset.hpp"
|
||||
|
||||
using namespace std;
|
||||
using namespace ngraph;
|
||||
using namespace ov;
|
||||
|
||||
TEST(op, is_op) {
auto arg0 = make_shared<op::Parameter>(element::f32, Shape{1});
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
ASSERT_NE(nullptr, arg0);
EXPECT_TRUE(op::is_parameter(arg0));
EXPECT_TRUE(op::util::is_parameter(arg0));
}

TEST(op, is_parameter) {
auto arg0 = make_shared<op::Parameter>(element::f32, Shape{1});
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
ASSERT_NE(nullptr, arg0);
auto t0 = make_shared<op::v1::Add>(arg0, arg0);
ASSERT_NE(nullptr, t0);
EXPECT_FALSE(op::is_parameter(t0));
EXPECT_FALSE(op::util::is_parameter(t0));
}

TEST(op, opset_multi_thread) {
auto doTest = [&](std::function<const ngraph::OpSet&()> fun) {
std::atomic<const ngraph::OpSet*> opset{nullptr};
auto doTest = [&](std::function<const ov::OpSet&()> fun) {
std::atomic<const ov::OpSet*> opset{nullptr};
std::atomic_bool failed{false};
auto threadFun = [&]() {
const ngraph::OpSet* op = &fun();
const ngraph::OpSet* current = opset;
const ov::OpSet* op = &fun();
const ov::OpSet* current = opset;
do {
if (current != nullptr && current != op) {
failed = true;
@ -53,11 +52,16 @@ TEST(op, opset_multi_thread) {
t2.join();
ASSERT_FALSE(failed);
};
doTest(ngraph::get_opset1);
doTest(ngraph::get_opset2);
doTest(ngraph::get_opset3);
doTest(ngraph::get_opset4);
doTest(ngraph::get_opset5);
doTest(ngraph::get_opset6);
doTest(ngraph::get_opset7);
doTest(ov::get_opset1);
doTest(ov::get_opset2);
doTest(ov::get_opset3);
doTest(ov::get_opset4);
doTest(ov::get_opset5);
doTest(ov::get_opset6);
doTest(ov::get_opset7);
doTest(ov::get_opset8);
doTest(ov::get_opset9);
doTest(ov::get_opset10);
doTest(ov::get_opset11);
doTest(ov::get_opset12);
}

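The opset accessors exercised above return process-wide singletons, which is what the multi-threaded test relies on. A short sketch of how an opset is typically queried once obtained, assuming the contains_type/create helpers carried over unchanged from the ngraph-era OpSet interface:

#include <memory>

#include "openvino/op/relu.hpp"
#include "openvino/opsets/opset.hpp"

const ov::OpSet& opset = ov::get_opset8();

// Type-based query: is Relu registered in this opset?
bool has_relu = opset.contains_type<ov::op::v0::Relu>();

// Name-based factory: creates a default-constructed node of the given type (or returns nullptr).
std::unique_ptr<ov::Node> relu(opset.create("Relu"));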
|
@ -2,20 +2,16 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "ngraph/opsets/opset1.hpp"
|
||||
#include "openvino/opsets/opset1.hpp"
|
||||
|
||||
#include <locale>
|
||||
#include <memory>
|
||||
#include <type_traits>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
#include "ngraph/opsets/opset.hpp"
|
||||
|
||||
using namespace std;
|
||||
using namespace ngraph;
|
||||
|
||||
NGRAPH_SUPPRESS_DEPRECATED_START
|
||||
using namespace ov;
|
||||
|
||||
namespace {
|
||||
string capitulate(string name) {
|
||||
|
@ -2,15 +2,21 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/validation_util.hpp"
#include "openvino/core/partial_shape.hpp"

using namespace ngraph;
#include <gtest/gtest.h>

#include "common_test_utils/test_tools.hpp"
#include "ngraph/shape_util.hpp"
#include "ngraph/validation_util.hpp"
#include "openvino/core/coordinate_diff.hpp"
#include "openvino/core/descriptor/tensor.hpp"
#include "openvino/op/parameter.hpp"

using namespace ov;

TEST(partial_shape, interators) {
|
||||
const PartialShape ps({1, 2, 3});
|
||||
const ov::PartialShape ps({1, 2, 3});
|
||||
ASSERT_TRUE(ps.is_static());
|
||||
{
|
||||
auto p = ps;
|
||||
@ -688,28 +694,28 @@ TEST(partial_shape, partial_shape_relaxes_refines_static_static_not_eq) {
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
TEST(partial_shape, partial_shape_project_rank_dynamic) {
|
||||
PartialShape s1{PartialShape::dynamic()};
|
||||
PartialShape s2 = project(s1, AxisSet{284, 0, 103});
|
||||
PartialShape s2 = ngraph::project(s1, AxisSet{284, 0, 103});
|
||||
|
||||
ASSERT_TRUE(s2.rank().is_dynamic());
|
||||
}
|
||||
|
||||
TEST(partial_shape, partial_shape_project_rank_static_dynamic) {
|
||||
PartialShape s1{Dimension::dynamic(), 2, Dimension::dynamic(), 3};
|
||||
PartialShape s2 = project(s1, AxisSet{0, 3});
|
||||
PartialShape s2 = ngraph::project(s1, AxisSet{0, 3});
|
||||
|
||||
ASSERT_TRUE(s2.same_scheme(PartialShape{Dimension::dynamic(), 3}));
|
||||
}
|
||||
|
||||
TEST(partial_shape, partial_shape_reduce_rank_dynamic) {
|
||||
PartialShape s1{PartialShape::dynamic()};
|
||||
PartialShape s2 = reduce(s1, AxisSet{284, 0, 103}, false);
|
||||
PartialShape s2 = ngraph::reduce(s1, AxisSet{284, 0, 103}, false);
|
||||
|
||||
ASSERT_TRUE(s2.rank().is_dynamic());
|
||||
}
|
||||
|
||||
TEST(partial_shape, partial_shape_reduce_rank_static_dynamic) {
|
||||
PartialShape s1{Dimension::dynamic(), 2, Dimension::dynamic(), 3};
|
||||
PartialShape s2 = reduce(s1, AxisSet{0, 3}, false);
|
||||
PartialShape s2 = ngraph::reduce(s1, AxisSet{0, 3}, false);
|
||||
|
||||
ASSERT_TRUE(s2.same_scheme(PartialShape{2, Dimension::dynamic()}));
|
||||
}
|
||||
@ -717,14 +723,14 @@ TEST(partial_shape, partial_shape_reduce_rank_static_dynamic) {
|
||||
TEST(partial_shape, partial_shape_inject_pairs_rank_dynamic) {
|
||||
PartialShape s1{PartialShape::dynamic()};
|
||||
PartialShape s2 =
|
||||
inject_pairs(s1, std::vector<std::pair<size_t, Dimension>>{{0, Dimension::dynamic()}, {207, 909}});
|
||||
ngraph::inject_pairs(s1, std::vector<std::pair<size_t, Dimension>>{{0, Dimension::dynamic()}, {207, 909}});
|
||||
|
||||
ASSERT_TRUE(s2.rank().is_dynamic());
|
||||
}
|
||||
|
||||
TEST(partial_shape, partial_shape_inject_pairs_rank_static) {
|
||||
PartialShape s1{1, Dimension::dynamic()};
|
||||
PartialShape s2 = inject_pairs(
|
||||
PartialShape s2 = ngraph::inject_pairs(
|
||||
s1,
|
||||
std::vector<std::pair<size_t, Dimension>>{{0, Dimension::dynamic()}, {2, 909}, {4, Dimension::dynamic()}});
|
||||
|
||||
@ -800,7 +806,7 @@ TEST(partial_shape, copy_with_back_inserter_iterator) {
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_ok) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -810,7 +816,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_ok) {
|
||||
Strides window_dilation{1, 1, 1, 1};
|
||||
bool is_window_all_in_padding_allowed = true;
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -826,7 +832,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_ok) {
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_data_dilation) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 0, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -838,7 +844,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_data
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
ASSERT_THROW(
|
||||
{
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -853,7 +859,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_data
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_window_dilation) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -865,7 +871,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_wind
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
ASSERT_THROW(
|
||||
{
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -880,7 +886,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_wind
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_window_strides) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -892,7 +898,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_wind
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
ASSERT_THROW(
|
||||
{
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -907,7 +913,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_wind
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_ok) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -918,7 +924,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_ok
|
||||
bool is_window_all_in_padding_allowed = true;
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -933,7 +939,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_ok
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_zero_data_post_padding) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, -1, 0, 0};
|
||||
@ -945,7 +951,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_ze
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
ASSERT_THROW(
|
||||
{
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -960,7 +966,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_ze
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_neg_padding_ok) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{Dimension::dynamic(), 4, 3, Dimension::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, -1, 0, 0};
|
||||
@ -970,7 +976,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_ne
|
||||
Strides window_dilation{1, 1, 1, 1};
|
||||
bool is_window_all_in_padding_allowed = true;
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -985,7 +991,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_ne
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_ok) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -996,7 +1002,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_ok
|
||||
bool is_window_all_in_padding_allowed = true;
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1011,7 +1017,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_ok
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_window_dim_zero) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -1024,7 +1030,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_wi
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
ASSERT_THROW(
|
||||
{
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1039,7 +1045,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_wi
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_window_dilated_dim_zero) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -1052,7 +1058,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_wi
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
ASSERT_THROW(
|
||||
{
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1067,7 +1073,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_wi
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_window_all_in_padding_ok) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 3, 0};
|
||||
@ -1078,7 +1084,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_wi
|
||||
bool is_window_all_in_padding_allowed = true;
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1093,7 +1099,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_wi
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_window_all_in_padding_not_ok) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 3, 0};
|
||||
@ -1106,7 +1112,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_wi
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
ASSERT_THROW(
|
||||
{
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1121,7 +1127,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_wi
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_dilated_window_not_all_in_padding) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{PartialShape::dynamic()};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 3, 0};
|
||||
@ -1132,7 +1138,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_di
|
||||
bool is_window_all_in_padding_allowed = false;
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1147,7 +1153,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_di
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_ok) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -1158,7 +1164,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
bool is_window_all_in_padding_allowed = true;
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1174,7 +1180,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_with_padding_ok) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 2, 0};
|
||||
@ -1185,7 +1191,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
bool is_window_all_in_padding_allowed = true;
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1201,7 +1207,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_with_padding_and_stride_ok) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 2, 0};
|
||||
@ -1212,7 +1218,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
bool is_window_all_in_padding_allowed = true;
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1228,7 +1234,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_window_too_big) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 0, 0};
|
||||
@ -1241,7 +1247,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
ASSERT_THROW(
|
||||
{
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1256,7 +1262,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_window_not_too_big_padding) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 5, 0};
|
||||
@ -1267,7 +1273,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
bool is_window_all_in_padding_allowed = true;
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
|
||||
@ -1283,7 +1289,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
}
|
||||
|
||||
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_window_dilated_too_big) {
|
||||
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto node = std::make_shared<ov::op::v0::Parameter>(element::f32, Shape{});
|
||||
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
|
||||
Strides data_dilation{1, 1, 1, 1};
|
||||
CoordinateDiff data_padding_below{0, 0, 5, 0};
|
||||
@ -1296,7 +1302,7 @@ TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dyn
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
ASSERT_THROW(
|
||||
{
|
||||
PartialShape result_shape = infer_windowed_reduction_output_shape(node.get(),
|
||||
PartialShape result_shape = ngraph::infer_windowed_reduction_output_shape(node.get(),
|
||||
data_shape,
|
||||
data_dilation,
|
||||
data_padding_below,
File diff suppressed because it is too large
@ -4,60 +4,56 @@

#include <gtest/gtest.h>

#include <ngraph/opsets/opset3.hpp>
#include <ngraph/pass/graph_rewrite.hpp>
#include <ngraph/pass/manager.hpp>
#include <ngraph/pattern/op/wrap_type.hpp>

#include "common_test_utils/test_tools.hpp"
#include "openvino/core/rtti.hpp"
#include "openvino/op/relu.hpp"
#include "openvino/op/sigmoid.hpp"
#include "openvino/pass/graph_rewrite.hpp"
#include "openvino/pass/manager.hpp"
#include "openvino/pass/pattern/op/wrap_type.hpp"

using namespace ::testing;
using namespace std;
using namespace ngraph;
using namespace ov;
using namespace ov::pass;

OPENVINO_SUPPRESS_DEPRECATED_START

class RenameReLU : public ngraph::pass::MatcherPass {
class RenameReLU : public ov::pass::MatcherPass {
public:
    NGRAPH_RTTI_DECLARATION;
    OPENVINO_RTTI("RanameReLU");
    RenameReLU() : MatcherPass() {
        auto relu = pattern::wrap_type<opset3::Relu>();
        ngraph::matcher_pass_callback callback = [](pattern::Matcher& m) {
        auto relu = ov::pass::pattern::wrap_type<ov::op::v0::Relu>();
        ov::matcher_pass_callback callback = [](pattern::Matcher& m) {
            auto relu = m.get_match_root();
            relu->set_friendly_name("renamed");
            return false;
        };

        auto m = std::make_shared<ngraph::pattern::Matcher>(relu, "RenameReLU");
        auto m = std::make_shared<pass::pattern::Matcher>(relu, "RenameReLU");
        this->register_matcher(m, callback);
    }
};

NGRAPH_RTTI_DEFINITION(RenameReLU, "RenameReLU");

class RenameSigmoid : public ngraph::pass::MatcherPass {
class RenameSigmoid : public ov::pass::MatcherPass {
public:
    NGRAPH_RTTI_DECLARATION;
    OPENVINO_RTTI("RenameSigmoid");
    RenameSigmoid() : MatcherPass() {
        auto sigmoid = pattern::wrap_type<opset3::Sigmoid>();
        ngraph::matcher_pass_callback callback = [](pattern::Matcher& m) {
        auto sigmoid = pattern::wrap_type<ov::op::v0::Sigmoid>();
        ov::matcher_pass_callback callback = [](pattern::Matcher& m) {
            auto sigmoid = m.get_match_root();
            sigmoid->set_friendly_name("renamed");
            return false;
        };

        auto m = std::make_shared<ngraph::pattern::Matcher>(sigmoid, "RenameSigmoid");
        auto m = std::make_shared<ov::pass::pattern::Matcher>(sigmoid, "RenameSigmoid");
        this->register_matcher(m, callback);
    }
};

NGRAPH_RTTI_DEFINITION(RenameSigmoid, "RenameSigmoid");

class TestFunctionPass : public ngraph::pass::FunctionPass {
class TestModelPass : public pass::ModelPass {
public:
    NGRAPH_RTTI_DECLARATION;
    OPENVINO_RTTI("TestModelPass");

    bool run_on_model(const std::shared_ptr<Function>& f) override {
    bool run_on_model(const std::shared_ptr<ov::Model>& f) override {
        pass::Manager manager(get_pass_config());

        manager.register_pass<RenameReLU, false /*disabled by default*/>();
@ -68,36 +64,32 @@ public:
    }
};

NGRAPH_RTTI_DEFINITION(TestFunctionPass, "TestFunctionPass");

class TestGraphRewritePass : public ngraph::pass::GraphRewrite {
class TestGraphRewritePass : public pass::GraphRewrite {
public:
    NGRAPH_RTTI_DECLARATION;
    OPENVINO_RTTI("TestGraphRewritePass");
    TestGraphRewritePass() {
        add_matcher<RenameReLU, false /*disabled by default*/>();
        add_matcher<RenameSigmoid>();
    }
};

NGRAPH_RTTI_DEFINITION(TestGraphRewritePass, "TestGraphRewritePass");

std::tuple<std::shared_ptr<Function>, std::shared_ptr<Node>, std::shared_ptr<Node>> get_test_function() {
    auto data = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::f32, ngraph::Shape{3, 1, 2});
    auto relu = std::make_shared<ngraph::opset3::Relu>(data);
std::tuple<std::shared_ptr<Model>, std::shared_ptr<Node>, std::shared_ptr<Node>> get_test_function() {
    auto data = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{3, 1, 2});
    auto relu = std::make_shared<ov::op::v0::Relu>(data);
    relu->set_friendly_name("relu");
    auto sigmoid = std::make_shared<ngraph::opset3::Sigmoid>(relu);
    auto sigmoid = std::make_shared<ov::op::v0::Sigmoid>(relu);
    sigmoid->set_friendly_name("sigmoid");
    auto f = std::make_shared<ngraph::Function>(ngraph::NodeVector{sigmoid}, ngraph::ParameterVector{data});
    return std::tuple<std::shared_ptr<Function>, std::shared_ptr<Node>, std::shared_ptr<Node>>(f, relu, sigmoid);
    auto f = std::make_shared<ov::Model>(ov::NodeVector{sigmoid}, ov::ParameterVector{data});
    return std::tuple<std::shared_ptr<Model>, std::shared_ptr<Node>, std::shared_ptr<Node>>(f, relu, sigmoid);
}

TEST(PassConfig, EnableDisablePasses1) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
pass::Manager manager;
|
||||
manager.register_pass<TestFunctionPass>();
|
||||
manager.register_pass<TestModelPass>();
|
||||
manager.run_passes(f);
|
||||
|
||||
ASSERT_EQ(relu->get_friendly_name(), "relu");
|
||||
@ -105,12 +97,12 @@ TEST(PassConfig, EnableDisablePasses1) {
|
||||
}
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses2) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
pass::Manager manager;
|
||||
manager.register_pass<TestFunctionPass>();
|
||||
manager.register_pass<TestModelPass>();
|
||||
|
||||
auto pass_config = manager.get_pass_config();
|
||||
pass_config->disable<RenameSigmoid>();
|
||||
@ -128,12 +120,12 @@ TEST(PassConfig, EnableDisablePasses2) {
|
||||
}
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses3) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
pass::Manager manager;
|
||||
manager.register_pass<TestFunctionPass>();
|
||||
manager.register_pass<TestModelPass>();
|
||||
|
||||
auto pass_config = manager.get_pass_config();
|
||||
pass_config->enable<RenameReLU>();
|
||||
@ -144,12 +136,12 @@ TEST(PassConfig, EnableDisablePasses3) {
|
||||
}
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses4) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
pass::Manager manager;
|
||||
manager.register_pass<TestFunctionPass>();
|
||||
manager.register_pass<TestModelPass>();
|
||||
|
||||
auto pass_config = manager.get_pass_config();
|
||||
pass_config->enable<RenameReLU>();
|
||||
@ -166,7 +158,7 @@ TEST(PassConfig, EnableDisablePasses4) {
|
||||
}
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses5) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
@ -179,7 +171,7 @@ TEST(PassConfig, EnableDisablePasses5) {
|
||||
}
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses6) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
@ -202,7 +194,7 @@ TEST(PassConfig, EnableDisablePasses6) {
|
||||
}
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses7) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
@ -218,7 +210,7 @@ TEST(PassConfig, EnableDisablePasses7) {
|
||||
}
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses8) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
@ -241,7 +233,7 @@ TEST(PassConfig, EnableDisablePasses8) {
|
||||
}
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses9) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
@ -265,19 +257,19 @@ TEST(PassConfig, EnableDisablePasses9) {
|
||||
ASSERT_EQ(sigmoid->get_friendly_name(), "renamed");
|
||||
}
|
||||
|
||||
class TestNestedMatcher : public ngraph::pass::MatcherPass {
|
||||
class TestNestedMatcher : public ov::pass::MatcherPass {
|
||||
public:
|
||||
NGRAPH_RTTI_DECLARATION;
|
||||
TestNestedMatcher() : MatcherPass() {
|
||||
auto any_op = pattern::any_input();
|
||||
ngraph::matcher_pass_callback callback = [this](pattern::Matcher& m) {
|
||||
ov::matcher_pass_callback callback = [this](pattern::Matcher& m) {
|
||||
auto root = m.get_match_root();
|
||||
auto pass_config = this->get_pass_config();
|
||||
if (std::dynamic_pointer_cast<opset3::Relu>(root) && !pass_config->is_disabled<RenameReLU>()) {
|
||||
if (std::dynamic_pointer_cast<op::v0::Relu>(root) && !pass_config->is_disabled<RenameReLU>()) {
|
||||
auto pass = std::make_shared<RenameReLU>();
|
||||
pass->set_pass_config(pass_config);
|
||||
pass->apply(root);
|
||||
} else if (std::dynamic_pointer_cast<opset3::Sigmoid>(root) && !pass_config->is_disabled<RenameSigmoid>()) {
|
||||
} else if (std::dynamic_pointer_cast<op::v0::Sigmoid>(root) && !pass_config->is_disabled<RenameSigmoid>()) {
|
||||
auto pass = std::make_shared<RenameSigmoid>();
|
||||
pass->set_pass_config(pass_config);
|
||||
pass->apply(root);
|
||||
@ -285,7 +277,7 @@ public:
|
||||
return false;
|
||||
};
|
||||
|
||||
auto m = std::make_shared<ngraph::pattern::Matcher>(any_op, "TestNestedMatcher");
|
||||
auto m = std::make_shared<pass::pattern::Matcher>(any_op, "TestNestedMatcher");
|
||||
this->register_matcher(m, callback);
|
||||
}
|
||||
};
|
||||
@ -303,7 +295,7 @@ public:
|
||||
NGRAPH_RTTI_DEFINITION(TestNestedGraphRewrite, "TestNestedGraphRewrite");
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses10) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
@ -327,7 +319,7 @@ TEST(PassConfig, EnableDisablePasses10) {
|
||||
}
|
||||
|
||||
TEST(PassConfig, EnableDisablePasses11) {
|
||||
std::shared_ptr<Function> f;
|
||||
std::shared_ptr<Model> f;
|
||||
std::shared_ptr<Node> relu, sigmoid;
|
||||
std::tie(f, relu, sigmoid) = get_test_function();
|
||||
|
||||
|
@ -2,22 +2,26 @@
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>

#include <memory>
#include <sstream>
#include <string>
#include <vector>

#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/graph_util.hpp"
#include "ngraph/ngraph.hpp"
#include "ngraph/pass/manager.hpp"
#include "openvino/core/graph_util.hpp"
#include "openvino/core/model.hpp"
#include "openvino/op/add.hpp"
#include "openvino/op/matmul.hpp"
#include "openvino/op/multiply.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/pass/manager.hpp"
#include "openvino/pass/pass.hpp"

using namespace ngraph;
using namespace ov;
using namespace std;

OPENVINO_SUPPRESS_DEPRECATED_START

std::shared_ptr<ov::Model> make_test_graph() {
    auto arg_0 = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{2, 2});
    auto arg_1 = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{2, 2});
@ -74,7 +78,7 @@ TEST(pass_manager, add) {

    auto graph = make_test_graph();
    size_t node_count = 0;
    traverse_nodes(graph, [&](shared_ptr<Node> /* node */) {
    ov::traverse_nodes(graph, [&](shared_ptr<Node> /* node */) {
        node_count++;
    });
    pass_manager.run_passes(graph);
@ -82,13 +86,3 @@ TEST(pass_manager, add) {
    EXPECT_EQ(node_count, sorted.size());
    EXPECT_TRUE(validate_list(sorted));
}

namespace {
class DummyPass : public pass::FunctionPass {
public:
    DummyPass() {}
    bool run_on_model(const std::shared_ptr<ngraph::Function>& /* f */) override {
        return false;
    }
};
} // namespace

@ -2,61 +2,59 @@
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>

#include <algorithm>
#include <common_test_utils/ngraph_test_utils.hpp>
#include <cstdio>
#include <iostream>
#include <list>
#include <memory>
#include <ngraph/pattern/op/wrap_type.hpp>

#include "common_test_utils/matcher.hpp"
#include "common_test_utils/ngraph_test_utils.hpp"
#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/file_util.hpp"
#include "ngraph/graph_util.hpp"
#include "ngraph/log.hpp"
#include "ngraph/ngraph.hpp"
#include "ngraph/op/add.hpp"
#include "ngraph/op/batch_norm.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/divide.hpp"
#include "ngraph/op/multiply.hpp"
#include "ngraph/op/sqrt.hpp"
#include "ngraph/op/subtract.hpp"
#include "ngraph/op/util/op_types.hpp"
#include "ngraph/pass/graph_rewrite.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pattern/matcher.hpp"
#include "ngraph/pattern/op/branch.hpp"
#include "ngraph/pattern/op/label.hpp"
#include "ngraph/pattern/op/or.hpp"
#include "ngraph/pattern/op/skip.hpp"
#include "ngraph/pattern/op/true.hpp"
#include "openvino/util/log.hpp"
#include "openvino/op/abs.hpp"
#include "openvino/op/add.hpp"
#include "openvino/op/broadcast.hpp"
#include "openvino/op/constant.hpp"
#include "openvino/op/divide.hpp"
#include "openvino/op/multiply.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/op/reduce_sum.hpp"
#include "openvino/op/relu.hpp"
#include "openvino/op/subtract.hpp"
#include "openvino/op/util/op_types.hpp"
#include "openvino/pass/graph_rewrite.hpp"
#include "openvino/pass/manager.hpp"
#include "openvino/pass/pattern/matcher.hpp"
#include "openvino/pass/pattern/op/branch.hpp"
#include "openvino/pass/pattern/op/label.hpp"
#include "openvino/pass/pattern/op/or.hpp"
#include "openvino/pass/pattern/op/true.hpp"
#include "openvino/pass/pattern/op/wrap_type.hpp"

NGRAPH_SUPPRESS_DEPRECATED_START

using namespace ngraph;
using namespace ov;
using namespace ov::pass;
using namespace std;

static std::shared_ptr<Node> construct_constant_node(int n) {
    return op::Constant::create(element::i32, Shape{}, {n});
    return ov::op::v0::Constant::create(element::i32, Shape{}, {n});
}

static std::shared_ptr<pattern::op::Label> construct_variance_graph() {
|
||||
static std::shared_ptr<pass::pattern::op::Label> construct_variance_graph() {
|
||||
// construct varaiance
|
||||
auto N = op::Constant::create(element::f32, Shape{3}, {2, 2, 2});
|
||||
auto input = std::make_shared<pattern::op::Label>(element::f32, Shape{2, 3});
|
||||
auto N = ov::op::v0::Constant::create(element::f32, Shape{3}, {2, 2, 2});
|
||||
auto input = std::make_shared<pass::pattern::op::Label>(element::f32, Shape{2, 3});
|
||||
auto input_sq = std::make_shared<op::v1::Multiply>(input, input);
|
||||
auto sum_input = std::make_shared<op::v1::ReduceSum>(input, op::Constant::create(element::i64, {1}, {0}));
|
||||
auto sum_input = std::make_shared<op::v1::ReduceSum>(input, ov::op::v0::Constant::create(element::i64, {1}, {0}));
|
||||
auto square_sumed_input = std::make_shared<op::v1::Multiply>(sum_input, sum_input);
|
||||
auto sum_squared_input =
|
||||
std::make_shared<op::v1::ReduceSum>(input_sq, op::Constant::create(element::i64, {1}, {0}));
|
||||
std::make_shared<op::v1::ReduceSum>(input_sq, ov::op::v0::Constant::create(element::i64, {1}, {0}));
|
||||
auto avg_input_sum_sq = std::make_shared<op::v1::Divide>(square_sumed_input, N);
|
||||
auto xmu = std::make_shared<op::v1::Subtract>(sum_squared_input, avg_input_sum_sq);
|
||||
auto variance = std::make_shared<op::v1::Divide>(xmu, N);
|
||||
auto variance_label = std::make_shared<pattern::op::Label>(variance, nullptr, NodeVector{variance});
|
||||
auto variance_label = std::make_shared<pass::pattern::op::Label>(variance, nullptr, NodeVector{variance});
|
||||
|
||||
return variance_label;
|
||||
}
|
||||
@ -64,14 +62,14 @@ static std::shared_ptr<pattern::op::Label> construct_variance_graph() {
|
||||
static std::shared_ptr<pattern::op::Label> construct_mean_graph() {
|
||||
// construct mean;
|
||||
auto input = std::make_shared<pattern::op::Label>(element::f32, Shape{2, 3});
|
||||
auto N = op::Constant::create(element::f32, Shape{3}, {2, 2, 2});
|
||||
auto sum_input1 = std::make_shared<op::v1::ReduceSum>(input, op::Constant::create(element::i64, {1}, {0}));
|
||||
auto N = ov::op::v0::Constant::create(element::f32, Shape{3}, {2, 2, 2});
|
||||
auto sum_input1 = std::make_shared<op::v1::ReduceSum>(input, ov::op::v0::Constant::create(element::i64, {1}, {0}));
|
||||
auto mean = std::make_shared<op::v1::Divide>(sum_input1, N);
|
||||
auto mean_label = std::make_shared<pattern::op::Label>(mean, nullptr, NodeVector{mean});
|
||||
return mean_label;
|
||||
}
|
||||
|
||||
class TestGraphRewrite : public ngraph::pass::GraphRewrite {
|
||||
class TestGraphRewrite : public ov::pass::GraphRewrite {
|
||||
public:
|
||||
void construct_multiply_by_one() {
|
||||
// pattern #1 : a * 1 = a
|
||||
@ -85,8 +83,8 @@ public:
|
||||
auto pattern_map = m.get_pattern_map();
|
||||
|
||||
size_t const_node_index = m.get_match_root()->input_value(0).get_node_shared_ptr() == pattern_map[pattern];
|
||||
auto const_node =
|
||||
ov::as_type_ptr<op::Constant>(m.get_match_root()->input_value(const_node_index).get_node_shared_ptr());
|
||||
auto const_node = ov::as_type_ptr<ov::op::v0::Constant>(
|
||||
m.get_match_root()->input_value(const_node_index).get_node_shared_ptr());
|
||||
auto second_node = m.get_match_root()->input_value(const_node_index).get_node_shared_ptr();
|
||||
OPENVINO_DEBUG << "second_node = " << second_node->get_name()
|
||||
<< " , pattern = " << pattern_map[pattern]->get_name();
|
||||
@ -143,8 +141,8 @@ public:
|
||||
auto pattern_map = m.get_pattern_map();
|
||||
|
||||
size_t const_node_index = m.get_match_root()->input_value(0).get_node_shared_ptr() == pattern_map[pattern];
|
||||
auto const_node =
|
||||
ov::as_type_ptr<op::Constant>(m.get_match_root()->input_value(const_node_index).get_node_shared_ptr());
|
||||
auto const_node = ov::as_type_ptr<ov::op::v0::Constant>(
|
||||
m.get_match_root()->input_value(const_node_index).get_node_shared_ptr());
|
||||
auto second_node = m.get_match_root()->input_value(const_node_index).get_node_shared_ptr();
|
||||
OPENVINO_DEBUG << "second_node = " << second_node->get_name()
|
||||
<< " , pattern = " << pattern_map[pattern]->get_name();
|
||||
@ -198,8 +196,8 @@ public:
|
||||
|
||||
static void run_passes(pass::Manager& pass_manager,
|
||||
shared_ptr<Node> graph,
|
||||
std::vector<shared_ptr<op::Parameter>> parms) {
|
||||
auto func = make_shared<Function>(graph, ParameterVector{parms});
|
||||
std::vector<shared_ptr<op::v0::Parameter>> parms) {
|
||||
auto func = make_shared<Model>(graph, ParameterVector{parms});
|
||||
pass_manager.run_passes(func);
|
||||
}
|
||||
|
||||
@ -209,14 +207,14 @@ TEST(pattern, graph_rewrite) {
|
||||
pass_manager.register_pass<TestGraphRewrite>();
|
||||
|
||||
{
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto c = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto c = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto iconst0 = construct_constant_node(0);
|
||||
auto graph_a = make_shared<op::v1::Add>(a, iconst0);
|
||||
auto graph_b = make_shared<op::v1::Add>(b, iconst0);
|
||||
|
||||
auto f = std::make_shared<Function>(ngraph::NodeVector{a, b, graph_a, c, graph_b}, ParameterVector{a, b, c});
|
||||
auto f = std::make_shared<Model>(ngraph::NodeVector{a, b, graph_a, c, graph_b}, ParameterVector{a, b, c});
|
||||
pass_manager.run_passes(f);
|
||||
|
||||
ASSERT_TRUE(graph_a->get_output_target_inputs(0).empty());
|
||||
@ -227,8 +225,8 @@ TEST(pattern, graph_rewrite) {
|
||||
}
|
||||
|
||||
{
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto iconst0 = construct_constant_node(0);
|
||||
auto sum = make_shared<op::v1::Add>(a, iconst0);
|
||||
auto graph = make_shared<op::v1::Add>(b, sum);
|
||||
@ -240,8 +238,8 @@ TEST(pattern, graph_rewrite) {
|
||||
}
|
||||
|
||||
{
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto iconst1 = construct_constant_node(1);
|
||||
auto mul = make_shared<op::v1::Multiply>(a, iconst1);
|
||||
auto graph = make_shared<op::v1::Add>(b, mul);
|
||||
@ -253,8 +251,8 @@ TEST(pattern, graph_rewrite) {
|
||||
}
|
||||
|
||||
{
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto iconst1 = construct_constant_node(1);
|
||||
auto multiply = make_shared<op::v1::Multiply>(make_shared<op::v1::Multiply>(a, iconst1), iconst1);
|
||||
multiply = make_shared<op::v1::Multiply>(make_shared<op::v1::Multiply>(multiply, iconst1), iconst1);
|
||||
@ -266,8 +264,8 @@ TEST(pattern, graph_rewrite) {
|
||||
}
|
||||
|
||||
{
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto iconst0 = construct_constant_node(0);
|
||||
auto iconst1 = construct_constant_node(1);
|
||||
auto mul = make_shared<op::v1::Multiply>(make_shared<op::v1::Add>(a, iconst0), iconst1);
|
||||
@ -279,8 +277,8 @@ TEST(pattern, graph_rewrite) {
|
||||
}
|
||||
|
||||
{
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto iconst1 = construct_constant_node(1);
|
||||
auto mul = make_shared<op::v1::Multiply>(iconst1, make_shared<op::v1::Multiply>(iconst1, a));
|
||||
mul = make_shared<op::v1::Multiply>(iconst1, make_shared<op::v1::Multiply>(iconst1, mul));
|
||||
@ -294,12 +292,12 @@ TEST(pattern, graph_rewrite) {
|
||||
|
||||
TEST(pattern, matcher) {
|
||||
Shape shape{};
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
TestMatcher n;
|
||||
ASSERT_TRUE(n.match(a, a));
|
||||
ASSERT_EQ(n.get_matched_nodes(), (NodeVector{a}));
|
||||
|
||||
auto abs = make_shared<op::Abs>(a);
|
||||
auto abs = make_shared<op::v0::Abs>(a);
|
||||
auto any = std::make_shared<pattern::op::Skip>(a);
|
||||
ASSERT_TRUE(n.match(any, abs));
|
||||
ASSERT_EQ(n.get_matched_nodes(), (NodeVector{abs, a}));
|
||||
@ -320,10 +318,10 @@ TEST(pattern, matcher) {
|
||||
ASSERT_FALSE(n.match(pattern_false, a));
|
||||
ASSERT_EQ(n.get_matched_nodes(), (NodeVector{}));
|
||||
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
|
||||
auto is_bea = [](std::shared_ptr<Node> node) -> bool {
|
||||
return op::is_binary_elementwise_arithmetic(node);
|
||||
return op::util::is_binary_elementwise_arithmetic(node);
|
||||
};
|
||||
auto bea = std::make_shared<pattern::op::Any>(a, is_bea, NodeVector{a, b});
|
||||
auto add_ab = std::make_shared<op::v1::Add>(a, b);
|
||||
@ -355,7 +353,7 @@ TEST(pattern, matcher) {
|
||||
ASSERT_TRUE(n.match(bea_label, ab));
|
||||
ASSERT_EQ(n.get_pattern_map()[bea_label], ab);
|
||||
|
||||
auto d = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto d = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
ASSERT_FALSE(n.match(d, b));
|
||||
|
||||
ASSERT_FALSE(n.match(std::make_shared<op::v1::Add>(abs, b), std::make_shared<op::v1::Add>(b, b)));
|
||||
@ -373,7 +371,7 @@ TEST(pattern, matcher) {
|
||||
ASSERT_EQ(n.get_pattern_map()[pattern], abs);
|
||||
ASSERT_EQ(n.get_matched_nodes(), (NodeVector{add_absb, abs, b}));
|
||||
|
||||
auto c = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto c = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto mul_add_absb = std::make_shared<op::v1::Multiply>(c, add_absb);
|
||||
ASSERT_TRUE(
|
||||
n.match(std::make_shared<op::v1::Multiply>(c, std::make_shared<op::v1::Add>(b, pattern)), mul_add_absb));
|
||||
@ -399,7 +397,7 @@ TEST(pattern, matcher) {
|
||||
ASSERT_TRUE(n.match(make_shared<op::v1::Multiply>(pattern, iconst1_0),
|
||||
make_shared<op::v1::Multiply>(a, iconst1_1))); // different iconst
|
||||
ASSERT_EQ(n.get_pattern_map()[pattern], a);
|
||||
auto fconst1_0 = op::Constant::create(element::f32, shape, {1});
|
||||
auto fconst1_0 = ov::op::v0::Constant::create(element::f32, shape, {1});
|
||||
auto patternf = std::make_shared<pattern::op::Label>(fconst1_0);
|
||||
ASSERT_TRUE(n.match(make_shared<op::v1::Multiply>(patternf, fconst1_0),
|
||||
make_shared<op::v1::Multiply>(a, iconst1_1))); // different iconst
|
||||
@ -413,7 +411,7 @@ TEST(pattern, matcher) {
|
||||
|
||||
ASSERT_FALSE(n.match(label, std::make_shared<op::v1::Subtract>(a, b)));
|
||||
|
||||
ASSERT_TRUE(n.match(make_shared<op::Abs>(label), make_shared<op::Abs>(add)));
|
||||
ASSERT_TRUE(n.match(make_shared<op::v0::Abs>(label), make_shared<op::v0::Abs>(add)));
|
||||
ASSERT_EQ(n.get_pattern_map()[label], add);
|
||||
|
||||
// Correct argument order
|
||||
@ -468,17 +466,17 @@ TEST(pattern, matcher) {
|
||||
{
|
||||
TestMatcher sm(Output<Node>{}, "TestMatcher", true);
|
||||
// exact shape and type
|
||||
auto scalar_param = make_shared<op::Parameter>(element::i32, Shape{});
|
||||
auto scalar_param = make_shared<op::v0::Parameter>(element::i32, Shape{});
|
||||
auto label_dynamic_shape = make_shared<pattern::op::Label>(element::i32, PartialShape::dynamic());
|
||||
auto param = make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto param = make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
ASSERT_TRUE(sm.match(label_dynamic_shape, scalar_param));
|
||||
// wrong type
|
||||
auto scalar_param_wrong_type = make_shared<op::Parameter>(element::f32, Shape{});
|
||||
auto scalar_param_wrong_type = make_shared<op::v0::Parameter>(element::f32, Shape{});
|
||||
ASSERT_FALSE(sm.match(label, scalar_param_wrong_type));
|
||||
// dynamic dimension
|
||||
auto label_dynamic_dimension =
|
||||
make_shared<pattern::op::Label>(element::i32, PartialShape{Dimension::dynamic()});
|
||||
auto vector_param = make_shared<op::Parameter>(element::i32, Shape{10});
|
||||
auto vector_param = make_shared<op::v0::Parameter>(element::i32, Shape{10});
|
||||
ASSERT_TRUE(sm.match(label_dynamic_dimension, vector_param));
|
||||
// dynamic type
|
||||
auto label_dynamic_type = make_shared<pattern::op::Label>(element::dynamic, PartialShape{Dimension::dynamic()});
|
||||
@ -490,9 +488,9 @@ TEST(pattern, mean) {
|
||||
// construct mean
|
||||
TestMatcher n;
|
||||
|
||||
auto input = std::make_shared<op::Parameter>(element::f32, Shape{2, 3});
|
||||
auto N = op::Constant::create(element::f32, Shape{3}, {2, 2, 2});
|
||||
auto sum_input1 = std::make_shared<op::v1::ReduceSum>(input, op::Constant::create(element::i64, {1}, {0}));
|
||||
auto input = std::make_shared<op::v0::Parameter>(element::f32, Shape{2, 3});
|
||||
auto N = ov::op::v0::Constant::create(element::f32, Shape{3}, {2, 2, 2});
|
||||
auto sum_input1 = std::make_shared<op::v1::ReduceSum>(input, ov::op::v0::Constant::create(element::i64, {1}, {0}));
|
||||
auto mean = std::make_shared<op::v1::Divide>(sum_input1, N);
|
||||
|
||||
auto mean_graph = construct_mean_graph();
|
||||
@ -503,13 +501,13 @@ TEST(pattern, mean) {
|
||||
TEST(pattern, variance) {
|
||||
// construct variance
|
||||
TestMatcher n;
|
||||
auto N = op::Constant::create(element::f32, Shape{3}, {2, 2, 2});
|
||||
auto N = ov::op::v0::Constant::create(element::f32, Shape{3}, {2, 2, 2});
|
||||
auto input = std::make_shared<pattern::op::Label>(element::f32, Shape{2, 3});
|
||||
auto input_sq = std::make_shared<op::v1::Multiply>(input, input);
|
||||
auto sum_input = std::make_shared<op::v1::ReduceSum>(input, op::Constant::create(element::i64, {1}, {0}));
|
||||
auto sum_input = std::make_shared<op::v1::ReduceSum>(input, ov::op::v0::Constant::create(element::i64, {1}, {0}));
|
||||
auto square_sumed_input = std::make_shared<op::v1::Multiply>(sum_input, sum_input);
|
||||
auto sum_squared_input =
|
||||
std::make_shared<op::v1::ReduceSum>(input_sq, op::Constant::create(element::i64, {1}, {0}));
|
||||
std::make_shared<op::v1::ReduceSum>(input_sq, ov::op::v0::Constant::create(element::i64, {1}, {0}));
|
||||
auto avg_input_sum_sq = std::make_shared<op::v1::Divide>(square_sumed_input, N);
|
||||
auto xmu = std::make_shared<op::v1::Subtract>(sum_squared_input, avg_input_sum_sq);
|
||||
auto variance = std::make_shared<op::v1::Divide>(xmu, N);
|
||||
@ -520,46 +518,44 @@ TEST(pattern, variance) {
|
||||
}
|
||||
|
||||
TEST(pattern, previous_matches) {
|
||||
using ngraph::pattern::Matcher;
|
||||
Shape shape{};
|
||||
Matcher::PatternMap previous_matches;
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
ov::pass::pattern::Matcher::PatternMap previous_matches;
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto pattern = std::make_shared<pattern::op::Label>(b);
|
||||
auto abs = make_shared<op::Abs>(a);
|
||||
auto abs = make_shared<op::v0::Abs>(a);
|
||||
auto add = make_shared<op::v1::Add>(abs, b);
|
||||
{
|
||||
Matcher n(make_shared<op::v1::Add>(pattern, b));
|
||||
pattern::Matcher n(make_shared<op::v1::Add>(pattern, b));
|
||||
ASSERT_TRUE(n.match(add, previous_matches));
|
||||
ASSERT_EQ(n.get_pattern_map()[pattern], abs);
|
||||
}
|
||||
|
||||
{
|
||||
Matcher n(make_shared<op::v1::Add>(pattern, b));
|
||||
pattern::Matcher n(make_shared<op::v1::Add>(pattern, b));
|
||||
previous_matches.insert(std::make_pair(pattern, a));
|
||||
ASSERT_FALSE(n.match(add, previous_matches));
|
||||
}
|
||||
}
|
||||
|
||||
TEST(pattern, test_sort) {
|
||||
using ngraph::pattern::Matcher;
|
||||
Shape shape{};
|
||||
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto abs1 = make_shared<op::Abs>(a);
|
||||
auto abs2 = make_shared<op::Abs>(b);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto abs1 = make_shared<op::v0::Abs>(a);
|
||||
auto abs2 = make_shared<op::v0::Abs>(b);
|
||||
shared_ptr<Node> add = make_shared<op::v1::Add>(abs1, abs2);
|
||||
|
||||
auto pa = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto pb = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto pabs1 = make_shared<op::Abs>(pa);
|
||||
auto pa = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto pb = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto pabs1 = make_shared<op::v0::Abs>(pa);
|
||||
auto pabs1_label = std::make_shared<pattern::op::Label>(pabs1);
|
||||
auto pabs2 = make_shared<op::Abs>(b);
|
||||
auto pabs2 = make_shared<op::v0::Abs>(b);
|
||||
shared_ptr<Node> padd = make_shared<op::v1::Add>(pabs1_label, pabs2);
|
||||
|
||||
{
|
||||
Matcher n1(padd);
|
||||
pattern::Matcher n1(padd);
|
||||
ASSERT_TRUE(n1.match(add));
|
||||
auto r1 = n1.get_pattern_map()[pabs1_label];
|
||||
ASSERT_TRUE(n1.match(add));
|
||||
@ -568,20 +564,19 @@ TEST(pattern, test_sort) {
|
||||
}
|
||||
|
||||
TEST(pattern, recurrent_pattern) {
|
||||
using ngraph::pattern::RecurrentMatcher;
|
||||
Shape shape{};
|
||||
ngraph::pattern::Matcher::PatternMap previous_matches;
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
pass::pattern::Matcher::PatternMap previous_matches;
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto rpattern = std::make_shared<pattern::op::Label>(b);
|
||||
auto iconst0 = construct_constant_node(0);
|
||||
auto abs = make_shared<op::Abs>(a);
|
||||
auto abs = make_shared<op::v0::Abs>(a);
|
||||
auto add1 = make_shared<op::v1::Add>(iconst0, b);
|
||||
auto add2 = make_shared<op::v1::Add>(iconst0, add1);
|
||||
auto add3 = make_shared<op::v1::Add>(iconst0, add2);
|
||||
auto padd = make_shared<op::v1::Add>(iconst0, rpattern);
|
||||
std::set<std::shared_ptr<pattern::op::Label>> empty_correlated_matches;
|
||||
RecurrentMatcher rm(padd, rpattern, empty_correlated_matches);
|
||||
pattern::RecurrentMatcher rm(padd, rpattern, empty_correlated_matches);
|
||||
ASSERT_TRUE(rm.match(add3));
|
||||
ASSERT_EQ(rm.get_number_of_bound_labels(), 3);
|
||||
auto recurrent_matches = rm.get_bound_nodes_for_pattern(rpattern);
|
||||
@ -595,7 +590,7 @@ TEST(pattern, recurrent_pattern) {
|
||||
auto add2_2 = make_shared<op::v1::Add>(iconst1, add1);
|
||||
auto add3_2 = make_shared<op::v1::Add>(iconst0, add2_2);
|
||||
auto padd2 = make_shared<op::v1::Add>(iconst_label, rpattern);
|
||||
RecurrentMatcher rm2(padd2, rpattern, empty_correlated_matches);
|
||||
pattern::RecurrentMatcher rm2(padd2, rpattern, empty_correlated_matches);
|
||||
ASSERT_TRUE(rm2.match(add3_2));
|
||||
ASSERT_EQ(rm2.get_number_of_bound_labels(), 4);
|
||||
recurrent_matches = rm2.get_bound_nodes_for_pattern(rpattern);
|
||||
@ -610,7 +605,7 @@ TEST(pattern, recurrent_pattern) {
|
||||
// Non-matching correlated labels
|
||||
std::set<std::shared_ptr<pattern::op::Label>> correlated_matches;
|
||||
correlated_matches.insert(iconst_label);
|
||||
RecurrentMatcher rm3(padd2, rpattern, correlated_matches);
|
||||
pattern::RecurrentMatcher rm3(padd2, rpattern, correlated_matches);
|
||||
ASSERT_TRUE(rm3.match(add3_2));
|
||||
ASSERT_EQ(rm3.get_number_of_bound_labels(), 4);
|
||||
iconst_matches = rm3.get_bound_nodes_for_pattern(iconst_label);
|
||||
@ -631,6 +626,7 @@ TEST(pattern, recurrent_pattern) {
|
||||
ASSERT_EQ(iconst_matches.at(2), iconst0);
|
||||
}
|
||||
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
class TestRecurrentGraphRewrite : public ngraph::pass::RecurrentGraphRewrite {
|
||||
public:
|
||||
void construct_recurrent_add() {
|
||||
@ -684,21 +680,21 @@ TEST(pattern, recurrent_graph_rewrite) {
|
||||
pass_manager.register_pass<TestRecurrentGraphRewrite>();
|
||||
|
||||
{
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto iconst0 = construct_constant_node(0);
|
||||
auto add_a1 = make_shared<op::v1::Add>(a, iconst0);
|
||||
auto add_a2 = make_shared<op::v1::Add>(add_a1, iconst0);
|
||||
auto add_a3 = make_shared<op::v1::Add>(add_a2, iconst0);
|
||||
auto abs_add_a3 = std::make_shared<op::Abs>(add_a3);
|
||||
auto abs_add_a3 = std::make_shared<op::v0::Abs>(add_a3);
|
||||
|
||||
auto b = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto add_b1 = make_shared<op::v1::Add>(b, iconst0);
|
||||
auto add_b2 = make_shared<op::v1::Add>(add_b1, iconst0);
|
||||
auto abs_add_b2 = std::make_shared<op::Abs>(add_b2);
|
||||
auto abs_add_b2 = std::make_shared<op::v0::Abs>(add_b2);
|
||||
|
||||
auto graph = make_shared<op::v1::Multiply>(abs_add_a3, abs_add_b2);
|
||||
|
||||
auto f = std::make_shared<Function>(ngraph::NodeVector{graph}, ParameterVector{a, b});
|
||||
auto f = std::make_shared<Model>(ngraph::NodeVector{graph}, ParameterVector{a, b});
|
||||
pass_manager.run_passes(f);
|
||||
|
||||
auto left_abs = graph->input_value(0).get_node_shared_ptr();
|
||||
@ -710,21 +706,24 @@ TEST(pattern, recurrent_graph_rewrite) {
|
||||
ASSERT_EQ(add_b, b);
|
||||
}
|
||||
}
|
||||
OPENVINO_SUPPRESS_DEPRECATED_END
|
||||
|
||||
TEST(pattern, label_on_skip) {
|
||||
Shape shape{2, 2};
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::Parameter>(element::i32, Shape{});
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto b = make_shared<op::v0::Parameter>(element::i32, Shape{});
|
||||
OPENVINO_SUPPRESS_DEPRECATED_START
|
||||
auto iconst = ngraph::make_zero(element::i32, Shape{});
|
||||
auto label = std::make_shared<pattern::op::Label>(iconst);
|
||||
auto const_label = std::make_shared<pattern::op::Label>(iconst, ngraph::is_zero, NodeVector{iconst});
|
||||
OPENVINO_SUPPRESS_DEPRECATED_END
|
||||
|
||||
auto bcst_pred = [](std::shared_ptr<Node> n) {
|
||||
return ov::as_type_ptr<op::v1::Broadcast>(n) != nullptr;
|
||||
};
|
||||
|
||||
auto shape_const = op::Constant::create(element::u64, Shape{shape.size()}, shape);
|
||||
auto axes_const = op::Constant::create(element::u8, Shape{}, {0});
|
||||
auto shape_const = ov::op::v0::Constant::create(element::u64, Shape{shape.size()}, shape);
|
||||
auto axes_const = ov::op::v0::Constant::create(element::u8, Shape{}, {0});
|
||||
auto bcst = std::make_shared<pattern::op::Skip>(OutputVector{const_label, shape_const, axes_const}, bcst_pred);
|
||||
auto bcst_label = std::make_shared<pattern::op::Label>(bcst, nullptr, NodeVector{bcst});
|
||||
auto matcher =
|
||||
@ -745,32 +744,32 @@ TEST(pattern, label_on_skip) {
|
||||
|
||||
TEST(pattern, is_contained_match) {
|
||||
Shape shape{};
|
||||
auto a = make_shared<op::Parameter>(element::i32, shape);
|
||||
auto absn = make_shared<op::Abs>(a);
|
||||
auto a = make_shared<op::v0::Parameter>(element::i32, shape);
|
||||
auto absn = make_shared<op::v0::Abs>(a);
|
||||
TestMatcher n;
|
||||
|
||||
auto label_a = std::make_shared<pattern::op::Label>(a);
|
||||
auto label_abs = make_shared<op::Abs>(a);
|
||||
auto label_abs = make_shared<op::v0::Abs>(a);
|
||||
ASSERT_TRUE(n.match(label_abs, absn));
|
||||
auto result_absn = make_shared<op::Result>(absn);
|
||||
auto result_absn = make_shared<ov::op::v0::Result>(absn);
|
||||
ASSERT_TRUE(n.is_contained_match());
|
||||
|
||||
auto absn2 = make_shared<op::Abs>(absn);
|
||||
auto result_absn2 = make_shared<op::Result>(absn2);
|
||||
auto label_abs2 = make_shared<op::Abs>(label_abs);
|
||||
auto absn2 = make_shared<op::v0::Abs>(absn);
|
||||
auto result_absn2 = make_shared<ov::op::v0::Result>(absn2);
auto label_abs2 = make_shared<op::v0::Abs>(label_abs);
ASSERT_TRUE(n.match(label_abs2, absn2));
ASSERT_FALSE(n.is_contained_match());
}

TEST(pattern, wrap_type_single_op) {
auto a = make_shared<op::Parameter>(element::f32, Shape{1, 3, 64, 64});
auto b = make_shared<op::Abs>(a);
auto c = make_shared<op::Relu>(a);
auto mul1 = make_shared<op::v1::Multiply>(a, op::Constant::create(element::f32, Shape{}, {1}));
auto mul2 = make_shared<op::v1::Multiply>(op::Constant::create(element::f32, Shape{}, {1}), a);
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1, 3, 64, 64});
auto b = make_shared<op::v0::Abs>(a);
auto c = make_shared<ov::op::v0::Relu>(a);
auto mul1 = make_shared<op::v1::Multiply>(a, ov::op::v0::Constant::create(element::f32, Shape{}, {1}));
auto mul2 = make_shared<op::v1::Multiply>(ov::op::v0::Constant::create(element::f32, Shape{}, {1}), a);

{
auto m = pattern::wrap_type<op::Abs>();
auto m = pattern::wrap_type<op::v0::Abs>();
auto matcher = std::make_shared<pattern::Matcher>(m, "AbsMatcher");
ASSERT_TRUE(matcher->match(static_pointer_cast<Node>(b)));
ASSERT_EQ(matcher->get_matched_nodes().size(), 1);
@ -779,8 +778,8 @@ TEST(pattern, wrap_type_single_op) {
ASSERT_FALSE(matcher->match(static_pointer_cast<Node>(c)));
}
{
auto m1 = pattern::wrap_type<op::Parameter>();
auto m2 = pattern::wrap_type<op::Abs>({m1});
auto m1 = pattern::wrap_type<op::v0::Parameter>();
auto m2 = pattern::wrap_type<op::v0::Abs>({m1});
auto matcher = std::make_shared<pattern::Matcher>(m2, "ParamAbsMatcher");
ASSERT_TRUE(matcher->match(static_pointer_cast<Node>(b)));
ASSERT_EQ(matcher->get_matched_nodes().size(), 2);
@ -789,13 +788,15 @@ TEST(pattern, wrap_type_single_op) {
ASSERT_FALSE(matcher->match(static_pointer_cast<Node>(c)));
}
{
auto m1 = pattern::wrap_type<op::v1::Multiply>({pattern::any_input(), pattern::wrap_type<op::Constant>()});
auto m1 =
pattern::wrap_type<op::v1::Multiply>({pattern::any_input(), pattern::wrap_type<ov::op::v0::Constant>()});
auto matcher = std::make_shared<pattern::Matcher>(m1, "MultiplyMatcher");
ASSERT_TRUE(matcher->match(static_pointer_cast<Node>(mul1)));
ASSERT_TRUE(matcher->match(static_pointer_cast<Node>(mul2)));
}
{
auto m1 = pattern::wrap_type<op::v1::Multiply>({pattern::wrap_type<op::Constant>(), pattern::any_input()});
auto m1 =
pattern::wrap_type<op::v1::Multiply>({pattern::wrap_type<ov::op::v0::Constant>(), pattern::any_input()});
auto matcher = std::make_shared<pattern::Matcher>(m1, "MultiplyMatcher");
ASSERT_TRUE(matcher->match(static_pointer_cast<Node>(mul1)));
ASSERT_TRUE(matcher->match(static_pointer_cast<Node>(mul2)));
@ -803,11 +804,11 @@ TEST(pattern, wrap_type_single_op) {
}

TEST(pattern, wrap_type_multi_op) {
auto a = make_shared<op::Parameter>(element::f32, Shape{1, 3, 64, 64});
auto b = make_shared<op::Abs>(a);
auto c = make_shared<op::Relu>(a);
auto mul = make_shared<op::v1::Multiply>(a, op::Constant::create(element::f32, Shape{}, {1}));
auto add = make_shared<op::v1::Add>(op::Constant::create(element::f32, Shape{}, {1}), a);
auto a = make_shared<op::v0::Parameter>(element::f32, Shape{1, 3, 64, 64});
auto b = make_shared<op::v0::Abs>(a);
auto c = make_shared<ov::op::v0::Relu>(a);
auto mul = make_shared<op::v1::Multiply>(a, ov::op::v0::Constant::create(element::f32, Shape{}, {1}));
auto add = make_shared<op::v1::Add>(ov::op::v0::Constant::create(element::f32, Shape{}, {1}), a);

{
auto m = pattern::wrap_type<op::v1::Multiply, op::v1::Add>();
@ -5,8 +5,7 @@
#include "common_test_utils/test_assertions.hpp"
#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/ops.hpp"
#include "openvino/core/except.hpp"
#include "openvino/core/preprocess/pre_post_process.hpp"
#include "openvino/opsets/opset8.hpp"
#include "openvino/util/common_util.hpp"
@ -1991,7 +1990,7 @@ TEST(pre_post_process, exception_safety) {
.custom([](const Output<Node>& node) -> Output<Node> {
OPENVINO_THROW("test error");
});
p.build(), ngraph::ngraph_error);
p.build(), ov::Exception);
EXPECT_EQ(f->get_parameters().size(), 2);

EXPECT_EQ(f->input(0).get_element_type(), element::f32);
@ -2,12 +2,20 @@
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>

#include "common_test_utils/type_prop.hpp"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "openvino/core/model.hpp"
#include "openvino/op/add.hpp"
#include "openvino/op/constant.hpp"
#include "openvino/op/multiply.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/op/relu.hpp"
#include "openvino/op/split.hpp"
#include "openvino/op/subtract.hpp"

using namespace std;
using namespace ngraph;
using namespace ov;

//
// Graph before (params in [] brackets, constants in () parens, results in {} braces):
@ -47,24 +55,24 @@ using namespace ngraph;
// {r}
//
TEST(replace_node, replace_nodes) {
auto x = make_shared<op::Parameter>(element::f32, Shape{2});
auto y = make_shared<op::Parameter>(element::f32, Shape{2});
auto z = make_shared<op::Parameter>(element::f32, Shape{2});
auto x = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2});
auto y = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2});
auto z = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2});

auto add = make_shared<op::v1::Add>(x, y);
auto k = make_shared<op::Constant>(element::f32, Shape{2}, vector<float>{1, 2});
auto k = make_shared<ov::op::v0::Constant>(element::f32, Shape{2}, vector<float>{1, 2});
auto mul = make_shared<op::v1::Multiply>(add, k);
auto sub = make_shared<op::v1::Subtract>(mul, z);

auto f = make_shared<Function>(NodeVector{sub}, ParameterVector{x, y, z});
auto f = make_shared<Model>(NodeVector{sub}, ParameterVector{x, y, z});

unordered_map<shared_ptr<op::Parameter>, shared_ptr<op::Parameter>> parameter_replacement_map;
auto x_replacement = make_shared<op::Parameter>(element::f32, Shape{2});
unordered_map<shared_ptr<ov::op::v0::Parameter>, shared_ptr<ov::op::v0::Parameter>> parameter_replacement_map;
auto x_replacement = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2});
parameter_replacement_map[x] = x_replacement;

unordered_map<shared_ptr<Node>, shared_ptr<Node>> body_replacement_map;
auto y_replacement = make_shared<op::Constant>(element::f32, Shape{2}, vector<float>{3, 4});
auto k_replacement = make_shared<op::Constant>(element::f32, Shape{2}, vector<float>{5, 6});
auto y_replacement = make_shared<ov::op::v0::Constant>(element::f32, Shape{2}, vector<float>{3, 4});
auto k_replacement = make_shared<ov::op::v0::Constant>(element::f32, Shape{2}, vector<float>{5, 6});
auto z_replacement = make_shared<op::v1::Add>(x_replacement, mul);
body_replacement_map[y] = y_replacement;
body_replacement_map[k] = k_replacement;
@ -108,12 +116,12 @@ TEST(replace_node, replace_nodes) {
}

TEST(replace_node, simple_node_replacement) {
auto param = std::make_shared<op::Parameter>(element::i64, Shape{1, 64});
auto param = std::make_shared<ov::op::v0::Parameter>(element::i64, Shape{1, 64});
param->output(0).get_tensor().set_names({"a", "b"});
auto relu = std::make_shared<op::Relu>(param);
auto relu = std::make_shared<ov::op::v0::Relu>(param);
relu->output(0).get_tensor().set_names({"c", "d"});

auto new_relu = std::make_shared<op::Relu>(param);
auto new_relu = std::make_shared<ov::op::v0::Relu>(param);
new_relu->output(0).get_tensor().set_names({"f"});
replace_node(relu, new_relu);

@ -121,11 +129,11 @@ TEST(replace_node, simple_node_replacement) {
}

TEST(replace_node, node_elimination) {
auto param = std::make_shared<op::Parameter>(element::i64, Shape{1, 64});
auto param = std::make_shared<ov::op::v0::Parameter>(element::i64, Shape{1, 64});
param->output(0).get_tensor().set_names({"a", "b"});
auto relu1 = std::make_shared<op::Relu>(param);
auto relu1 = std::make_shared<ov::op::v0::Relu>(param);
relu1->output(0).get_tensor().set_names({"c", "d"});
auto relu2 = std::make_shared<op::Relu>(relu1);
auto relu2 = std::make_shared<ov::op::v0::Relu>(relu1);
relu2->output(0).get_tensor().set_names({"e", "f"});

ASSERT_TRUE(replace_output_update_name(relu2->output(0), relu2->input_value(0)));
@ -134,11 +142,11 @@ TEST(replace_node, node_elimination) {
}

TEST(replace_node, node_elimination_1) {
auto param = std::make_shared<op::Parameter>(element::i64, Shape{3, 64});
auto split = std::make_shared<op::v1::Split>(param, op::Constant::create(element::i64, Shape{}, {0}), 3);
auto relu1 = std::make_shared<op::Relu>(split->output(2));
auto relu2 = std::make_shared<op::Relu>(relu1);
auto result2 = std::make_shared<op::Result>(relu2);
auto param = std::make_shared<ov::op::v0::Parameter>(element::i64, Shape{3, 64});
auto split = std::make_shared<op::v1::Split>(param, ov::op::v0::Constant::create(element::i64, Shape{}, {0}), 3);
auto relu1 = std::make_shared<ov::op::v0::Relu>(split->output(2));
auto relu2 = std::make_shared<ov::op::v0::Relu>(relu1);
auto result2 = std::make_shared<ov::op::v0::Result>(relu2);

// relu1 can be removed because we don't have to preserve name
ASSERT_TRUE(replace_output_update_name(relu1->output(0), relu1->input_value(0)));
@ -148,44 +156,44 @@ TEST(replace_node, node_elimination_1) {
}

TEST(replace_node, node_elimination_2) {
auto param = std::make_shared<op::Parameter>(element::i64, Shape{3, 64});
auto relu1 = std::make_shared<op::Relu>(param);
auto result1 = std::make_shared<op::Result>(relu1);
auto relu2 = std::make_shared<op::Relu>(relu1);
auto result2 = std::make_shared<op::Result>(relu2);
auto param = std::make_shared<ov::op::v0::Parameter>(element::i64, Shape{3, 64});
auto relu1 = std::make_shared<ov::op::v0::Relu>(param);
auto result1 = std::make_shared<ov::op::v0::Result>(relu1);
auto relu2 = std::make_shared<ov::op::v0::Relu>(relu1);
auto result2 = std::make_shared<ov::op::v0::Result>(relu2);

// relu2 can't be removed because relu1 has Result as consumer
ASSERT_FALSE(replace_output_update_name(relu2->output(0), relu2->input_value(0)));
}

TEST(replace_node, node_elimination_3) {
auto param = std::make_shared<op::Parameter>(element::i64, Shape{3, 64});
auto relu1 = std::make_shared<op::Relu>(param);
auto relu2 = std::make_shared<op::Relu>(relu1);
auto relu3 = std::make_shared<op::Relu>(relu1);
auto result2 = std::make_shared<op::Result>(relu3);
auto param = std::make_shared<ov::op::v0::Parameter>(element::i64, Shape{3, 64});
auto relu1 = std::make_shared<ov::op::v0::Relu>(param);
auto relu2 = std::make_shared<ov::op::v0::Relu>(relu1);
auto relu3 = std::make_shared<ov::op::v0::Relu>(relu1);
auto result2 = std::make_shared<ov::op::v0::Result>(relu3);

// relu3 can be removed because relu1 has no Result as consumer
ASSERT_TRUE(replace_output_update_name(relu3->output(0), relu3->input_value(0)));
}

TEST(replace_node, node_elimination_4) {
auto param = std::make_shared<op::Parameter>(element::i64, Shape{3, 64});
auto relu1 = std::make_shared<op::Relu>(param);
auto split = std::make_shared<op::v1::Split>(relu1, op::Constant::create(element::i64, Shape{}, {0}), 3);
auto relu2 = std::make_shared<op::Relu>(split->output(2));
auto result2 = std::make_shared<op::Result>(relu2);
auto param = std::make_shared<ov::op::v0::Parameter>(element::i64, Shape{3, 64});
auto relu1 = std::make_shared<ov::op::v0::Relu>(param);
auto split = std::make_shared<op::v1::Split>(relu1, ov::op::v0::Constant::create(element::i64, Shape{}, {0}), 3);
auto relu2 = std::make_shared<ov::op::v0::Relu>(split->output(2));
auto result2 = std::make_shared<ov::op::v0::Result>(relu2);

ASSERT_TRUE(replace_output_update_name(split->output(2), split->input_value(0)));
}

TEST(replace_node, output_replacement) {
auto param = std::make_shared<op::Parameter>(element::i64, Shape{1, 64});
auto param = std::make_shared<ov::op::v0::Parameter>(element::i64, Shape{1, 64});
param->output(0).get_tensor().set_names({"a", "b"});
auto relu = std::make_shared<op::Relu>(param);
auto relu = std::make_shared<ov::op::v0::Relu>(param);
relu->output(0).get_tensor().set_names({"c", "d"});

auto new_relu = std::make_shared<op::Relu>(param);
auto new_relu = std::make_shared<ov::op::v0::Relu>(param);
new_relu->output(0).get_tensor().set_names({"f"});

relu->output(0).replace(new_relu->output(0));
@ -194,13 +202,13 @@ TEST(replace_node, output_replacement) {
}

TEST(replace_node, source_replacement) {
auto param = std::make_shared<op::Parameter>(element::i64, Shape{1, 64});
auto param = std::make_shared<ov::op::v0::Parameter>(element::i64, Shape{1, 64});
param->output(0).get_tensor().set_names({"a", "b"});

auto param1 = std::make_shared<op::Parameter>(element::i64, Shape{1, 64});
auto param1 = std::make_shared<ov::op::v0::Parameter>(element::i64, Shape{1, 64});
param1->output(0).get_tensor().set_names({"c", "d"});

auto relu = std::make_shared<op::Relu>(param);
auto relu = std::make_shared<ov::op::v0::Relu>(param);
relu->input(0).replace_source_output(param1->output(0));

ASSERT_EQ(param->output(0).get_tensor().get_names(), std::unordered_set<std::string>({"a", "b"}));
@ -2,15 +2,16 @@
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>

#include <numeric>
#include <vector>

#include "common_test_utils/ndarray.hpp"
#include "gtest/gtest.h"
#include "ngraph/axis_vector.hpp"
#include "ngraph/runtime/opt_kernel/reshape.hpp"
#include "ngraph/shape.hpp"
#include "openvino/core/axis_vector.hpp"

using namespace ov;
using namespace ngraph;

namespace {
@ -31,8 +32,8 @@ AxisVector get_axis_order(AxisOrder order, size_t size) {

struct TestParams {
AxisOrder order;
test::NDArrayBase<ElementValue> input;
test::NDArrayBase<ElementValue> output;
ngraph::test::NDArrayBase<ElementValue> input;
ngraph::test::NDArrayBase<ElementValue> output;
};

struct ReshapeOptKernel : ::testing::TestWithParam<TestParams> {};
@ -50,7 +51,7 @@ TEST_P(ReshapeOptKernel, reshape_opt_kernel) {
for (size_t i = 0; i < out_shape.size(); i++)
out_shape[i] = in_shape[axis_order[i]];

runtime::opt_kernel::reshape((const char*)p.input.data(),
ngraph::runtime::opt_kernel::reshape((const char*)p.input.data(),
(char*)output_buff.data(),
in_shape,
axis_order,
@ -4,10 +4,9 @@

#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/node.hpp"
#include "openvino/op/op.hpp"

using namespace ngraph;
using namespace ov;
using namespace std;

class OpType : public ov::op::Op {
@ -50,7 +49,6 @@ public:
}
};

OPENVINO_SUPPRESS_DEPRECATED_START
TEST(rtti, op_with_type) {
auto op = OpType();
auto type_info = op.get_type_info();
@ -2,16 +2,17 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "openvino/core/shape.hpp"

#include <gtest/gtest.h>

#include <memory>

#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"

using namespace std;
using namespace ngraph;
using namespace ov;

TEST(shape, test_shape_size) {
ASSERT_EQ(1, shape_size(Shape{}));
ASSERT_EQ(1, shape_size(ov::Shape{}));
ASSERT_EQ(2 * 3 * 5, shape_size(Shape{2, 3, 5}));
}
@ -10,26 +10,24 @@

#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/function.hpp"
#include "ngraph/ngraph.hpp"
#include "ngraph/opsets/opset6.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/node.hpp"
#include "openvino/core/model.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/op/relu.hpp"
#include "tensor_conversion_util.hpp"

NGRAPH_SUPPRESS_DEPRECATED_START

using namespace std;
using namespace ngraph;
using namespace ov;

TEST(tensor, tensor_names) {
auto arg0 = make_shared<opset6::Parameter>(element::f32, Shape{1});
auto arg0 = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1});
arg0->set_friendly_name("data");
arg0->get_output_tensor(0).set_names({"input"});

auto relu = make_shared<opset6::Relu>(arg0);
auto relu = make_shared<ov::op::v0::Relu>(arg0);
relu->set_friendly_name("relu");
relu->get_output_tensor(0).set_names({"relu_t", "identity"});
auto f0 = make_shared<Function>(relu, ParameterVector{arg0});
auto f0 = make_shared<Model>(relu, ParameterVector{arg0});

ASSERT_EQ(arg0->get_output_tensor(0).get_names(), relu->get_input_tensor(0).get_names());
ASSERT_EQ(arg0->get_output_tensor(0).get_names(), relu->input_value(0).get_tensor().get_names());
@ -39,14 +37,18 @@ TEST(tensor, tensor_names) {

TEST(tensor, wrap_tensor_with_unspecified_type) {
auto param = std::make_shared<ov::op::v0::Parameter>(element::undefined, ov::PartialShape{});
OPENVINO_SUPPRESS_DEPRECATED_START
auto tensor = ov::util::wrap_tensor(param->output(0));
OPENVINO_SUPPRESS_DEPRECATED_END
// !tensor means that the tensor is not initialized
EXPECT_EQ(!tensor, true);
}

TEST(tensor, wrap_tensor_with_unspecified_type_from_host_tensor) {
OPENVINO_SUPPRESS_DEPRECATED_START
auto host_tensor = std::make_shared<ngraph::HostTensor>(element::undefined, ov::PartialShape{});
auto tensor = ov::util::wrap_tensor(host_tensor);
OPENVINO_SUPPRESS_DEPRECATED_END
// !tensor means that the tensor is not initialized
EXPECT_EQ(!tensor, true);
}
@ -17,7 +17,7 @@
#include "openvino/opsets/opset8.hpp"
#include "ov_ops/type_relaxed.hpp"

using namespace ngraph;
using namespace ov;
using namespace std;

std::shared_ptr<ov::Model> create_complex_function(size_t wide = 50) {
@ -70,7 +70,7 @@ std::shared_ptr<ov::Model> create_complex_function(size_t wide = 50) {
nodes.push(out);
}
auto result = std::make_shared<ov::opset8::Result>(nodes.front());
return std::make_shared<Function>(ov::ResultVector{result}, ov::ParameterVector{parameter});
return std::make_shared<Model>(ov::ResultVector{result}, ov::ParameterVector{parameter});
}

TEST(threading, get_friendly_name) {
@ -83,14 +83,14 @@ TEST(threading, get_friendly_name) {
auto add_a3 = make_shared<ov::opset8::Add>(add_a2, iconst0);
auto abs_add_a3 = std::make_shared<ov::opset8::Abs>(add_a3);

auto b = make_shared<op::Parameter>(element::i32, shape);
auto b = make_shared<ov::op::v0::Parameter>(element::i32, shape);
auto add_b1 = make_shared<ov::opset8::Add>(b, iconst0);
auto add_b2 = make_shared<ov::opset8::Add>(add_b1, iconst0);
auto abs_add_b2 = std::make_shared<ov::opset8::Abs>(add_b2);

auto graph = make_shared<ov::opset8::Multiply>(abs_add_a3, abs_add_b2);

auto f = std::make_shared<Function>(ov::NodeVector{graph}, ParameterVector{a, b});
auto f = std::make_shared<Model>(ov::NodeVector{graph}, ParameterVector{a, b});

const auto compare_names = [](const std::vector<std::string>& names) {
static std::unordered_set<std::string> ref_names;
@ -104,7 +104,7 @@ TEST(threading, get_friendly_name) {
}
};

const auto get_friendly_name = [&](const std::shared_ptr<ngraph::Function>& f) {
const auto get_friendly_name = [&](const std::shared_ptr<ov::Model>& f) {
std::vector<std::string> names;
for (const auto& op : f->get_ops()) {
names.emplace_back(op->get_friendly_name());
@ -149,12 +149,12 @@ TEST(threading, check_atomic_guard) {

TEST(threading, clone_with_new_inputs) {
auto function = create_complex_function(100);
const auto cloneNodes = [&](const std::shared_ptr<const ngraph::Function>& f) {
const auto cloneNodes = [&](const std::shared_ptr<const ov::Model>& f) {
auto orderedOps = function->get_ordered_ops();
std::vector<std::shared_ptr<ov::Node>> nodes;
for (const auto& op : orderedOps) {
ngraph::OutputVector inputsForShapeInfer;
std::shared_ptr<ngraph::Node> opToShapeInfer;
ov::OutputVector inputsForShapeInfer;
std::shared_ptr<ov::Node> opToShapeInfer;

const auto inSize = op->get_input_size();
for (size_t i = 0; i < inSize; i++) {
@ -8,7 +8,6 @@
#include "openvino/opsets/opset.hpp"
#include "openvino/util/common_util.hpp"

OPENVINO_SUPPRESS_DEPRECATED_START
TEST(type_info, compare_old_type) {
ov::DiscreteTypeInfo type1("type1");
ov::DiscreteTypeInfo type2("type2");
@ -2,31 +2,33 @@
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>

#include <memory>

#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/op/ctc_greedy_decoder.hpp"
#include "ngraph/op/interpolate.hpp"
#include "ngraph/op/region_yolo.hpp"
#include "ngraph/op/reorg_yolo.hpp"
#include "ngraph/op/roi_pooling.hpp"
#include "openvino/op/constant.hpp"
#include "openvino/op/ctc_greedy_decoder.hpp"
#include "openvino/op/interpolate.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/op/region_yolo.hpp"
#include "openvino/op/reorg_yolo.hpp"
#include "openvino/op/roi_pooling.hpp"
using namespace std;
using namespace ngraph;
using namespace ov;

TEST(type_prop_layers, ctc_greedy_decoder) {
auto input = make_shared<op::Parameter>(element::f32, Shape{88, 2, 48});
auto seq_len = make_shared<op::Parameter>(element::f32, Shape{88, 2});
auto op = make_shared<op::CTCGreedyDecoder>(input, seq_len, false);
auto input = make_shared<ov::op::v0::Parameter>(element::f32, Shape{88, 2, 48});
auto seq_len = make_shared<ov::op::v0::Parameter>(element::f32, Shape{88, 2});
auto op = make_shared<op::v0::CTCGreedyDecoder>(input, seq_len, false);
ASSERT_EQ(op->get_shape(), (Shape{2, 88, 1, 1}));
}

TEST(type_prop_layers, interpolate) {
auto image = make_shared<op::Parameter>(element::f32, Shape{2, 2, 33, 65});
auto dyn_output_shape = make_shared<op::Parameter>(element::i64, Shape{2});
auto image = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 2, 33, 65});
auto dyn_output_shape = make_shared<ov::op::v0::Parameter>(element::i64, Shape{2});
auto output_shape = op::v0::Constant::create<int64_t>(element::i64, Shape{2}, {15, 30});

op::v0::InterpolateAttrs attrs;
op::v0::Interpolate::Attributes attrs;
attrs.axes = {2, 3};
attrs.mode = "nearest";
attrs.align_corners = true;
@ -42,32 +44,32 @@ TEST(type_prop_layers, interpolate) {
}

TEST(type_prop_layers, region_yolo1) {
auto inputs = make_shared<op::Parameter>(element::f32, Shape{1, 125, 13, 13});
auto op = make_shared<op::RegionYolo>(inputs, 0, 0, 0, true, std::vector<int64_t>{}, 0, 1);
auto inputs = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 125, 13, 13});
auto op = make_shared<op::v0::RegionYolo>(inputs, 0, 0, 0, true, std::vector<int64_t>{}, 0, 1);
ASSERT_EQ(op->get_shape(), (Shape{1 * 125, 13, 13}));
}

TEST(type_prop_layers, region_yolo2) {
auto inputs = make_shared<op::Parameter>(element::f32, Shape{1, 125, 13, 13});
auto op = make_shared<op::RegionYolo>(inputs, 0, 0, 0, true, std::vector<int64_t>{}, 0, 2);
auto inputs = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 125, 13, 13});
auto op = make_shared<op::v0::RegionYolo>(inputs, 0, 0, 0, true, std::vector<int64_t>{}, 0, 2);
ASSERT_EQ(op->get_shape(), (Shape{1 * 125 * 13, 13}));
}

TEST(type_prop_layers, region_yolo3) {
auto inputs = make_shared<op::Parameter>(element::f32, Shape{1, 125, 13, 13});
auto op = make_shared<op::RegionYolo>(inputs, 4, 80, 1, false, std::vector<int64_t>{6, 7, 8}, 0, -1);
auto inputs = make_shared<ov::op::v0::Parameter>(element::f32, Shape{1, 125, 13, 13});
auto op = make_shared<op::v0::RegionYolo>(inputs, 4, 80, 1, false, std::vector<int64_t>{6, 7, 8}, 0, -1);
ASSERT_EQ(op->get_shape(), (Shape{1, (80 + 4 + 1) * 3, 13, 13}));
}

TEST(type_prop_layers, reorg_yolo) {
auto inputs = make_shared<op::Parameter>(element::f32, Shape{2, 24, 34, 62});
auto op = make_shared<op::ReorgYolo>(inputs, Strides{2});
auto inputs = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 24, 34, 62});
auto op = make_shared<op::v0::ReorgYolo>(inputs, Strides{2});
ASSERT_EQ(op->get_shape(), (Shape{2, 96, 17, 31}));
}

TEST(type_prop_layers, roi_pooling) {
auto inputs = make_shared<op::Parameter>(element::f32, Shape{2, 3, 4, 5});
auto coords = make_shared<op::Parameter>(element::f32, Shape{150, 5});
auto op = make_shared<op::ROIPooling>(inputs, coords, Shape{6, 6}, 0.0625f, "max");
auto inputs = make_shared<ov::op::v0::Parameter>(element::f32, Shape{2, 3, 4, 5});
auto coords = make_shared<ov::op::v0::Parameter>(element::f32, Shape{150, 5});
auto op = make_shared<op::v0::ROIPooling>(inputs, coords, Shape{6, 6}, 0.0625f, "max");
ASSERT_EQ(op->get_shape(), (Shape{150, 3, 6, 6}));
}
@ -40,8 +40,8 @@ TEST_F(TypeRelaxedThreading, TypeRelaxedCloning) {
auto inp1 = std::make_shared<op::v0::Parameter>(element::i8, PartialShape{-1, -1, -1, -1});
auto inp2 = std::make_shared<op::v0::Parameter>(element::i8, PartialShape{-1, -1, -1, -1});

auto matMulRelaxed = std::make_shared<ov::op::TypeRelaxed<ngraph::opset3::MatMul>>(
*as_type_ptr<ngraph::opset3::MatMul>(ngraph::builder::makeMatMul(inp1_f32, inp2_f32, false, false)),
auto matMulRelaxed = std::make_shared<ov::op::TypeRelaxed<ov::op::v0::MatMul>>(
*as_type_ptr<op::v0::MatMul>(ngraph::builder::makeMatMul(inp1_f32, inp2_f32, false, false)),
element::f32);
auto matMul = matMulRelaxed->clone_with_new_inputs({inp1, inp2});
@ -4,14 +4,14 @@

#include "common_test_utils/test_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "openvino/op/constant.hpp"

using namespace ngraph;
using namespace ov;
using namespace std;

TEST(uint4, convert_u4_to_string) {
vector<uint8_t> values{171, 16};
auto constant = make_shared<op::Constant>(element::u4, Shape{3}, &values[0]);
auto constant = make_shared<ov::op::v0::Constant>(element::u4, Shape{3}, &values[0]);

vector<string> ref{"10", "11", "1"};
for (size_t i = 0; i < 3; ++i) {
@ -21,7 +21,7 @@ TEST(uint4, convert_u4_to_string) {

TEST(uint4, tensor_or_constant_size) {
vector<uint8_t> values{171, 16};
auto constant = make_shared<op::Constant>(element::u4, Shape{3}, &values[0]);
auto constant = make_shared<op::v0::Constant>(element::u4, Shape{3}, &values[0]);
EXPECT_EQ(2, constant->get_byte_size());

ov::Tensor runtime_tensor(ov::element::u4, ov::Shape{3});
@ -30,7 +30,7 @@ TEST(uint4, tensor_or_constant_size) {

TEST(u1, tensor_or_constant_size) {
vector<uint8_t> values{171, 16};
auto constant = make_shared<op::Constant>(element::u1, Shape{3}, &values[0]);
auto constant = make_shared<op::v0::Constant>(element::u1, Shape{3}, &values[0]);
EXPECT_EQ(1, constant->get_byte_size());

ov::Tensor runtime_tensor(ov::element::u1, ov::Shape{3});
@ -39,7 +39,7 @@ TEST(get_constant_from_source, invalidation_check) {
}

TEST(get_constant_from_source, extract_static_dim_from_dynamic_shape_check) {
auto data = std::make_shared<ov::opset8::Parameter>(ngraph::element::f32, ov::PartialShape{-1, 1, 128});
auto data = std::make_shared<ov::opset8::Parameter>(ov::element::f32, ov::PartialShape{-1, 1, 128});
auto shape = std::make_shared<ov::opset8::ShapeOf>(data);
auto one = ov::opset8::Constant::create(ov::element::i64, {1}, {1});
auto zero = ov::opset8::Constant::create(ov::element::i64, {1}, {0});