Remove obsoleted v0::Not operator (#2846)

* Remove obsoleted v0::Not operator

* restore tests from ngraph/test/backend
This commit is contained in:
Mateusz Tabaka 2020-10-30 05:33:26 +01:00 committed by GitHub
parent 34af04e1a8
commit d6a9ef3a8f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 11 additions and 149 deletions

View File

@@ -45,36 +45,5 @@ namespace ngraph
const HostTensorVector& inputs) const override;
};
}
// NOTE(review): this is the declaration removed by the commit — the deprecated
// v0::Not operator, superseded by v1::LogicalNot (see deprecation message below).
namespace v0
{
/// \brief Elementwise logical negation operation.
class NGRAPH_DEPRECATED(
"This operation is deprecated and will be removed soon. "
"Use v1::LogicalNot instead of it.") NGRAPH_API Not : public Op
{
NGRAPH_SUPPRESS_DEPRECATED_START
public:
// Static type identity: name "Not", opset version 0.
static constexpr NodeTypeInfo type_info{"Not", 0};
const NodeTypeInfo& get_type_info() const override { return type_info; }
/// \brief Constructs a logical negation operation.
Not() = default;
/// \brief Constructs a logical negation operation.
///
/// \param arg Node that produces the input tensor.
Not(const Output<Node>& arg);
void validate_and_infer_types() override;
virtual std::shared_ptr<Node>
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
NGRAPH_SUPPRESS_DEPRECATED_END
};
}
// Legacy unqualified alias (op::Not); suppressed so the alias itself does not
// trigger the deprecation warning.
NGRAPH_SUPPRESS_DEPRECATED_START
using v0::Not;
NGRAPH_SUPPRESS_DEPRECATED_END
} // namespace op
} // namespace ngraph
} // namespace op
} // namespace ngraph

View File

@@ -117,7 +117,6 @@ NGRAPH_OP(NonMaxSuppression, ngraph::op::v1, 1)
NGRAPH_OP(NonMaxSuppression, ngraph::op::v3, 3)
NGRAPH_OP(NonZero, ngraph::op::v3, 3)
NGRAPH_OP(NormalizeL2, ngraph::op::v0, 0)
NGRAPH_OP(Not, ngraph::op::v0, 0)
NGRAPH_OP(NotEqual, ngraph::op::v0, 0)
NGRAPH_OP(NotEqual, ngraph::op::v1, 1)
NGRAPH_OP(OneHot, ngraph::op::v1, 1)

View File

@@ -101,33 +101,3 @@ bool op::v1::LogicalNot::evaluate(const HostTensorVector& outputs,
OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::v1::LogicalNot::evaluate");
return notop::evaluate_not(inputs[0], outputs[0], shape_size(get_output_shape(0)));
}
// NOTE(review): implementation of the deprecated v0::Not operator, removed by
// this commit in favor of v1::LogicalNot (whose evaluate appears just above).
// Out-of-class definition required for the in-class constexpr type_info.
constexpr NodeTypeInfo op::v0::Not::type_info;
// Constructs a logical negation over a single input; validates immediately.
op::v0::Not::Not(const Output<Node>& arg)
: Op({arg})
{
constructor_validate_and_infer_types();
}
// TODO(amprocte): Update this to allow only boolean, for consistency with logical binops.
// Propagates the input's element type and (partial) shape unchanged to the output.
void op::v0::Not::validate_and_infer_types()
{
auto args_et_pshape = ngraph::op::util::validate_and_infer_elementwise_args(this);
element::Type& args_et = std::get<0>(args_et_pshape);
PartialShape& args_pshape = std::get<1>(args_et_pshape);
set_output_type(0, args_et, args_pshape);
}
shared_ptr<Node> op::v0::Not::clone_with_new_inputs(const OutputVector& new_args) const
{
check_new_args_count(this, new_args);
return make_shared<v0::Not>(new_args.at(0));
}
// Delegates to the same notop::evaluate_not reference kernel that
// v1::LogicalNot::evaluate uses above.
bool op::Not::evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const
{
OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::Not::evaluate");
return notop::evaluate_not(inputs[0], outputs[0], shape_size(get_output_shape(0)));
}

View File

@@ -297,6 +297,7 @@ set(MULTI_TEST_SRC
backend/interpolate.in.cpp
backend/log.in.cpp
backend/log_softmax.in.cpp
backend/logical_not.in.cpp
backend/logical_or.in.cpp
backend/logical_xor.in.cpp
backend/lrn.in.cpp
@@ -309,7 +310,6 @@ set(MULTI_TEST_SRC
backend/negative.in.cpp
backend/node_name.in.cpp
backend/normalize_l2.in.cpp
backend/not.in.cpp
backend/non_zero.in.cpp
backend/numeric.in.cpp
backend/one_hot.in.cpp

View File

@@ -49,7 +49,7 @@ NGRAPH_TEST(${BACKEND_NAME}, not)
{
Shape shape{2, 2};
auto A = make_shared<op::Parameter>(element::boolean, shape);
auto f = make_shared<Function>(make_shared<op::Not>(A), ParameterVector{A});
auto f = make_shared<Function>(make_shared<op::v1::LogicalNot>(A), ParameterVector{A});
std::vector<char> a{1, 0, 1, 0};
@@ -63,7 +63,7 @@ NGRAPH_TEST(${BACKEND_NAME}, not_i32)
{
Shape shape{2, 2};
auto A = make_shared<op::Parameter>(element::i32, shape);
auto f = make_shared<Function>(make_shared<op::Not>(A), ParameterVector{A});
auto f = make_shared<Function>(make_shared<op::v1::LogicalNot>(A), ParameterVector{A});
std::vector<int32_t> a{1, 0, 2, 0};

View File

@@ -194,27 +194,6 @@ NGRAPH_TEST(${BACKEND_NAME}, zero_sized_negative)
make_unary_empty_test<op::Negative>("${BACKEND_NAME}");
}
// NOTE(review): deleted by this commit along with v0::Not; the neighboring
// zero-sized tests (e.g. zero_sized_negative above) use make_unary_empty_test.
NGRAPH_TEST(${BACKEND_NAME}, zero_sized_not)
{
// Zero-element tensor: the graph must still compile and execute.
Shape shape{0};
auto A = make_shared<op::Parameter>(element::from<char>(), shape);
auto f = make_shared<Function>(make_shared<op::Not>(A), ParameterVector{A});
auto backend = runtime::Backend::create("${BACKEND_NAME}");
auto a = backend->create_tensor(element::from<char>(), shape);
auto result = backend->create_tensor(element::from<char>(), shape);
auto handle = backend->compile(f);
handle->call_with_validate({result}, {a});
auto in_vec = read_vector<char>(a);
auto out_vec = read_vector<char>(result);
// Both input and output stay empty for a zero-sized tensor.
EXPECT_EQ(in_vec.size(), 0);
EXPECT_EQ(out_vec.size(), 0);
}
NGRAPH_TEST(${BACKEND_NAME}, zero_sized_sign)
{
make_unary_empty_test<op::Sign>("${BACKEND_NAME}");

View File

@@ -1444,11 +1444,11 @@ TEST(constant_folding, const_concat_axis_1_bool_type)
ASSERT_EQ(values_expected, values_out);
}
TEST(constant_folding, const_not)
TEST(constant_folding, const_logical_not)
{
auto constant =
op::Constant::create(element::boolean, Shape{2, 3}, vector<char>{0, 1, 0, 0, 1, 1});
auto logical_not = make_shared<op::Not>(constant);
auto logical_not = make_shared<op::v1::LogicalNot>(constant);
logical_not->set_friendly_name("test");
auto f = make_shared<Function>(logical_not, ParameterVector{});
@@ -1456,7 +1456,7 @@ TEST(constant_folding, const_not)
pass_manager.register_pass<pass::ConstantFolding>();
pass_manager.run_passes(f);
ASSERT_EQ(count_ops_of_type<op::Not>(f), 0);
ASSERT_EQ(count_ops_of_type<op::v1::LogicalNot>(f), 0);
ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
auto new_const =

View File

@@ -1158,36 +1158,6 @@ TEST(eval, evaluate_tanh)
ASSERT_FLOAT_VECTORS_EQ(input, result_val);
}
// NOTE(review): removed with v0::Not; the evaluate_logical_not test below
// covers the v1 replacement.
TEST(eval, evaluate_not)
{
auto p = make_shared<op::Parameter>(element::boolean, Shape{2, 2});
auto op_not = make_shared<op::Not>(p);
auto fun = make_shared<Function>(OutputVector{op_not}, ParameterVector{p});
auto result = make_shared<HostTensor>();
ASSERT_TRUE(fun->evaluate(
{result}, {make_host_tensor<element::Type_t::boolean>(Shape{2, 2}, {1, 0, 1, 0})}));
// Element type is preserved (boolean); every element is flipped.
EXPECT_EQ(result->get_element_type(), element::boolean);
auto result_val = read_vector<char>(result);
vector<char> expec{0, 1, 0, 1};
ASSERT_EQ(result_val, expec);
}
// NOTE(review): removed with v0::Not. Exercises the op on a non-boolean
// (i32) input: nonzero maps to 0, zero maps to 1, and the i32 type is kept.
TEST(eval, evaluate_not_i32)
{
auto p = make_shared<op::Parameter>(element::i32, Shape{2, 2});
auto op_not = make_shared<op::Not>(p);
auto fun = make_shared<Function>(OutputVector{op_not}, ParameterVector{p});
auto result = make_shared<HostTensor>();
ASSERT_TRUE(fun->evaluate(
{result}, {make_host_tensor<element::Type_t::i32>(Shape{2, 2}, {100, 0, -2, 0})}));
EXPECT_EQ(result->get_element_type(), element::i32);
auto result_val = read_vector<int32_t>(result);
// {100, 0, -2, 0} -> {0, 1, 0, 1}
vector<int32_t> expec{0, 1, 0, 1};
ASSERT_EQ(result_val, expec);
}
TEST(eval, evaluate_logical_not)
{
auto p = make_shared<op::Parameter>(element::boolean, Shape{2, 2});

View File

@@ -524,15 +524,6 @@ namespace
EXPECT_FALSE(op::is_binary_elementwise_logical(&node));
}
// NOTE(review): removed with v0::Not. Verifies the op is classified as none
// of the four element-wise categories the is_* predicates test for.
void op_is_Not()
{
op::Not node;
EXPECT_FALSE(op::is_unary_elementwise_arithmetic(&node));
EXPECT_FALSE(op::is_binary_elementwise_arithmetic(&node));
EXPECT_FALSE(op::is_binary_elementwise_comparison(&node));
EXPECT_FALSE(op::is_binary_elementwise_logical(&node));
}
void op_is_NotEqual()
{
op::NotEqual node;

View File

@@ -862,7 +862,6 @@ protected:
break;
}
case OP_TYPEID::LogicalNot_v1:
case OP_TYPEID::Not:
{
size_t element_count = shape_size(node.get_output_shape(0));
reference::logical_not(

View File

@@ -98,7 +98,6 @@ NGRAPH_OP(Minimum, ngraph::op)
NGRAPH_OP(Multiply, ngraph::op)
NGRAPH_OP(MVN, ngraph::op)
NGRAPH_OP(Negative, ngraph::op)
NGRAPH_OP(Not, ngraph::op)
NGRAPH_OP(NotEqual, ngraph::op)
NGRAPH_OP(Or, ngraph::op)
NGRAPH_OP(Parameter, ngraph::op)

View File

@@ -301,13 +301,6 @@ namespace opset0_downgrade
return op_cast_binary_elementwise_node<op::v0::LessEq, op::v1::LessEqual>(node);
}
// NOTE(review): downgrade rule deleted by this commit — with v0::Not gone,
// v1::LogicalNot no longer has a v0 counterpart to be lowered to.
shared_ptr<Node> op_cast(shared_ptr<op::v1::LogicalNot> node)
{
// Rebuild as v0::Not on the same input, then swap it into the graph.
auto replacement_node = make_shared<op::v0::Not>(node->input_value(0));
replace_node(node, replacement_node);
return replacement_node;
}
shared_ptr<Node> op_cast(shared_ptr<op::v1::LogicalOr> node)
{
return op_cast_binary_elementwise_node<op::v0::Or, op::v1::LogicalOr>(node);

View File

@@ -272,13 +272,6 @@ namespace opset1_upgrade
return op_cast_binary_elementwise_node<op::v0::Multiply, op::v1::Multiply>(node);
}
// NOTE(review): upgrade rule deleted by this commit — there is no v0::Not
// left in graphs to upgrade once the operator is removed.
shared_ptr<Node> op_cast(shared_ptr<op::Not> node)
{
// Rebuild as v1::LogicalNot on the same input, then swap it into the graph.
auto replacement_node = make_shared<op::v1::LogicalNot>(node->input_value(0));
replace_node(node, replacement_node);
return replacement_node;
}
shared_ptr<Node> op_cast(shared_ptr<op::NotEqual> node)
{
return op_cast_binary_elementwise_node<op::v0::NotEqual, op::v1::NotEqual>(node);

View File

@@ -536,9 +536,9 @@ TEST(type_prop, logic_arith_compare_partial_et)
return std::make_shared<op::Greater>(param0, param1);
};
auto test_not = [](element::Type et) -> std::shared_ptr<Node> {
auto test_logical_not = [](element::Type et) -> std::shared_ptr<Node> {
auto param = std::make_shared<op::Parameter>(et, Shape{1, 2, 3});
return std::make_shared<op::Not>(param);
return std::make_shared<op::v1::LogicalNot>(param);
};
// Arith ops:
@@ -598,7 +598,7 @@ TEST(type_prop, logic_arith_compare_partial_et)
// int -> !
// boo -> boo
// dyn -> boo
ASSERT_EQ(test_not(element::i32)->get_element_type(), element::i32);
ASSERT_EQ(test_not(element::boolean)->get_element_type(), element::boolean);
ASSERT_EQ(test_not(element::dynamic)->get_element_type(), element::dynamic);
ASSERT_EQ(test_logical_not(element::i32)->get_element_type(), element::i32);
ASSERT_EQ(test_logical_not(element::boolean)->get_element_type(), element::boolean);
ASSERT_EQ(test_logical_not(element::dynamic)->get_element_type(), element::dynamic);
}