Revise mod (#5060)

* clean the FusedOp from mod operation

* add backend and type_prop tests for mod operator

* convert taking autobroadcast to match binary elementwise arithmetic ops

* add type_prop/mod.cpp to CMakeLists.txt

* fix style

* fix style v2

* remove evaluate method and add backend test for negative numbers

* add copyright for type_prop/mod.cpp
This commit is contained in:
Bartek Szmelczynski 2021-04-03 18:07:01 +02:00 committed by GitHub
parent 6a16b70e0e
commit 4120344d11
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 178 additions and 69 deletions

View File

@ -4,11 +4,7 @@
#pragma once
#include "ngraph/node.hpp"
#include "ngraph/op/op.hpp"
#include "ngraph/op/util/fused_op.hpp"
NGRAPH_SUPPRESS_DEPRECATED_START
#include "ngraph/op/util/binary_elementwise_arithmetic.hpp"
namespace ngraph
{
@ -18,34 +14,28 @@ namespace ngraph
{
/// \brief Mod returns an element-wise division remainder with two given tensors applying
/// multi-directional broadcast rules.
class NGRAPH_API Mod : public ngraph::op::util::FusedOp
class NGRAPH_API Mod : public util::BinaryElementwiseArithmetic
{
public:
static constexpr NodeTypeInfo type_info{"Mod", 0};
const NodeTypeInfo& get_type_info() const override { return type_info; }
Mod();
/// \brief Constructs a Mod node.
Mod()
: util::BinaryElementwiseArithmetic(AutoBroadcastSpec::NUMPY)
{
}
///
/// \param A - Dividend tensor
/// \param B - Divisor tensor
/// \param auto_broadcast Auto broadcast specification
Mod(const Output<Node>& A,
const Output<Node>& B,
const AutoBroadcastSpec& auto_broadcast = AutoBroadcastType::NUMPY);
bool visit_attributes(AttributeVisitor& visitor) override;
virtual OutputVector decompose_op() const override;
const AutoBroadcastSpec& auto_broadcast =
AutoBroadcastSpec(AutoBroadcastType::NUMPY));
virtual std::shared_ptr<Node>
clone_with_new_inputs(const OutputVector& new_args) const override;
const AutoBroadcastSpec& get_auto_broadcast() const { return m_auto_broadcast; }
private:
AutoBroadcastSpec m_auto_broadcast;
};
}
} // namespace v1
}
}
NGRAPH_SUPPRESS_DEPRECATED_END

View File

@ -4,66 +4,25 @@
#include "ngraph/op/mod.hpp"
#include "itt.hpp"
#include "ngraph/attribute_visitor.hpp"
#include "ngraph/builder/make_constant.hpp"
#include "ngraph/op/abs.hpp"
#include "ngraph/op/convert.hpp"
#include "ngraph/op/divide.hpp"
#include "ngraph/op/multiply.hpp"
#include "ngraph/op/sign.hpp"
#include "ngraph/op/subtract.hpp"
using namespace std;
using namespace ngraph;
NGRAPH_SUPPRESS_DEPRECATED_START
// ------------------------------ v1 -------------------------------------------
constexpr NodeTypeInfo op::v1::Mod::type_info;
op::v1::Mod::Mod()
: FusedOp()
, m_auto_broadcast()
{
}
op::v1::Mod::Mod(const Output<Node>& A,
const Output<Node>& B,
op::v1::Mod::Mod(const Output<Node>& arg0,
const Output<Node>& arg1,
const AutoBroadcastSpec& auto_broadcast)
: FusedOp({A, B})
, m_auto_broadcast(auto_broadcast)
: BinaryElementwiseArithmetic(arg0, arg1, auto_broadcast)
{
constructor_validate_and_infer_types();
}
bool ngraph::op::v1::Mod::visit_attributes(AttributeVisitor& visitor)
{
NGRAPH_OP_SCOPE(v1_Mod_visit_attributes);
visitor.on_attribute("auto_broadcast", m_auto_broadcast);
return true;
}
OutputVector op::v1::Mod::decompose_op() const
{
const auto dividend = make_shared<op::Abs>(input_value(0));
const auto dividend_sign = make_shared<op::Sign>(input_value(0));
const auto dividend_et = dividend->get_element_type();
const auto divisor = make_shared<op::Abs>(input_value(1));
// truncated(a / b)
auto division = make_shared<op::Convert>(
make_shared<op::v1::Divide>(dividend, divisor, m_auto_broadcast), ngraph::element::i64);
division = make_shared<op::Convert>(division, dividend_et);
// truncated(a / b) * b
const auto multiplication = make_shared<op::v1::Multiply>(division, divisor, m_auto_broadcast);
// a mod b = a - truncated(a / b) * b
const auto mod = make_shared<op::v1::Subtract>(dividend, multiplication, m_auto_broadcast);
// apply sign of dividend
return {make_shared<op::v1::Multiply>(dividend_sign, mod, m_auto_broadcast)};
}
shared_ptr<Node> op::v1::Mod::clone_with_new_inputs(const OutputVector& new_args) const
{
NGRAPH_OP_SCOPE(v1_Mod_clone_with_new_inputs);
return make_shared<Mod>(new_args.at(0), new_args.at(1), m_auto_broadcast);
}
check_new_args_count(this, new_args);
return make_shared<Mod>(new_args.at(0), new_args.at(1), this->get_autob());
}

View File

@ -149,6 +149,7 @@ set(SRC
type_prop/max_pool.cpp
type_prop/minimum.cpp
type_prop/mish.cpp
type_prop/mod.cpp
type_prop/mvn.cpp
type_prop/non_max_suppression.cpp
type_prop/non_zero.cpp
@ -350,6 +351,7 @@ set(MULTI_TEST_SRC
backend/maximum.in.cpp
backend/max_pool.in.cpp
backend/minimum.in.cpp
backend/mod.in.cpp
backend/multiple_backends.in.cpp
backend/multiple_result.in.cpp
backend/multiply.in.cpp

View File

@ -0,0 +1,149 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <algorithm>
#include <cinttypes>
#include <cmath>
#include <cstdlib>
#include <random>
#include <string>
// clang-format off
#ifdef ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS
#define DEFAULT_FLOAT_TOLERANCE_BITS ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS
#endif
#ifdef ${BACKEND_NAME}_DOUBLE_TOLERANCE_BITS
#define DEFAULT_DOUBLE_TOLERANCE_BITS ${BACKEND_NAME}_DOUBLE_TOLERANCE_BITS
#endif
// clang-format on
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/engine/test_engines.hpp"
#include "util/test_case.hpp"
#include "util/test_control.hpp"
using namespace std;
using namespace ngraph;
static string s_manifest = "${MANIFEST}";
using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
NGRAPH_TEST(${BACKEND_NAME}, mod_no_broadcast)
{
    // Identically shaped inputs, x mod x: the remainder is zero everywhere.
    const Shape data_shape{1, 2};
    const auto dividend = make_shared<op::Parameter>(element::f32, data_shape);
    const auto divisor = make_shared<op::Parameter>(element::f32, data_shape);
    const auto function = make_shared<Function>(make_shared<op::v1::Mod>(dividend, divisor),
                                                ParameterVector{dividend, divisor});

    const vector<float> dividend_data{256, 56};
    const vector<float> divisor_data{256, 56};

    auto test_case = test::TestCase<TestEngine>(function);
    test_case.add_multiple_inputs<float>({dividend_data, divisor_data});
    test_case.add_expected_output<float>(data_shape, {0, 0});
    test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, mod_no_broadcast_remainder)
{
    // Identically shaped inputs producing non-zero remainders.
    const Shape data_shape{2, 2};
    const auto dividend = make_shared<op::Parameter>(element::f32, data_shape);
    const auto divisor = make_shared<op::Parameter>(element::f32, data_shape);
    const auto function = make_shared<Function>(make_shared<op::v1::Mod>(dividend, divisor),
                                                ParameterVector{dividend, divisor});

    const vector<float> dividend_data{256, 56, 21, 14};
    const vector<float> divisor_data{112, 56, 6, 8};

    auto test_case = test::TestCase<TestEngine>(function);
    test_case.add_multiple_inputs<float>({dividend_data, divisor_data});
    // 256 % 112 = 32, 56 % 56 = 0, 21 % 6 = 3, 14 % 8 = 6
    test_case.add_expected_output<float>(data_shape, {32, 0, 3, 6});
    test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, mod_broadcast)
{
    // A {1, 2} dividend is NumPy-broadcast against a {3, 2, 2} divisor; the
    // output takes the larger (divisor) shape.
    const Shape dividend_shape{1, 2};
    const Shape divisor_shape{3, 2, 2};
    const auto dividend = make_shared<op::Parameter>(element::f32, dividend_shape);
    const auto divisor = make_shared<op::Parameter>(element::f32, divisor_shape);
    const auto function = make_shared<Function>(make_shared<op::v1::Mod>(dividend, divisor),
                                                ParameterVector{dividend, divisor});

    const vector<float> dividend_data{1, 2};
    const vector<float> divisor_data{5, 6, 7, 8, 2, 3, 1, 5, 6, 7, 1, 3};

    auto test_case = test::TestCase<TestEngine>(function);
    test_case.add_multiple_inputs<float>({dividend_data, divisor_data});
    test_case.add_expected_output<float>(divisor_shape, {1, 2, 1, 2, 1, 2, 0, 2, 1, 2, 0, 2});
    test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, mod_scalars)
{
    // Rank-0 (scalar) operands: 57 mod 13 == 5.
    const Shape scalar_shape{};
    const auto dividend = make_shared<op::Parameter>(element::f32, scalar_shape);
    const auto divisor = make_shared<op::Parameter>(element::f32, scalar_shape);
    const auto function = make_shared<Function>(make_shared<op::v1::Mod>(dividend, divisor),
                                                ParameterVector{dividend, divisor});

    const vector<float> dividend_data{57};
    const vector<float> divisor_data{13};

    auto test_case = test::TestCase<TestEngine>(function);
    test_case.add_multiple_inputs<float>({dividend_data, divisor_data});
    test_case.add_expected_output<float>(scalar_shape, {5});
    test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, mod_negative_numbers)
{
    // Truncated (C-style) mod: the result carries the sign of the dividend,
    // regardless of the divisor's sign.
    const Shape data_shape{2, 2};
    const auto dividend = make_shared<op::Parameter>(element::f32, data_shape);
    const auto divisor = make_shared<op::Parameter>(element::f32, data_shape);
    const auto function = make_shared<Function>(make_shared<op::v1::Mod>(dividend, divisor),
                                                ParameterVector{dividend, divisor});

    const vector<float> dividend_data{-57, -14, -12, -6};
    const vector<float> divisor_data{13, -7, 5, -5};

    auto test_case = test::TestCase<TestEngine>(function);
    test_case.add_multiple_inputs<float>({dividend_data, divisor_data});
    // -57 mod 13 = -5, -14 mod -7 = 0, -12 mod 5 = -2, -6 mod -5 = -1
    test_case.add_expected_output<float>(data_shape, {-5, 0, -2, -1});
    test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, mod_vector_and_scalar)
{
    // A scalar divisor is broadcast against a {2, 2} dividend.
    const Shape dividend_shape{2, 2};
    const Shape divisor_shape{};
    const auto dividend = make_shared<op::Parameter>(element::f32, dividend_shape);
    const auto divisor = make_shared<op::Parameter>(element::f32, divisor_shape);
    const auto function = make_shared<Function>(make_shared<op::v1::Mod>(dividend, divisor),
                                                ParameterVector{dividend, divisor});

    const vector<float> dividend_data{2, 4, 7, 8};
    const vector<float> divisor_data{8};

    auto test_case = test::TestCase<TestEngine>(function);
    test_case.add_multiple_inputs<float>({dividend_data, divisor_data});
    test_case.add_expected_output<float>(dividend_shape, {2, 4, 7, 0});
    test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, mod_in_place)
{
    // Feeds the output of one Mod as both inputs of a second Mod, so a backend
    // may legally reuse the intermediate tensor in place; x mod x == 0 for the
    // non-zero intermediates produced here.
    Shape shape{2, 2};
    auto A = make_shared<op::Parameter>(element::f32, shape);
    auto B = make_shared<op::Parameter>(element::f32, shape);
    auto T = make_shared<op::v1::Mod>(A, B);
    auto T2 = make_shared<op::v1::Mod>(T, T);
    auto f = make_shared<Function>(T2, ParameterVector{A, B});

    vector<float> a{1, 2, 3, 4};
    vector<float> b{5, 6, 7, 8};

    auto test_case = test::TestCase<TestEngine>(f);
    test_case.add_multiple_inputs<float>({a, b});
    // Fixed: the initializer previously had inconsistent comma spacing
    // ({0, 0 ,0 ,0}); values are unchanged.
    test_case.add_expected_output<float>(shape, {0, 0, 0, 0});
    test_case.run();
}

View File

@ -1106,7 +1106,7 @@ namespace
outputs[0]->get_data_ptr<T>(),
inputs[0]->get_shape(),
inputs[1]->get_shape(),
op->get_auto_broadcast());
op->get_autob());
return true;
}

View File

@ -0,0 +1,9 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "arithmetic_ops.hpp"
// Run the shared binary-elementwise-arithmetic type_prop suite against Mod.
using ModTypes = ::testing::Types<ngraph::op::v1::Mod>;
INSTANTIATE_TYPED_TEST_CASE_P(type_prop_mod, ArithmeticOperator, ModTypes);

View File

@ -30,5 +30,5 @@ TEST(attributes, mod_op)
NodeBuilder builder(mod);
auto g_mod = as_type_ptr<opset1::Mod>(builder.create());
EXPECT_EQ(g_mod->get_auto_broadcast(), mod->get_auto_broadcast());
EXPECT_EQ(g_mod->get_autob(), mod->get_autob());
}