revise atan op (#6288)

* revise atan op

* use parametrized visitor API

* use new gtest macros

* Update docs/ops/arithmetic/Atan_1.md

Co-authored-by: Gabriele Galiero Casay <gabriele.galiero.casay@intel.com>

* Update docs/ops/arithmetic/Atan_1.md

Co-authored-by: Gabriele Galiero Casay <gabriele.galiero.casay@intel.com>

* Update ngraph/core/src/op/atan.cpp

Co-authored-by: Gabriele Galiero Casay <gabriele.galiero.casay@intel.com>

* update doc to follow the rules

* create type_prop for atan

* drop op_eval to be covered in backend

* add the missing type prop case

* add integer type ref impl

* fix clang issue

Co-authored-by: Gabriele Galiero Casay <gabriele.galiero.casay@intel.com>
song, bell 2021-07-06 14:45:18 +08:00 committed by GitHub
parent e209a33fcc
commit bee21b5c5b
9 changed files with 67 additions and 24 deletions

View File

@@ -6,31 +6,27 @@
**Short description**: *Atan* performs element-wise inverse tangent (arctangent) operation with given tensor.
**Attributes**:
No attributes available.
**Inputs**
* **1**: An tensor of type *T*. **Required.**
**Outputs**
* **1**: The result of element-wise atan operation. A tensor of type *T*.
**Types**
* *T*: any numeric type.
*atan* does the following with the input tensor *a*:
**Detailed description**: Operation takes one input tensor and performs the element-wise inverse tangent function on a given input tensor, based on the following mathematical formula:
\f[
a_{i} = atan(a_{i})
\f]
**Examples**
**Attributes**: *Atan* operation has no attributes.
*Example 1*
**Inputs**
* **1**: A tensor of type *T* and arbitrary shape. **Required.**
**Outputs**
* **1**: The result of element-wise *Atan* applied to the input tensor. A tensor of type *T* and same shape as the input tensor.
**Types**
* *T*: any supported numeric type.
**Examples**
```xml
<layer ... type="Atan">

View File

@@ -65,6 +65,7 @@ const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes
// List of operations that should be tested also with integer precision
const std::map<ActivationTypes, std::vector<std::vector<float>>> intActivationTypes = {
{Atan, {}},
{Negative, {}},
{Ceiling, {}},
{Cos, {}},

View File

@@ -19,8 +19,7 @@ namespace ngraph
class NGRAPH_API Atan : public util::UnaryElementwiseArithmetic
{
public:
static constexpr NodeTypeInfo type_info{"Atan", 0};
const NodeTypeInfo& get_type_info() const override { return type_info; }
NGRAPH_RTTI_DECLARATION;
/// \brief Constructs an arctan operation.
Atan() = default;

View File

@@ -13,7 +13,8 @@ namespace ngraph
{
namespace reference
{
template <typename T>
template <typename T,
typename std::enable_if<!std::is_integral<T>::value, bool>::type = true>
void atan(const T* arg, T* out, size_t count)
{
for (size_t i = 0; i < count; i++)
@@ -21,6 +22,16 @@ namespace ngraph
out[i] = std::atan(arg[i]);
}
}
template <typename T,
typename std::enable_if<std::is_integral<T>::value, bool>::type = true>
void atan(const T* arg, T* out, size_t count)
{
for (size_t i = 0; i < count; i++)
{
out[i] = std::roundl(std::atan(arg[i]));
}
}
} // namespace reference
} // namespace runtime
} // namespace ngraph
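
The new reference code selects between the two overloads purely through `std::enable_if` on `std::is_integral`, with the integral path rounding the result to the nearest whole number. A minimal, self-contained sketch of the same dispatch pattern (using a hypothetical `atan_ref` name rather than the actual `ngraph::runtime::reference` header) looks like this:

```cpp
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <type_traits>

// Floating-point path: plain element-wise std::atan.
template <typename T,
          typename std::enable_if<!std::is_integral<T>::value, bool>::type = true>
void atan_ref(const T* arg, T* out, std::size_t count)
{
    for (std::size_t i = 0; i < count; i++)
        out[i] = std::atan(arg[i]);
}

// Integral path: the result is rounded to the nearest integer,
// mirroring the std::roundl call added in this commit.
template <typename T,
          typename std::enable_if<std::is_integral<T>::value, bool>::type = true>
void atan_ref(const T* arg, T* out, std::size_t count)
{
    for (std::size_t i = 0; i < count; i++)
        out[i] = static_cast<T>(std::roundl(std::atan(arg[i])));
}

int main()
{
    const std::int32_t in[5] = {-2, -1, 0, 1, 2};
    std::int32_t out[5];
    atan_ref(in, out, 5); // atan(2) ~= 1.107 rad, rounds to 1
    for (std::int32_t v : out)
        std::printf("%d ", v); // prints: -1 -1 0 1 1
    std::printf("\n");
    return 0;
}
```

Overload resolution never becomes ambiguous because exactly one of the two `enable_if` conditions can be satisfied for any given `T`.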

View File

@@ -23,7 +23,7 @@
using namespace std;
using namespace ngraph;
constexpr NodeTypeInfo op::Atan::type_info;
NGRAPH_RTTI_DEFINITION(op::v0::Atan, "Atan", 0, util::UnaryElementwiseArithmetic);
op::Atan::Atan(const Output<Node>& arg)
: UnaryElementwiseArithmetic(arg)

View File

@@ -96,6 +96,7 @@ set(SRC
type_prop/adaptive_max_pool.cpp
type_prop/asin.cpp
type_prop/assign.cpp
type_prop/atan.cpp
type_prop/avg_pool.cpp
type_prop/batch_norm.cpp
type_prop/batch_to_space.cpp
@@ -228,6 +229,7 @@ set(SRC
visitors/value_map.cpp
visitors/op/adaptive_avg_pool.cpp
visitors/op/adaptive_max_pool.cpp
visitors/op/atan.cpp
visitors/op/batch_norm.cpp
visitors/op/broadcast.cpp
visitors/op/bucketize.cpp

View File

@@ -31,7 +31,7 @@ using namespace ngraph;
static string s_manifest = "${MANIFEST}";
using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
NGRAPH_TEST(${BACKEND_NAME}, atan)
NGRAPH_TEST(${BACKEND_NAME}, atan_float)
{
Shape shape{11};
auto A = make_shared<op::Parameter>(element::f32, shape);
@@ -53,3 +53,16 @@ NGRAPH_TEST(${BACKEND_NAME}, atan)
1.32581766f});
test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, atan_int)
{
Shape shape{5};
auto A = make_shared<op::Parameter>(element::i32, shape);
auto f = make_shared<Function>(make_shared<op::Atan>(A), ParameterVector{A});
auto test_case = test::TestCase<TestEngine>(f);
test_case.add_input<int32_t>({-2, -1, 0, 1, 2});
test_case.add_expected_output<int32_t>(shape,
{-1, -1, 0, 1, 1});
test_case.run();
}
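
The expected integer outputs follow from the rounding rule in the integer reference implementation: atan(±1) ≈ ±0.7854 rad rounds to ±1, and atan(±2) ≈ ±1.1071 rad also rounds to ±1, which is why every non-zero input in this test maps to ±1.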

View File

@@ -0,0 +1,9 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "unary_ops.hpp"
using Type = ::testing::Types<ngraph::op::Atan>;
INSTANTIATE_TYPED_TEST_SUITE_P(type_prop_atan, UnaryOperator, Type);
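
The "new gtest macros" mentioned in the commit message are the `*_TEST_SUITE_P` spellings that replace the deprecated `*_TEST_CASE_P` ones. A generic sketch of the typed-test pattern (with a hypothetical fixture; the real checks live in `unary_ops.hpp`, which is not shown in this diff):

```cpp
#include <gtest/gtest.h>

// Parameterized-by-type suite, declared once and instantiated per operation.
template <typename T>
class UnaryOperatorSketch : public ::testing::Test
{
};

TYPED_TEST_SUITE_P(UnaryOperatorSketch);

TYPED_TEST_P(UnaryOperatorSketch, is_default_constructible)
{
    TypeParam op{};
    (void)op; // the real suite would build the op and check its inferred element type and shape
}

REGISTER_TYPED_TEST_SUITE_P(UnaryOperatorSketch, is_default_constructible);

// One instantiation per operation, analogous to
// INSTANTIATE_TYPED_TEST_SUITE_P(type_prop_atan, UnaryOperator, Type) above.
using SketchTypes = ::testing::Types<int, double>;
INSTANTIATE_TYPED_TEST_SUITE_P(sketch, UnaryOperatorSketch, SketchTypes);
```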

View File

@@ -0,0 +1,12 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "unary_ops.hpp"
using Types = ::testing::Types<UnaryOperatorType<ngraph::op::v0::Atan, element::f32>>;
INSTANTIATE_TYPED_TEST_SUITE_P(visitor_without_attribute,
UnaryOperatorVisitor,
Types,
UnaryOperatorTypeName);