Cosh revise. (#6516)

Szymon Durawa 2021-07-08 08:03:59 +02:00 committed by GitHub
parent 23f4bf4e70
commit baef88c8fb
9 changed files with 71 additions and 44 deletions

View File

@@ -4,33 +4,29 @@
**Category**: Arithmetic unary operation
**Short description**: *Cosh* performs element-wise hyperbolic cosine operation with given tensor.
**Short description**: *Cosh* performs an element-wise hyperbolic cosine operation on a given input tensor.
**Attributes**:
No attributes available.
**Inputs**
* **1**: An tensor of type *T*. **Required.**
**Outputs**
* **1**: The result of element-wise cosh operation. A tensor of type *T*.
**Types**
* *T*: any numeric type.
*Cosh* does the following with the input tensor *a*:
**Detailed description**: *Cosh* performs an element-wise hyperbolic cosine (cosh) operation on a given input tensor, based on the following mathematical formula:
\f[
a_{i} = cosh(a_{i})
\f]
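A few sample values, for illustration only (rounded to four decimal places; not part of the original spec):
\f[
cosh(0) = 1, \quad cosh(1) \approx 1.5431, \quad cosh(2) \approx 3.7622, \quad cosh(5) \approx 74.2100
\f]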
**Examples**
**Attributes**: *Cosh* operation has no attributes.
*Example 1*
**Inputs**
* **1**: A tensor of type *T* and arbitrary shape. **Required.**
**Outputs**
* **1**: The result of element-wise *Cosh* operation. A tensor of type *T* and the same shape as the input tensor.
**Types**
* *T*: any numeric type.
**Example**
```xml
<layer ... type="Cosh">

View File

@@ -24,6 +24,7 @@ VERIFIED_OP_REFERENCES = [
'Convolution-1',
'Constant-1',
'Cos-1',
'Cosh-1',
'DeformableConvolution-1',
'DeformablePSROIPooling-1',
'DetectionOutput-1',

View File

@@ -16,8 +16,8 @@ namespace ngraph
class NGRAPH_API Cosh : public util::UnaryElementwiseArithmetic
{
public:
static constexpr NodeTypeInfo type_info{"Cosh", 0};
const NodeTypeInfo& get_type_info() const override { return type_info; }
NGRAPH_RTTI_DECLARATION;
/// \brief Constructs a hyperbolic cosine operation.
Cosh() = default;
/// \brief Constructs a hyperbolic cosine operation.

View File

@@ -13,7 +13,8 @@ namespace ngraph
{
namespace reference
{
template <typename T>
template <typename T,
typename std::enable_if<!std::is_integral<T>::value, bool>::type = true>
void cosh(const T* arg, T* out, size_t count)
{
for (size_t i = 0; i < count; i++)
@@ -21,6 +22,16 @@ namespace ngraph
out[i] = std::cosh(arg[i]);
}
}
template <typename T,
typename std::enable_if<std::is_integral<T>::value, bool>::type = true>
void cosh(const T* arg, T* out, size_t count)
{
for (size_t i = 0; i < count; i++)
{
out[i] = std::roundl(std::cosh(arg[i]));
}
}
} // namespace reference
} // namespace runtime
} // namespace ngraph
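Below is a minimal, illustrative sketch (not part of the commit) of calling the two reference overloads directly, assuming the in-tree include path `ngraph/runtime/reference/cosh.hpp` shown in the diff. The floating-point overload stores `std::cosh` results as-is, while the integral overload rounds them to the nearest integer, which is what the integer backend test further down expects ({2, 74, 4, 10, 10} for inputs {1, 5, 2, 3, 3}).

```cpp
// Illustrative usage of the two ngraph::runtime::reference::cosh overloads
// (not part of this commit).
#include <cstdint>
#include <iostream>
#include <vector>

#include "ngraph/runtime/reference/cosh.hpp"

int main()
{
    // Floating-point overload: std::cosh applied per element.
    std::vector<float> f_in{0.0f, 1.0f, 2.0f};
    std::vector<float> f_out(f_in.size());
    ngraph::runtime::reference::cosh(f_in.data(), f_out.data(), f_in.size());
    // f_out is approximately {1.0, 1.5431, 3.7622}

    // Integral overload: each cosh value is rounded to the nearest integer,
    // e.g. cosh(1) ~ 1.5431 -> 2, cosh(5) ~ 74.21 -> 74.
    std::vector<int32_t> i_in{1, 5, 2, 3, 3};
    std::vector<int32_t> i_out(i_in.size());
    ngraph::runtime::reference::cosh(i_in.data(), i_out.data(), i_in.size());
    // i_out is {2, 74, 4, 10, 10}, matching the cosh_int backend test below.

    for (float v : f_out)
        std::cout << v << ' ';
    std::cout << '\n';
    for (int32_t v : i_out)
        std::cout << v << ' ';
    std::cout << '\n';
}
```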

View File

@@ -5,16 +5,16 @@
#include "itt.hpp"
#include "ngraph/op/cosh.hpp"
#include "ngraph/op/multiply.hpp"
#include "ngraph/op/sinh.hpp"
#include "ngraph/runtime/host_tensor.hpp"
#include "ngraph/runtime/reference/cosh.hpp"
#include "ngraph/validation_util.hpp"
using namespace std;
using namespace ngraph;
constexpr NodeTypeInfo op::Cosh::type_info;
NGRAPH_RTTI_DEFINITION(op::v0::Cosh, "Cosh", 0, util::UnaryElementwiseArithmetic);
op::Cosh::Cosh(const Output<Node>& arg)
: UnaryElementwiseArithmetic(arg)
@@ -68,6 +68,7 @@ namespace coshop
bool op::Cosh::evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const
{
NGRAPH_OP_SCOPE(v0_Cosh_evaluate);
NGRAPH_CHECK(validate_host_tensor_vector(outputs, 1) && validate_host_tensor_vector(inputs, 1));
return coshop::evaluate_cosh(inputs[0], outputs[0], shape_size(get_output_shape(0)));
}
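For context, a hedged sketch of exercising this `evaluate` path directly on host tensors (illustrative only, not part of the commit; it assumes the public `ngraph.hpp` header and the `HostTensor` read/write API of that release):

```cpp
// Illustrative sketch (not from this commit): running op::v0::Cosh::evaluate
// directly on host tensors.
#include <memory>
#include <vector>

#include "ngraph/ngraph.hpp"
#include "ngraph/runtime/host_tensor.hpp"

using namespace ngraph;

int main()
{
    auto arg = std::make_shared<op::Parameter>(element::f32, Shape{3});
    auto cosh = std::make_shared<op::v0::Cosh>(arg);

    std::vector<float> input{0.0f, 1.0f, 2.0f};
    auto in = std::make_shared<runtime::HostTensor>(element::f32, Shape{3});
    in->write(input.data(), input.size() * sizeof(float));

    auto out = std::make_shared<runtime::HostTensor>(element::f32, Shape{3});
    // evaluate() first runs the NGRAPH_CHECK shown above (exactly one input and
    // one output tensor), then dispatches to the reference cosh kernel.
    cosh->evaluate({out}, {in});

    std::vector<float> result(3);
    out->read(result.data(), result.size() * sizeof(float));
    // result is approximately {1.0, 1.5431, 3.7622}
}
```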

View File

@@ -110,6 +110,7 @@ set(SRC
type_prop/convolution.cpp
type_prop/convolution_backprop_data.cpp
type_prop/cos.cpp
type_prop/cosh.cpp
type_prop/ctc_greedy_decoder.cpp
type_prop/ctc_greedy_decoder_seq_len.cpp
type_prop/ctc_loss.cpp
@@ -240,6 +241,7 @@ set(SRC
visitors/op/convert.cpp
visitors/op/convolution_backprop.cpp
visitors/op/cos.cpp
visitors/op/cosh.cpp
visitors/op/cum_sum.cpp
visitors/op/deformable_convolution.cpp
visitors/op/deformable_psroi_pooling.cpp

View File

@@ -2,23 +2,6 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <algorithm>
#include <cinttypes>
#include <cmath>
#include <cstdlib>
#include <random>
#include <string>
// clang-format off
#ifdef ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS
#define DEFAULT_FLOAT_TOLERANCE_BITS ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS
#endif
#ifdef ${BACKEND_NAME}_DOUBLE_TOLERANCE_BITS
#define DEFAULT_DOUBLE_TOLERANCE_BITS ${BACKEND_NAME}_DOUBLE_TOLERANCE_BITS
#endif
// clang-format on
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/engine/test_engines.hpp"
@@ -31,7 +14,7 @@ using namespace ngraph;
static string s_manifest = "${MANIFEST}";
using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
NGRAPH_TEST(${BACKEND_NAME}, cosh)
NGRAPH_TEST(${BACKEND_NAME}, cosh_float)
{
Shape shape{6};
auto A = make_shared<op::Parameter>(element::f32, shape);
@@ -49,3 +32,16 @@ NGRAPH_TEST(${BACKEND_NAME}, cosh)
test_case.add_expected_output<float>(shape, expected);
test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, cosh_int)
{
Shape shape{5};
auto A = make_shared<op::Parameter>(element::i32, shape);
auto f = make_shared<Function>(make_shared<op::Cosh>(A), ParameterVector{A});
auto test_case = test::TestCase<TestEngine>(f);
test_case.add_input<int32_t>({1, 5, 2, 3, 3});
test_case.add_expected_output<int32_t>(shape,
{2, 74, 4, 10, 10});
test_case.run();
}

View File

@@ -0,0 +1,9 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "unary_ops.hpp"
using Type = ::testing::Types<ngraph::op::Cosh>;
INSTANTIATE_TYPED_TEST_SUITE_P(type_prop_cosh, UnaryOperator, Type);

View File

@@ -0,0 +1,11 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "unary_ops.hpp"
using Type = ::testing::Types<UnaryOperatorType<ngraph::op::v0::Cosh, element::f32>>;
INSTANTIATE_TYPED_TEST_SUITE_P(visitor_without_attribute,
UnaryOperatorVisitor,
Type,
UnaryOperatorTypeName);