IsInf operator (#13383)

This commit is contained in:
Mateusz Mikolajczyk
2022-10-20 19:59:01 +02:00
committed by GitHub
parent 2f982b9490
commit ffc74c8fe2
31 changed files with 985 additions and 0 deletions

View File

@@ -92,6 +92,7 @@
If-8 <openvino_docs_ops_infrastructure_If_8>
Interpolate-1 <openvino_docs_ops_image_Interpolate_1>
Interpolate-4 <openvino_docs_ops_image_Interpolate_4>
IsInf-10 <openvino_docs_ops_comparison_IsInf_10>
IsNaN-10 <openvino_docs_ops_comparison_IsNaN_10>
LRN-1 <openvino_docs_ops_normalization_LRN_1>
LSTMCell-1 <openvino_docs_ops_sequence_LSTMCell_1>

View File

@@ -0,0 +1,63 @@
# IsInf {#openvino_docs_ops_comparison_IsInf_10}
**Versioned name**: *IsInf-10*
**Category**: *Comparison*
**Short description**: *IsInf* performs element-wise mapping of infinite values to True.
**Detailed description**: *IsInf* performs element-wise mapping of infinite values to true and other values to false. Attributes `detect_negative` and `detect_positive` can be used to control the mapping of negative and positive infinities. Setting both `detect_negative` and `detect_positive` to false will map all values to false.
**Attributes**
* *detect_negative*
* **Description**: specifies rules used for mapping values with negative infinity.
* **Range of values**:
* `false` - map negative infinity to `false`
* `true` - map negative infinity to `true`
* **Type**: `boolean`
* **Default value**: `true`
* **Required**: *no*
* *detect_positive*
* **Description**: specifies rules used for mapping values with positive infinity.
* **Range of values**:
* `false` - map positive infinity to `false`
* `true` - map positive infinity to `true`
* **Type**: `boolean`
* **Default value**: `true`
* **Required**: *no*
**Inputs**
* **1**: `data` - Input tensor of type `T_IN` with data and arbitrary shape. **Required.**
**Outputs**
* **1**: The result of the element-wise mapping of infinite values applied to the input tensor. A tensor of the `boolean` type and shape equal to the input tensor.
**Types**
* **T_IN**: any supported floating-point type.
**Example**
```xml
<layer ... type="IsInf" ...>
<data detect_negative="true" detect_positive="true"/>
<input>
<port id="0" precision="FP32">
<dim>256</dim>
<dim>128</dim>
</port>
</input>
<output>
        <port id="1" precision="BOOL">
<dim>256</dim>
<dim>128</dim>
</port>
</output>
</layer>
```

View File

@@ -87,6 +87,7 @@ declared in `namespace opset10`.
* [If](condition/If_8.md)
* [Interpolate](image/Interpolate_4.md)
* [IRDFT](signals/IRDFT_9.md)
* [IsInf](comparison/IsInf_10.md)
* [IsNaN](comparison/IsNaN_10.md)
* [Less](comparison/Less_1.md)
* [LessEqual](comparison/LessEqual_1.md)

View File

@@ -91,6 +91,7 @@ from ngraph.opset10 import idft
from ngraph.opset10 import if_op
from ngraph.opset10 import interpolate
from ngraph.opset10 import irdft
from ngraph.opset10 import is_inf
from ngraph.opset10 import is_nan
from ngraph.opset10 import i420_to_bgr
from ngraph.opset10 import i420_to_rgb

View File

@@ -75,6 +75,7 @@ from ngraph.opset7.ops import idft
from ngraph.opset8.ops import if_op
from ngraph.opset10.ops import interpolate
from ngraph.opset9.ops import irdft
from ngraph.opset10.ops import is_inf
from ngraph.opset10.ops import is_nan
from ngraph.opset8.ops import i420_to_bgr
from ngraph.opset8.ops import i420_to_rgb

View File

@@ -85,6 +85,36 @@ def interpolate(
return _get_node_factory_opset4().create("Interpolate", inputs, attrs)
@nameable_op
def is_inf(
    data: NodeInput,
    attributes: Optional[dict] = None,
    name: Optional[str] = None,
) -> Node:
    """Create an IsInf node that maps infinite elements of the input to True.

    :param data: The input tensor.
    :param attributes: Optional dictionary with IsInf attributes:

        * detect_negative - whether negative infinities are mapped to True
          in the output map. Range of values: True, False.
          Default value: True. Required: no.
        * detect_positive - whether positive infinities are mapped to True
          in the output map. Range of values: True, False.
          Default value: True. Required: no.

    :param name: Optional name of the node.
    :return: A new IsInf node.
    """
    # An absent/empty attribute dict means "use the operator defaults".
    return _get_node_factory_opset10().create("IsInf", as_nodes(data), attributes or {})
@nameable_op
def is_nan(data: NodeInput, name: Optional[str] = None) -> Node:
"""Performs element-wise mapping from NaN to True. Other values are mapped to False.

View File

@@ -76,6 +76,7 @@ from openvino.runtime.opset7.ops import idft
from openvino.runtime.opset8.ops import if_op
from openvino.runtime.opset10.ops import interpolate
from openvino.runtime.opset9.ops import irdft
from openvino.runtime.opset10.ops import is_inf
from openvino.runtime.opset10.ops import is_nan
from openvino.runtime.opset8.ops import i420_to_bgr
from openvino.runtime.opset8.ops import i420_to_rgb

View File

@@ -85,6 +85,36 @@ def interpolate(
return _get_node_factory_opset4().create("Interpolate", inputs, attrs)
@nameable_op
def is_inf(
    data: NodeInput,
    attributes: Optional[dict] = None,
    name: Optional[str] = None,
) -> Node:
    """Create an IsInf node that maps infinite elements of the input to True.

    :param data: The input tensor.
    :param attributes: Optional dictionary with IsInf attributes:

        * detect_negative - whether negative infinities are mapped to True
          in the output map. Range of values: True, False.
          Default value: True. Required: no.
        * detect_positive - whether positive infinities are mapped to True
          in the output map. Range of values: True, False.
          Default value: True. Required: no.

    :param name: Optional name of the node.
    :return: A new IsInf node.
    """
    # An absent/empty attribute dict means "use the operator defaults".
    return _get_node_factory_opset10().create("IsInf", as_nodes(data), attributes or {})
@nameable_op
def is_nan(data: NodeInput, name: Optional[str] = None) -> Node:
"""Performs element-wise mapping from NaN to True. Other values are mapped to False.

View File

@@ -2201,6 +2201,55 @@ def test_interpolate_opset10(dtype, expected_shape, shape_calculation_mode):
assert list(node.get_output_shape(0)) == expected_shape
def test_is_inf_opset10_default():
    """IsInf with default attributes detects both positive and negative infinity."""
    input_shape = [2, 2, 2, 2]
    # ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24;
    # ``np.float64`` is the exact replacement.
    input_node = ov.parameter(input_shape, dtype=np.float64, name="InputData")

    node = ov_opset10.is_inf(input_node)

    assert node.get_type_name() == "IsInf"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == input_shape

    node_attributes = node.get_attributes()
    assert node_attributes["detect_positive"] is True
    assert node_attributes["detect_negative"] is True
def test_is_inf_opset10_custom_attribute():
    """Overriding one attribute leaves the other at its default."""
    input_shape = [2, 2, 2]
    # ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24;
    # ``np.float64`` is the exact replacement.
    input_node = ov.parameter(input_shape, dtype=np.float64, name="InputData")
    attributes = {
        "detect_positive": False,
    }

    node = ov_opset10.is_inf(input_node, attributes)

    assert node.get_type_name() == "IsInf"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == input_shape

    node_attributes = node.get_attributes()
    assert node_attributes["detect_positive"] is False
    assert node_attributes["detect_negative"] is True
def test_is_inf_opset10_custom_all_attributes():
    """Both attributes can be set explicitly and are reflected on the node."""
    input_shape = [2, 2, 2]
    # ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24;
    # ``np.float64`` is the exact replacement.
    input_node = ov.parameter(input_shape, dtype=np.float64, name="InputData")
    attributes = {
        "detect_negative": False,
        "detect_positive": True,
    }

    node = ov_opset10.is_inf(input_node, attributes)

    assert node.get_type_name() == "IsInf"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == input_shape

    node_attributes = node.get_attributes()
    assert node_attributes["detect_positive"] is True
    assert node_attributes["detect_negative"] is False
def test_in_nan_opset10():
input_shape = [1, 2, 3, 4]
input_node = ov.parameter(input_shape, np.float, name="InputData")

View File

@@ -2266,6 +2266,55 @@ def test_interpolate_opset10(dtype, expected_shape, shape_calculation_mode):
assert list(node.get_output_shape(0)) == expected_shape
def test_is_inf_opset10_default():
    """IsInf with default attributes detects both positive and negative infinity."""
    input_shape = [2, 2, 2, 2]
    # ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24;
    # ``np.float64`` is the exact replacement.
    input_node = ng.parameter(input_shape, dtype=np.float64, name="InputData")

    node = ng_opset10.is_inf(input_node)

    assert node.get_type_name() == "IsInf"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == input_shape

    node_attributes = node.get_attributes()
    assert node_attributes["detect_positive"] is True
    assert node_attributes["detect_negative"] is True
def test_is_inf_opset10_custom_attribute():
    """Overriding one attribute leaves the other at its default."""
    input_shape = [2, 2, 2]
    # ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24;
    # ``np.float64`` is the exact replacement.
    input_node = ng.parameter(input_shape, dtype=np.float64, name="InputData")
    attributes = {
        "detect_positive": False,
    }

    node = ng_opset10.is_inf(input_node, attributes)

    assert node.get_type_name() == "IsInf"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == input_shape

    node_attributes = node.get_attributes()
    assert node_attributes["detect_positive"] is False
    assert node_attributes["detect_negative"] is True
def test_is_inf_opset10_custom_all_attributes():
    """Both attributes can be set explicitly and are reflected on the node."""
    input_shape = [2, 2, 2]
    # ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24;
    # ``np.float64`` is the exact replacement.
    input_node = ng.parameter(input_shape, dtype=np.float64, name="InputData")
    attributes = {
        "detect_negative": False,
        "detect_positive": True,
    }

    node = ng_opset10.is_inf(input_node, attributes)

    assert node.get_type_name() == "IsInf"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == input_shape

    node_attributes = node.get_attributes()
    assert node_attributes["detect_positive"] is True
    assert node_attributes["detect_negative"] is False
def test_in_nan_opset10():
input_shape = [1, 2, 3, 4]
input_node = ng.parameter(input_shape, np.float, name="InputData")

View File

@@ -0,0 +1,16 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "ngraph/op/op.hpp"
#include "openvino/op/is_inf.hpp"
namespace ngraph {
namespace op {
namespace v10 {
// Backward-compatibility alias: exposes ov::op::v10::IsInf under the legacy
// ngraph namespace so existing ngraph-based code can use the new operator.
using ov::op::v10::IsInf;
}  // namespace v10
}  // namespace op
}  // namespace ngraph

View File

@@ -83,6 +83,7 @@
#include "ngraph/op/if.hpp"
#include "ngraph/op/interpolate.hpp"
#include "ngraph/op/irdft.hpp"
#include "ngraph/op/is_inf.hpp"
#include "ngraph/op/is_nan.hpp"
#include "ngraph/op/less.hpp"
#include "ngraph/op/less_eq.hpp"

View File

@@ -0,0 +1,63 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/op/op.hpp"
namespace ov {
namespace op {
namespace v10 {
/// \brief Boolean mask that maps infinite values to true.
/// \ingroup ov_ops_cpp_api
class OPENVINO_API IsInf : public Op {
public:
    OPENVINO_OP("IsInf", "opset10");

    /// \brief A Structure which contains all IsInf attributes
    struct Attributes {
        // A flag which specifies whether to map negative infinities to true.
        // If set to false, negative infinity will be mapped to false.
        bool detect_negative = true;
        // A flag which specifies whether to map positive infinities to true.
        // If set to false, positive infinity will be mapped to false.
        bool detect_positive = true;

        Attributes() = default;
        Attributes(bool detect_negative, bool detect_positive)
            : detect_negative{detect_negative},
              detect_positive{detect_positive} {}
    };

    IsInf() = default;
    /// \brief Constructs a IsInf operation with default attributes
    ///
    /// \param data Input data tensor
    IsInf(const Output<Node>& data);
    /// \brief Constructs a IsInf operation
    ///
    /// \param data       Input data tensor
    /// \param attributes IsInf attributes
    IsInf(const Output<Node>& data, const Attributes& attributes);

    bool visit_attributes(AttributeVisitor& visitor) override;
    void validate_and_infer_types() override;
    std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;

    /// \brief Returns the current IsInf attributes.
    const Attributes& get_attributes() const {
        return m_attributes;
    }
    /// \brief Replaces the IsInf attributes.
    void set_attributes(const Attributes& attributes) {
        m_attributes = attributes;
    }

private:
    Attributes m_attributes = {};
};
} // namespace v10
} // namespace op
} // namespace ov

View File

@@ -82,6 +82,7 @@
#include "openvino/op/if.hpp"
#include "openvino/op/interpolate.hpp"
#include "openvino/op/irdft.hpp"
#include "openvino/op/is_inf.hpp"
#include "openvino/op/is_nan.hpp"
#include "openvino/op/less.hpp"
#include "openvino/op/less_eq.hpp"

View File

@@ -198,4 +198,5 @@ _OPENVINO_OP_REG(GenerateProposals, ov::op::v9)
_OPENVINO_OP_REG(MulticlassNms, ov::op::v9)
// New operations added in opset10
_OPENVINO_OP_REG(IsInf, ov::op::v10)
_OPENVINO_OP_REG(IsNaN, ov::op::v10)

View File

@@ -0,0 +1,40 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "openvino/op/is_inf.hpp"
#include "itt.hpp"
namespace ov {
// Constructs IsInf with default attributes (both infinities are detected).
op::v10::IsInf::IsInf(const Output<Node>& data) : op::Op{{data}} {
    constructor_validate_and_infer_types();
}
// Constructs IsInf with explicitly provided detect_negative/detect_positive flags.
op::v10::IsInf::IsInf(const Output<Node>& data, const Attributes& attributes)
    : op::Op{{data}},
      m_attributes{attributes} {
    constructor_validate_and_infer_types();
}
// Registers both boolean flags with the visitor under their spec-defined names,
// enabling (de)serialization and attribute introspection.
bool op::v10::IsInf::visit_attributes(AttributeVisitor& visitor) {
    OV_OP_SCOPE(v10_IsInf_visit_attributes);
    visitor.on_attribute("detect_negative", m_attributes.detect_negative);
    visitor.on_attribute("detect_positive", m_attributes.detect_positive);
    return true;
}
// Validates that the input is floating-point (or still dynamic) and sets the
// output to a boolean tensor with the same (partial) shape as the input.
void op::v10::IsInf::validate_and_infer_types() {
    OV_OP_SCOPE(v10_IsInf_validate_and_infer_types);
    NODE_VALIDATION_CHECK(this,
                          get_input_element_type(0).is_dynamic() || get_input_element_type(0).is_real(),
                          "The element type of the input tensor must be a floating point number.");
    set_output_type(0, element::boolean, get_input_partial_shape(0));
}
// Creates a copy of this node over new inputs, preserving the attribute values.
std::shared_ptr<Node> op::v10::IsInf::clone_with_new_inputs(const OutputVector& new_args) const {
    OV_OP_SCOPE(v10_IsInf_clone_with_new_inputs);
    check_new_args_count(this, new_args);
    return std::make_shared<op::v10::IsInf>(new_args.at(0), this->get_attributes());
}
} // namespace ov

View File

@@ -172,6 +172,7 @@ set(SRC
type_prop/if.cpp
type_prop/interpolate.cpp
type_prop/irdft.cpp
type_prop/is_inf.cpp
type_prop/is_nan.cpp
type_prop/logical_and.cpp
type_prop/logical_not.cpp
@@ -341,6 +342,7 @@ set(SRC
visitors/op/if.cpp
visitors/op/idft.cpp
visitors/op/irdft.cpp
visitors/op/is_inf.cpp
visitors/op/less_equal.cpp
visitors/op/less.cpp
visitors/op/log.cpp

View File

@@ -8,6 +8,7 @@
#include "openvino/op/op.hpp"
#include "openvino/opsets/opset1.hpp"
#include "openvino/opsets/opset10.hpp"
#include "openvino/opsets/opset2.hpp"
#include "openvino/opsets/opset3.hpp"
#include "openvino/opsets/opset4.hpp"
@@ -161,6 +162,12 @@ TEST(opset, opset9_dump) {
ASSERT_EQ(173, opset.get_types_info().size());
}
// Smoke test: opset10 types are reachable and basic type queries work.
TEST(opset, opset10) {
    auto op = std::make_shared<ov::opset10::Parameter>();
    ASSERT_NE(nullptr, op);
    EXPECT_TRUE(ov::op::util::is_parameter(op));
}
class MyOpOld : public ov::op::Op {
public:
static constexpr ov::DiscreteTypeInfo type_info{"MyOpOld", static_cast<uint64_t>(0)};

View File

@@ -0,0 +1,141 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "gtest/gtest.h"
#include "openvino/opsets/opset10.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ov;
using namespace ov::opset10;
// Default-constructed IsInf: boolean output, shape propagated unchanged.
TEST(type_prop, is_inf_default) {
    const auto data = make_shared<Parameter>(element::f32, PartialShape{1, 64, 256, 256});
    const auto is_inf = make_shared<IsInf>(data);

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape({1, 64, 256, 256}))
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// A dynamic batch dimension must survive shape inference.
TEST(type_prop, is_inf_dynamic_batch) {
    const auto data = make_shared<Parameter>(element::f32, PartialShape{Dimension::dynamic(), 64, 256, 256});
    const auto is_inf = make_shared<IsInf>(data, IsInf::Attributes{});

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape({Dimension::dynamic(), 64, 256, 256}))
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// Rank-1 single-element input.
TEST(type_prop, is_inf_scalar) {
    const auto data = make_shared<Parameter>(element::f32, PartialShape{1});
    const auto is_inf = make_shared<IsInf>(data, IsInf::Attributes{});

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape({1}))
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// bf16 is a supported floating-point input type.
TEST(type_prop, is_inf_bfloat16) {
    const auto data = make_shared<Parameter>(element::bf16, PartialShape{1, 64, 256, 256});
    const auto is_inf = make_shared<IsInf>(data, IsInf::Attributes{});

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape({1, 64, 256, 256}))
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// f16 is a supported floating-point input type.
TEST(type_prop, is_inf_float16) {
    const auto data = make_shared<Parameter>(element::f16, PartialShape{1, 64, 256, 256});
    const auto is_inf = make_shared<IsInf>(data, IsInf::Attributes{});

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape({1, 64, 256, 256}))
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// f64 is a supported floating-point input type.
TEST(type_prop, is_inf_float64) {
    const auto data = make_shared<Parameter>(element::f64, PartialShape{1, 64, 256, 256});
    const auto is_inf = make_shared<IsInf>(data, IsInf::Attributes{});

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape({1, 64, 256, 256}))
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// Interval (bounded dynamic) dimensions are propagated as-is.
TEST(type_prop, is_inf_interval) {
    const auto data = make_shared<Parameter>(element::f32, PartialShape{Dimension(2, 4), Dimension(-1, 5)});
    const auto is_inf = make_shared<IsInf>(data, IsInf::Attributes{});

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape({Dimension(2, 4), Dimension(-1, 5)}))
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// Fully dynamic input rank/shape stays fully dynamic on the output.
TEST(type_prop, is_inf_dynamic) {
    const auto data = make_shared<Parameter>(element::f32, PartialShape::dynamic());
    const auto is_inf = make_shared<IsInf>(data, IsInf::Attributes{});

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape::dynamic())
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// detect_positive=false: only negative infinities remain detectable; type
// propagation must be unaffected by the attribute values.
TEST(type_prop, is_inf_negative) {
    const auto data = make_shared<Parameter>(element::f32, PartialShape::dynamic());
    IsInf::Attributes attributes{};
    attributes.detect_positive = false;
    const auto is_inf = make_shared<IsInf>(data, attributes);

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape::dynamic())
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// detect_negative=false: only positive infinities remain detectable.
TEST(type_prop, is_inf_positive) {
    const auto data = make_shared<Parameter>(element::f32, PartialShape::dynamic());
    IsInf::Attributes attributes{};
    attributes.detect_negative = false;
    const auto is_inf = make_shared<IsInf>(data, attributes);

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape::dynamic())
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// Both flags explicitly true (same as the defaults).
TEST(type_prop, is_inf_all) {
    const auto data = make_shared<Parameter>(element::f32, PartialShape::dynamic());
    IsInf::Attributes attributes{};
    attributes.detect_positive = true;
    attributes.detect_negative = true;
    const auto is_inf = make_shared<IsInf>(data, attributes);

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape::dynamic())
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

// Both flags false: the op maps everything to false, but typing is unchanged.
TEST(type_prop, is_inf_none) {
    const auto data = make_shared<Parameter>(element::f32, PartialShape::dynamic());
    IsInf::Attributes attributes{};
    attributes.detect_positive = false;
    attributes.detect_negative = false;
    const auto is_inf = make_shared<IsInf>(data, attributes);

    EXPECT_EQ(is_inf->get_element_type(), element::boolean)
        << "The output element type of IsInf should always be boolean";
    EXPECT_EQ(is_inf->get_output_partial_shape(0), PartialShape::dynamic())
        << "The output shape of IsInf is incorrect, it should be the same as shape of input data";
}

View File

@@ -0,0 +1,85 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/op/util/attr_types.hpp"
#include "openvino/opsets/opset10.hpp"
#include "util/visitor.hpp"
using namespace std;
using namespace ov;
using ngraph::test::NodeBuilder;
using namespace ov::opset10;
// Round-trip default attributes through NodeBuilder (serialize + rebuild)
// and check the rebuilt node carries the same flags.
TEST(attributes, is_inf_defaults) {
    NodeBuilder::get_ops().register_factory<IsInf>();
    const auto data = make_shared<Parameter>(element::f32, Shape{1, 3, 10, 10});

    const auto is_inf = make_shared<IsInf>(data);
    NodeBuilder builder(is_inf);

    auto g_is_inf = ov::as_type_ptr<IsInf>(builder.create());

    const auto is_inf_attrs = is_inf->get_attributes();
    const auto g_is_inf_attrs = g_is_inf->get_attributes();

    EXPECT_EQ(g_is_inf_attrs.detect_positive, is_inf_attrs.detect_positive);
    EXPECT_EQ(g_is_inf_attrs.detect_negative, is_inf_attrs.detect_negative);
}

// Round-trip with detect_negative disabled (positive-only detection).
TEST(attributes, is_inf_positive_only) {
    NodeBuilder::get_ops().register_factory<IsInf>();
    const auto data = make_shared<Parameter>(element::f32, Shape{1, 3, 10, 10});

    IsInf::Attributes attributes{};
    attributes.detect_negative = false;

    const auto is_inf = make_shared<IsInf>(data, attributes);
    NodeBuilder builder(is_inf);

    auto g_is_inf = ov::as_type_ptr<IsInf>(builder.create());

    const auto is_inf_attrs = is_inf->get_attributes();
    const auto g_is_inf_attrs = g_is_inf->get_attributes();

    EXPECT_EQ(g_is_inf_attrs.detect_positive, is_inf_attrs.detect_positive);
    EXPECT_EQ(g_is_inf_attrs.detect_negative, is_inf_attrs.detect_negative);
}

// Round-trip with detect_positive disabled (negative-only detection).
TEST(attributes, is_inf_negative_only) {
    NodeBuilder::get_ops().register_factory<IsInf>();
    const auto data = make_shared<Parameter>(element::f32, Shape{1, 3, 10, 10});

    IsInf::Attributes attributes{};
    attributes.detect_positive = false;

    const auto is_inf = make_shared<IsInf>(data, attributes);
    NodeBuilder builder(is_inf);

    auto g_is_inf = ov::as_type_ptr<IsInf>(builder.create());

    const auto is_inf_attrs = is_inf->get_attributes();
    const auto g_is_inf_attrs = g_is_inf->get_attributes();

    EXPECT_EQ(g_is_inf_attrs.detect_positive, is_inf_attrs.detect_positive);
    EXPECT_EQ(g_is_inf_attrs.detect_negative, is_inf_attrs.detect_negative);
}

// Round-trip with both flags disabled.
TEST(attributes, is_inf_detect_none) {
    NodeBuilder::get_ops().register_factory<IsInf>();
    const auto data = make_shared<Parameter>(element::f32, Shape{1, 3, 10, 10});

    IsInf::Attributes attributes{};
    attributes.detect_negative = false;
    attributes.detect_positive = false;

    const auto is_inf = make_shared<IsInf>(data, attributes);
    NodeBuilder builder(is_inf);

    auto g_is_inf = ov::as_type_ptr<IsInf>(builder.create());

    const auto is_inf_attrs = is_inf->get_attributes();
    const auto g_is_inf_attrs = g_is_inf->get_attributes();

    EXPECT_EQ(g_is_inf_attrs.detect_positive, is_inf_attrs.detect_positive);
    EXPECT_EQ(g_is_inf_attrs.detect_negative, is_inf_attrs.detect_negative);
}

View File

@@ -0,0 +1,25 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "op/is_inf.hpp"
#include "openvino/opsets/opset10.hpp"
namespace ngraph {
namespace onnx_import {
namespace op {
namespace set_1 {
// Translates an ONNX IsInf node into an OpenVINO opset10 IsInf node.
OutputVector is_inf(const Node& node) {
    const auto data = node.get_ng_inputs().at(0);

    ov::opset10::IsInf::Attributes attributes{};
    // ONNX encodes the flags as int64 with a default of 1 (enabled);
    // any non-zero value converts to true.
    attributes.detect_negative = node.get_attribute_value<int64_t>("detect_negative", 1);
    attributes.detect_positive = node.get_attribute_value<int64_t>("detect_positive", 1);

    return {std::make_shared<ov::opset10::IsInf>(data, attributes)};
}
} // namespace set_1
} // namespace op
} // namespace onnx_import
} // namespace ngraph

View File

@@ -0,0 +1,19 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "ngraph/node.hpp"
#include "onnx_import/core/node.hpp"
namespace ngraph {
namespace onnx_import {
namespace op {
namespace set_1 {
OutputVector is_inf(const Node& node);
} // namespace set_1
} // namespace op
} // namespace onnx_import
} // namespace ngraph

View File

@@ -80,6 +80,7 @@
#include "op/if.hpp"
#include "op/image_scaler.hpp"
#include "op/instance_norm.hpp"
#include "op/is_inf.hpp"
#include "op/is_nan.hpp"
#include "op/leaky_relu.hpp"
#include "op/less.hpp"
@@ -360,6 +361,7 @@ OperatorsBridge::OperatorsBridge() {
REGISTER_OPERATOR("If", 1, if_op);
REGISTER_OPERATOR("ImageScaler", 1, image_scaler);
REGISTER_OPERATOR("InstanceNormalization", 1, instance_norm);
REGISTER_OPERATOR("IsInf", 1, is_inf);
REGISTER_OPERATOR("IsNaN", 1, is_nan)
REGISTER_OPERATOR("LeakyRelu", 1, leaky_relu);
REGISTER_OPERATOR("Less", 1, less);

View File

@@ -0,0 +1,61 @@
ir_version: 10
producer_name: "nGraph ONNX Importer"
graph {
node {
input: "x"
output: "y"
op_type: "IsInf"
attribute {
name: "detect_negative"
i: 1
type: INT
}
attribute {
name: "detect_positive"
i: 1
type: INT
}
}
name: "is_inf_graph"
input {
name: "x"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "y"
type {
tensor_type {
elem_type: 9
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 2
}
}
}
}
}
}
opset_import {
version: 10
}

View File

@@ -0,0 +1,61 @@
ir_version: 10
producer_name: "nGraph ONNX Importer"
graph {
node {
input: "x"
output: "y"
op_type: "IsInf"
attribute {
name: "detect_negative"
i: 1
type: INT
}
attribute {
name: "detect_positive"
i: 0
type: INT
}
}
name: "is_inf_graph"
input {
name: "x"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "y"
type {
tensor_type {
elem_type: 9
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 2
}
}
}
}
}
}
opset_import {
version: 10
}

View File

@@ -0,0 +1,61 @@
ir_version: 10
producer_name: "nGraph ONNX Importer"
graph {
node {
input: "x"
output: "y"
op_type: "IsInf"
attribute {
name: "detect_negative"
i: 0
type: INT
}
attribute {
name: "detect_positive"
i: 0
type: INT
}
}
name: "is_inf_graph"
input {
name: "x"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "y"
type {
tensor_type {
elem_type: 9
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 2
}
}
}
}
}
}
opset_import {
version: 10
}

View File

@@ -0,0 +1,61 @@
ir_version: 10
producer_name: "nGraph ONNX Importer"
graph {
node {
input: "x"
output: "y"
op_type: "IsInf"
attribute {
name: "detect_negative"
i: 0
type: INT
}
attribute {
name: "detect_positive"
i: 1
type: INT
}
}
name: "is_inf_graph"
input {
name: "x"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "y"
type {
tensor_type {
elem_type: 9
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 2
}
}
}
}
}
}
opset_import {
version: 10
}

View File

@@ -5912,6 +5912,106 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_trilu_dynamic_shapes) {
// clang-format on
}
// Default attributes: both +inf and -inf map to true; finite values and NaN to false.
NGRAPH_TEST(${BACKEND_NAME}, onnx_is_inf_default) {
    const auto function = onnx_import::import_onnx_model(
        file_util::path_join(CommonTestUtils::getExecutableDirectory(), SERIALIZED_ZOO, "onnx/is_inf.onnx"));

    auto test_case = test::TestCase(function, s_device);

    // clang-format off
    test_case.add_input<float>(
        Shape{2, 2, 2},
        std::vector<float>{ std::numeric_limits<float>::infinity(), 0.0000f,
                            std::numeric_limits<float>::max(),     -0.5000f,
                           -std::numeric_limits<float>::infinity(), 1.0000f,
                            std::numeric_limits<float>::min(),      std::nanf("")});
    test_case.add_expected_output<bool>(
        Shape{2, 2, 2},
        std::vector<bool>{true, false,
                          false, false,
                          true, false,
                          false, false});
    test_case.run();
    // clang-format on
}

// detect_positive=0: only -inf is reported.
NGRAPH_TEST(${BACKEND_NAME}, onnx_is_inf_negative_only) {
    const auto function = onnx_import::import_onnx_model(
        file_util::path_join(CommonTestUtils::getExecutableDirectory(), SERIALIZED_ZOO, "onnx/is_inf_negative.onnx"));

    auto test_case = test::TestCase(function, s_device);

    // clang-format off
    test_case.add_input<float>(
        Shape{2, 2, 2},
        std::vector<float>{ std::numeric_limits<float>::infinity(), 0.0000f,
                            std::numeric_limits<float>::max(),     -0.5000f,
                           -std::numeric_limits<float>::infinity(), 1.0000f,
                            std::numeric_limits<float>::min(),      std::nanf("")});
    test_case.add_expected_output<bool>(
        Shape{2, 2, 2},
        std::vector<bool>{false, false,
                          false, false,
                          true, false,
                          false, false});
    test_case.run();
    // clang-format on
}

// detect_negative=0: only +inf is reported.
NGRAPH_TEST(${BACKEND_NAME}, onnx_is_inf_positive_only) {
    const auto function = onnx_import::import_onnx_model(
        file_util::path_join(CommonTestUtils::getExecutableDirectory(), SERIALIZED_ZOO, "onnx/is_inf_positive.onnx"));

    auto test_case = test::TestCase(function, s_device);

    // clang-format off
    test_case.add_input<float>(
        Shape{2, 2, 2},
        std::vector<float>{ std::numeric_limits<float>::infinity(), 0.0000f,
                            std::numeric_limits<float>::max(),     -0.5000f,
                           -std::numeric_limits<float>::infinity(), 1.0000f,
                            std::numeric_limits<float>::min(),      std::nanf("")});
    test_case.add_expected_output<bool>(
        Shape{2, 2, 2},
        std::vector<bool>{true, false,
                          false, false,
                          false, false,
                          false, false});
    test_case.run();
    // clang-format on
}

// Both flags 0: every element maps to false, including infinities.
NGRAPH_TEST(${BACKEND_NAME}, onnx_is_inf_detect_none) {
    const auto function = onnx_import::import_onnx_model(
        file_util::path_join(CommonTestUtils::getExecutableDirectory(), SERIALIZED_ZOO, "onnx/is_inf_none.onnx"));

    auto test_case = test::TestCase(function, s_device);

    // clang-format off
    test_case.add_input<float>(
        Shape{2, 2, 2},
        std::vector<float>{ std::numeric_limits<float>::infinity(), 0.0000f,
                            std::numeric_limits<float>::max(),     -0.5000f,
                           -std::numeric_limits<float>::infinity(), 1.0000f,
                            std::numeric_limits<float>::min(),      std::nanf("")});
    test_case.add_expected_output<bool>(
        Shape{2, 2, 2},
        std::vector<bool>{false, false,
                          false, false,
                          false, false,
                          false, false});
    test_case.run();
    // clang-format on
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_is_nan) {
const auto function = onnx_import::import_onnx_model(file_util::path_join(SERIALIZED_ZOO, "onnx/is_nan.onnx"));

View File

@@ -426,4 +426,9 @@ IE_CPU.onnx_softmax_crossentropy_loss_mean
# Cannot find blob with name: Y
IE_CPU.onnx_bool_init_and
onnx_is_inf_default
onnx_is_inf_positive_only
onnx_is_inf_negative_only
onnx_is_inf_detect_none
onnx_is_nan

View File

@@ -83,4 +83,9 @@ onnx_clip_no_min_no_max_int64
INTERPRETER.onnx_expand_context_dependent_function
INTERPRETER.onnx_softmax_crossentropy_loss_mean
onnx_is_inf_default
onnx_is_inf_positive_only
onnx_is_inf_negative_only
onnx_is_inf_detect_none
onnx_is_nan

View File

@@ -1312,6 +1312,8 @@ std::shared_ptr<ov::Model> generateUnaryEltwise(const std::shared_ptr<ov::op::Op
eltwiseNode = std::make_shared<ov::op::v5::HSigmoid>(param);
} else if (ov::is_type<ov::op::v4::HSwish>(node)) {
eltwiseNode = std::make_shared<ov::op::v4::HSwish>(param);
} else if (ov::is_type<ov::op::v10::IsInf>(node)) {
eltwiseNode = std::make_shared<ov::op::v10::IsInf>(param);
} else if (ov::is_type<ov::op::v10::IsNaN>(node)) {
eltwiseNode = std::make_shared<ov::op::v10::IsNaN>(param);
} else if (ov::is_type<ov::op::v0::Log>(node)) {