Revise NonZero op (#5909)

* Use ngraph rtti macros

* add visitor test

* add visitor test cases to cover all possibilities

* add boolean case to type prop tests

* add boolean type to sslt

* add nonzero to trusted op list

* multiply dimensions when input shape is dynamic

* Add and disable cpu instantiations of slts

* Add missing comma in skip tests config for cpu

* Fix expected output shapes in type prop test cases

* add PartialShape input cases to type prop tests
Bartosz Lesniewski 2021-06-30 11:35:19 +02:00 committed by GitHub
parent 9439d3b74e
commit 5e501ff614
9 changed files with 159 additions and 20 deletions


@@ -19,8 +19,8 @@ std::vector<InferenceEngine::SizeVector> inputDims = {
     {1, 1000}, {223, 217, 21}, {3, 4, 5, 1}, {3, 4, 1, 5, 1}};
 
 std::vector<InferenceEngine::Precision> inputPrecisions = {
-    InferenceEngine::Precision::U8, InferenceEngine::Precision::FP32,
-    InferenceEngine::Precision::I32,
+    InferenceEngine::Precision::BOOL, InferenceEngine::Precision::U8,
+    InferenceEngine::Precision::FP32, InferenceEngine::Precision::I32,
 };
 
 ConfigMap config;


@@ -0,0 +1,36 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "single_layer_tests/nonzero.hpp"
+#include "common_test_utils/test_constants.hpp"
+
+#include <vector>
+using namespace ngraph::helpers;
+using namespace LayerTestsDefinitions;
+
+namespace {
+std::vector<std::vector<size_t>> inShapes = {
+    {1000},
+    {4, 1000},
+    {2, 4, 1000},
+    {2, 4, 4, 1000},
+    {2, 4, 4, 2, 1000},
+};
+
+const std::vector<InferenceEngine::Precision> inputPrecisions = {
+    InferenceEngine::Precision::I32,
+    InferenceEngine::Precision::FP16,
+    InferenceEngine::Precision::U8,
+};
+
+ConfigMap config;
+
+INSTANTIATE_TEST_CASE_P(smoke_nonzero, NonZeroLayerTest,
+                        ::testing::Combine(
+                            ::testing::ValuesIn(inShapes),
+                            ::testing::ValuesIn(inputPrecisions),
+                            ::testing::Values(CommonTestUtils::DEVICE_CPU),
+                            ::testing::Values(config)),
+                        NonZeroLayerTest::getTestCaseName);
+} // namespace


@@ -69,6 +69,8 @@ std::vector<std::string> disabledTestPatterns() {
         R"(.*smoke_SetBlobOfKindAUTO.*SetBlobOfKindTest.CompareWithRefs.*)",
         // reference doesn't cover I8, U8 cases. Issue: 55842
         R"(.*Gather7LayerTest.*netPRC=I8.*)",
+        // TODO: 57562 No dynamic output shape support
+        R"(.*NonZeroLayerTest.*)",
         // need to implement Export / Import
         R"(.*IEClassImportExportTestP.*)"
     };


@@ -57,6 +57,7 @@ VERIFIED_OP_REFERENCES = [
     'Negative-1',
     'NonMaxSuppression-4',
     'NonMaxSuppression-5',
+    'NonZero-3',
     'PSROIPooling-1',
     'Proposal-1',
     'Proposal-4',


@@ -23,8 +23,7 @@ namespace ngraph
             class NGRAPH_API NonZero : public Op
             {
             public:
-                static constexpr NodeTypeInfo type_info{"NonZero", 3};
-                const NodeTypeInfo& get_type_info() const override { return type_info; }
+                NGRAPH_RTTI_DECLARATION;
                 /// \brief Constructs a NonZero operation.
                 NonZero() = default;
                 /// \brief Constructs a NonZero operation.


@@ -15,7 +15,7 @@
 using namespace ngraph;
 using namespace std;
 
-constexpr NodeTypeInfo op::v3::NonZero::type_info;
+NGRAPH_RTTI_DEFINITION(op::v3::NonZero, "NonZero", 3);
 
 op::v3::NonZero::NonZero(const Output<Node>& arg)
     : Op({arg})
@@ -47,25 +47,20 @@ bool ngraph::op::v3::NonZero::visit_attributes(AttributeVisitor& visitor)
 void op::v3::NonZero::validate_and_infer_types()
 {
     NGRAPH_OP_SCOPE(v3_NonZero_validate_and_infer_types);
-    const PartialShape& input_shape = get_input_partial_shape(0);
     NODE_VALIDATION_CHECK(this,
                           m_output_type == element::i64 || m_output_type == element::i32,
                           "Output type must be i32 or i64");
 
     // For scalar non-zero value case, onnx test case expects output shape {1, 1}
-    if (input_shape.rank() == 0)
+    const PartialShape& input_shape = get_input_partial_shape(0);
+    if (input_shape.rank().compatible(0))
     {
         set_output_type(
             0, m_output_type, PartialShape{Dimension::dynamic(), Dimension::dynamic()});
     }
     else
     {
-        const Dimension dim = input_shape.is_static()
-                                  ? std::accumulate(begin(input_shape),
-                                                    end(input_shape),
-                                                    Dimension(0, 1),
-                                                    std::multiplies<Dimension>())
-                                  : Dimension();
+        const Dimension dim = std::accumulate(
+            begin(input_shape), end(input_shape), Dimension(0, 1), std::multiplies<Dimension>());
         set_output_type(0, m_output_type, PartialShape{input_shape.rank(), dim});
     }
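Note on the shape inference change above: the output shape is now always reported as {input_rank, {0, N}}, where the upper bound N comes from folding the input dimensions with Dimension's interval arithmetic, so partially dynamic inputs keep whatever bounds are known instead of degrading to a fully dynamic dimension. The sketch below is plain C++ and independent of ngraph; Interval, product, and kInf are illustrative stand-ins for Dimension and its bounds, not real API. It only shows the interval product that the std::accumulate call performs:

// Minimal sketch of the (min, max) folding done by
// accumulate(begin, end, Dimension(0, 1), multiplies<Dimension>()).
#include <cstdint>
#include <initializer_list>
#include <iostream>
#include <limits>

// Stand-in for the (min, max) bounds carried by ngraph::Dimension.
struct Interval
{
    std::int64_t min;
    std::int64_t max; // kInf marks an unbounded dimension
};

const std::int64_t kInf = std::numeric_limits<std::int64_t>::max();

Interval product(std::initializer_list<Interval> dims)
{
    // Start from (0, 1): the lower bound stays 0 because the input may contain
    // no non-zero elements; the upper bound becomes the product of the maxima
    // (every element non-zero).
    Interval acc{0, 1};
    for (const auto& d : dims)
    {
        acc.min *= d.min;
        acc.max = (acc.max == kInf || d.max == kInf) ? kInf : acc.max * d.max;
    }
    return acc;
}

int main()
{
    // Static input Shape{3, 3, 224, 224} -> output {4, {0, 451584}}.
    const auto s = product({{3, 3}, {3, 3}, {224, 224}, {224, 224}});
    std::cout << s.min << ".." << s.max << '\n'; // prints 0..451584

    // Partially dynamic input {{3, 4}, {5, 6}, {7, 8}} -> output {3, {0, 192}}.
    const auto p = product({{3, 4}, {5, 6}, {7, 8}});
    std::cout << p.min << ".." << p.max << '\n'; // prints 0..192
    return 0;
}

A fully dynamic dimension keeps an unbounded maximum, which is why the non_zero_partial_with_negative test below still expects {3, -1}.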


@@ -257,6 +257,7 @@ set(SRC
     visitors/op/mvn.cpp
     visitors/op/negative.cpp
     visitors/op/non_max_suppression.cpp
+    visitors/op/non_zero.cpp
     visitors/op/normalize_l2.cpp
     visitors/op/one_hot.cpp
     visitors/op/pad.cpp


@@ -14,8 +14,23 @@ TEST(type_prop, non_zero)
     auto data = make_shared<op::Parameter>(element::f32, Shape{3, 3, 224, 224});
     auto non_zero = make_shared<op::v3::NonZero>(data);
     EXPECT_EQ(non_zero->get_element_type(), element::i64);
-    EXPECT_TRUE(
-        non_zero->get_output_partial_shape(0).same_scheme(PartialShape{4, Dimension::dynamic()}));
+    ASSERT_EQ(non_zero->get_output_partial_shape(0), (PartialShape{4, {0, 451584}}));
+}
+
+TEST(type_prop, non_zero_partial_input)
+{
+    auto data = make_shared<op::Parameter>(element::f32, PartialShape{{3, 4}, {5, 6}, {7, 8}});
+    auto non_zero = make_shared<op::v3::NonZero>(data);
+    EXPECT_EQ(non_zero->get_element_type(), element::i64);
+    ASSERT_EQ(non_zero->get_output_partial_shape(0), (PartialShape{3, {0, 192}}));
+}
+
+TEST(type_prop, non_zero_partial_with_negative)
+{
+    auto data = make_shared<op::Parameter>(element::f32, PartialShape{{3, 4}, {5, 6}, -1});
+    auto non_zero = make_shared<op::v3::NonZero>(data);
+    EXPECT_EQ(non_zero->get_element_type(), element::i64);
+    ASSERT_EQ(non_zero->get_output_partial_shape(0), (PartialShape{3, -1}));
 }
 
 TEST(type_prop, non_zero_dynamic)
@@ -33,8 +48,7 @@ TEST(type_prop, non_zero_output_type)
     auto non_zero = make_shared<op::v3::NonZero>(data, element::i32);
 
     ASSERT_EQ(non_zero->get_output_element_type(0), element::i32);
-    EXPECT_TRUE(
-        non_zero->get_output_partial_shape(0).same_scheme(PartialShape{4, Dimension::dynamic()}));
+    ASSERT_EQ(non_zero->get_output_partial_shape(0), (PartialShape{4, {0, 24}}));
 }
 
 TEST(type_prop, non_zero_string_output_type)
@@ -43,8 +57,16 @@ TEST(type_prop, non_zero_string_output_type)
     auto non_zero = make_shared<op::v3::NonZero>(data, "i32");
 
     ASSERT_EQ(non_zero->get_output_element_type(0), element::i32);
-    EXPECT_TRUE(
-        non_zero->get_output_partial_shape(0).same_scheme(PartialShape{4, Dimension::dynamic()}));
+    ASSERT_EQ(non_zero->get_output_partial_shape(0), (PartialShape{4, {0, 24}}));
+}
+
+TEST(type_prop, non_zero_bool_input_type)
+{
+    auto data = make_shared<op::Parameter>(element::boolean, Shape{1, 2, 3, 4});
+    auto non_zero = make_shared<op::v3::NonZero>(data, element::i32);
+
+    ASSERT_EQ(non_zero->get_output_element_type(0), element::i32);
+    ASSERT_EQ(non_zero->get_output_partial_shape(0), (PartialShape{4, {0, 24}}));
 }
 
 TEST(type_prop, non_zero_fail_index_element_type)
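For reference, the expected upper bounds in these test cases are simply the product of each input dimension's maximum. A quick plain-C++ sanity check of the constants used above (independent of ngraph):

// Upper bound = product of the per-dimension maxima; the lower bound is always 0.
static_assert(3 * 3 * 224 * 224 == 451584, "Shape{3, 3, 224, 224}");
static_assert(4 * 6 * 8 == 192, "PartialShape{{3, 4}, {5, 6}, {7, 8}}");
static_assert(1 * 2 * 3 * 4 == 24, "Shape{1, 2, 3, 4}");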


@@ -0,0 +1,83 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "gtest/gtest.h"
+
+#include "ngraph/ngraph.hpp"
+#include "ngraph/op/util/attr_types.hpp"
+#include "ngraph/opsets/opset3.hpp"
+#include "util/visitor.hpp"
+
+using namespace std;
+using namespace ngraph;
+using ngraph::test::NodeBuilder;
+using ngraph::test::ValueMap;
+
+TEST(attributes, non_zero_op_default)
+{
+    NodeBuilder::get_ops().register_factory<opset3::NonZero>();
+    const auto data_node = make_shared<op::Parameter>(element::f32, Shape{1});
+    const auto non_zero = make_shared<op::NonZero>(data_node);
+
+    NodeBuilder builder(non_zero);
+    const auto expected_attr_count = 1;
+
+    EXPECT_EQ(builder.get_value_map_size(), expected_attr_count);
+    EXPECT_EQ(non_zero->get_output_type(), element::i64);
+}
+
+TEST(attributes, non_zero_op_i32)
+{
+    NodeBuilder::get_ops().register_factory<opset3::NonZero>();
+    const auto data_node = make_shared<op::Parameter>(element::f32, Shape{1});
+    const auto non_zero = make_shared<op::NonZero>(data_node, element::i32);
+
+    NodeBuilder builder(non_zero);
+    const auto expected_attr_count = 1;
+
+    EXPECT_EQ(builder.get_value_map_size(), expected_attr_count);
+    EXPECT_EQ(non_zero->get_output_type(), element::i32);
+}
+
+TEST(attributes, non_zero_op_i32_string)
+{
+    NodeBuilder::get_ops().register_factory<opset3::NonZero>();
+    const auto data_node = make_shared<op::Parameter>(element::f32, Shape{1});
+    const auto non_zero = make_shared<op::NonZero>(data_node, "i32");
+
+    NodeBuilder builder(non_zero);
+    const auto expected_attr_count = 1;
+
+    EXPECT_EQ(builder.get_value_map_size(), expected_attr_count);
+    EXPECT_EQ(non_zero->get_output_type(), element::i32);
+}
+
+TEST(attributes, non_zero_op_i64)
+{
+    NodeBuilder::get_ops().register_factory<opset3::NonZero>();
+    const auto data_node = make_shared<op::Parameter>(element::f32, Shape{1});
+    const auto non_zero = make_shared<op::NonZero>(data_node, element::i64);
+
+    NodeBuilder builder(non_zero);
+    const auto expected_attr_count = 1;
+
+    EXPECT_EQ(builder.get_value_map_size(), expected_attr_count);
+    EXPECT_EQ(non_zero->get_output_type(), element::i64);
+}
+
+TEST(attributes, non_zero_op_i64_string)
+{
+    NodeBuilder::get_ops().register_factory<opset3::NonZero>();
+    const auto data_node = make_shared<op::Parameter>(element::f32, Shape{1});
+    const auto non_zero = make_shared<op::NonZero>(data_node, "i64");
+
+    NodeBuilder builder(non_zero);
+    const auto expected_attr_count = 1;
+
+    EXPECT_EQ(builder.get_value_map_size(), expected_attr_count);
+    EXPECT_EQ(non_zero->get_output_type(), element::i64);
+}