diff --git a/inference-engine/src/vpu/common/src/ngraph/transformations/dynamic_to_static_shape_broadcast.cpp b/inference-engine/src/vpu/common/src/ngraph/transformations/dynamic_to_static_shape_broadcast.cpp
index 15a0b0c2c05..890929b8b66 100644
--- a/inference-engine/src/vpu/common/src/ngraph/transformations/dynamic_to_static_shape_broadcast.cpp
+++ b/inference-engine/src/vpu/common/src/ngraph/transformations/dynamic_to_static_shape_broadcast.cpp
@@ -45,23 +45,19 @@ void dynamicToStaticShapeBroadcast(std::shared_ptr<ngraph::Node> target) {
     std::shared_ptr<ngraph::Node> dsr;
     if (broadcast->get_broadcast_spec() == ngraph::op::BroadcastType::BIDIRECTIONAL) {
-        const auto inputShape = broadcast->get_input_shape(0);
+        const auto dataDSR = ngraph::as_type_ptr<ngraph::vpu::op::DynamicShapeResolver>(broadcast->input_value(0).get_node_shared_ptr());
+        const auto shapeElementType = dataDSR ? dataDSR->get_input_element_type(1) : broadcast->get_input_element_type(1);
+        const auto dataShape = dataDSR ? dataDSR->input_value(1) : shapeToConstant(shapeElementType, broadcast->get_input_shape(0));
         const auto targetShape = broadcast->input_value(1).get_node_shared_ptr();
-        const auto shapeType = targetShape->get_element_type();
-        const auto inputShapeDimsCount = inputShape.size();
+        const auto dataShapeDimsCount = ngraph::shape_size(dataShape.get_shape());
         const auto targetShapeDimsCount = ngraph::shape_size(broadcast->get_input_partial_shape(1).get_shape());
-        const auto inputShapeConst = std::make_shared<ngraph::opset5::Constant>(
-            shapeType,
-            ngraph::Shape{static_cast<size_t>(inputShapeDimsCount)},
-            inputShape);
-
-        const auto minRank = std::min(inputShapeDimsCount, targetShapeDimsCount);
-        const auto maxRank = std::max(inputShapeDimsCount, targetShapeDimsCount);
-        const auto minRankNode = minRank == inputShapeDimsCount ? inputShapeConst : targetShape;
-        const auto maxRankNode = minRank == inputShapeDimsCount ? targetShape : inputShapeConst;
+        const auto minRank = std::min(dataShapeDimsCount, targetShapeDimsCount);
+        const auto maxRank = std::max(dataShapeDimsCount, targetShapeDimsCount);
+        const auto minRankNode = minRank == dataShapeDimsCount ? dataShape : targetShape;
+        const auto maxRankNode = minRank == dataShapeDimsCount ? targetShape : dataShape;

         ngraph::NodeVector dims;
@@ -69,19 +65,19 @@ void dynamicToStaticShapeBroadcast(std::shared_ptr<ngraph::Node> target) {
             dims.push_back(
                 std::make_shared<ngraph::opset5::Gather>(
                     maxRankNode,
-                    ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {i}),
-                    ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {0})));
+                    ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {i}),
+                    ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {0})));
         }

         for (int i = 0; i < minRank; i++) {
             const auto minRankDim = std::make_shared<ngraph::opset5::Gather>(
                 minRankNode,
-                ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {i}),
-                ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {0}));
+                ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {i}),
+                ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {0}));
             const auto maxRankDim = std::make_shared<ngraph::opset5::Gather>(
                 maxRankNode,
-                ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {maxRank - minRank + i}),
-                ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {0}));
+                ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {maxRank - minRank + i}),
+                ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {0}));

             dims.push_back(std::make_shared<ngraph::opset5::Maximum>(minRankDim, maxRankDim));
         }
diff --git a/inference-engine/tests/functional/plugin/myriad/ngraph/transformations/dynamic_to_static_shape_broadcast.cpp b/inference-engine/tests/functional/plugin/myriad/ngraph/transformations/dynamic_to_static_shape_broadcast.cpp
index c8755b7e238..7c7996da502 100644
--- a/inference-engine/tests/functional/plugin/myriad/ngraph/transformations/dynamic_to_static_shape_broadcast.cpp
+++ b/inference-engine/tests/functional/plugin/myriad/ngraph/transformations/dynamic_to_static_shape_broadcast.cpp
@@ -6,6 +6,7 @@
 #include "vpu/ngraph/transformations/dynamic_to_static_shape.hpp"
 #include "vpu/ngraph/operations/static_shape_broadcast.hpp"
 #include "vpu/ngraph/operations/dynamic_shape_resolver.hpp"
+#include "vpu/ngraph/utilities.hpp"

 #include <ngraph_functions/utils/ngraph_helpers.hpp>
 #include <ngraph/opsets/opset5.hpp>
@@ -26,12 +27,22 @@
 using TensorType = ngraph::element::Type;
 using TensorShape = ngraph::PartialShape;
 using AxesMapping = std::vector<size_t>;

+enum class BroadcastInputType {
+    DYNAMIC,
+    STATIC
+};
+
 struct BroadcastShapes {
     TensorShape srcShape;
     TensorShape targetShape;
     AxesMapping axesMapping;
 };
-using BroadcastTestParams = std::tuple<TensorType, BroadcastShapes>;
+
+using BroadcastTestParams = std::tuple<
+    TensorType,
+    TensorType,
+    BroadcastShapes,
+    BroadcastInputType>;

 class DynamicToStaticShapeBroadcastExplicitTests
         : public CommonTestUtils::TestsCommon,
@@ -39,38 +50,53 @@ class DynamicToStaticShapeBroadcastExplicitTests
           public testing::WithParamInterface<BroadcastTestParams> {
 public:
     void SetUp() override {
         const auto& parameters = GetParam();
-        const auto& tensorType = std::get<0>(parameters);
-        const auto& tensorShape = std::get<1>(parameters).srcShape;
-        const auto& targetShape = std::get<1>(parameters).targetShape;
-        const auto& axesMapping = std::get<1>(parameters).axesMapping;
+        const auto& tensorType  = std::get<0>(parameters);
+        const auto& shapeType   = std::get<1>(parameters);
+        const auto& tensorShape = std::get<2>(parameters).srcShape;
+        const auto& targetShape = std::get<2>(parameters).targetShape;
+        const auto& axesMapping = std::get<2>(parameters).axesMapping;
+        const auto& broadcastInputType = std::get<3>(parameters);

         ngraph::helpers::CompareFunctions(
-                *transform(tensorType, tensorShape, targetShape, axesMapping),
-                *reference(tensorType, tensorShape, targetShape, axesMapping));
+                *transform(tensorType, shapeType, tensorShape, targetShape, axesMapping, broadcastInputType),
+                *reference(tensorType, shapeType, tensorShape, targetShape, axesMapping, broadcastInputType));
     }

 protected:
     std::shared_ptr<ngraph::Function> transform(
             const TensorType& tensorType,
+            const TensorType& shapeType,
             const TensorShape& tensorShape,
             const TensorShape& targetShape,
-            const AxesMapping& axesMapping) const {
+            const AxesMapping& axesMapping,
+            BroadcastInputType broadcastInputType) const {
         const auto tensorParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, tensorShape);
         const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, targetShape);

-        const auto shapeOfNode = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam);
+        const auto shapeOfNode = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam, shapeType);
         shapeOfNode->set_is_foldable(false);

+        ngraph::ParameterVector params{tensorParam, tensorWithTargetShapeParam};
+
+        std::shared_ptr<ngraph::Node> broadcastInput = tensorParam;
+        if (broadcastInputType == BroadcastInputType::DYNAMIC) {
+            const auto shapeParam = std::make_shared<ngraph::opset5::Parameter>(
+                shapeType,
+                ngraph::Shape{static_cast<size_t>(tensorShape.rank().get_length())});
+            params.push_back(shapeParam);
+            broadcastInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(tensorParam, shapeParam);
+        }
+
         const auto axesMappingConstant = std::make_shared<ngraph::opset5::Constant>(
             ngraph::element::u64,
             ngraph::Shape{axesMapping.size()},
             axesMapping);

-        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(tensorParam, shapeOfNode, axesMappingConstant);
+        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(broadcastInput, shapeOfNode, axesMappingConstant);

         auto function = std::make_shared<ngraph::Function>(
             ngraph::NodeVector{broadcast},
-            ngraph::ParameterVector{tensorParam, tensorWithTargetShapeParam},
+            params,
             "Actual");

         // We need to set broadcast output shape to make its rank static.
@@ -87,24 +113,37 @@ protected:
     std::shared_ptr<ngraph::Function> reference(
             const TensorType& tensorType,
+            const TensorType& shapeType,
             const TensorShape& tensorShape,
             const TensorShape& targetShape,
-            const AxesMapping& axesMapping) const {
+            const AxesMapping& axesMapping,
+            BroadcastInputType broadcastInputType) const {
         const auto tensorParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, tensorShape);
         const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, targetShape);
-        const auto shapeOf = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam);
+        const auto shapeOf = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam, shapeType);
+
+        ngraph::ParameterVector params{tensorParam, tensorWithTargetShapeParam};
+
+        std::shared_ptr<ngraph::Node> broadcastInput = tensorParam;
+        if (broadcastInputType == BroadcastInputType::DYNAMIC) {
+            const auto shapeParam = std::make_shared<ngraph::opset5::Parameter>(
+                shapeType,
+                ngraph::Shape{static_cast<size_t>(tensorShape.rank().get_length())});
+            params.push_back(shapeParam);
+            broadcastInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(tensorParam, shapeParam);
+        }

         const auto axesMappingConstant = std::make_shared<ngraph::opset5::Constant>(
-            ngraph::element::u64,
+            ngraph::element::i64,
             ngraph::Shape{axesMapping.size()},
             axesMapping);

-        const auto staticShapeBroadcast = std::make_shared<ngraph::vpu::op::StaticShapeBroadcast>(tensorParam, shapeOf, axesMappingConstant);
+        const auto staticShapeBroadcast = std::make_shared<ngraph::vpu::op::StaticShapeBroadcast>(broadcastInput, shapeOf, axesMappingConstant);

         const auto dsrOut = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(staticShapeBroadcast, shapeOf);
         return std::make_shared<ngraph::Function>(
             ngraph::NodeVector{dsrOut},
-            ngraph::ParameterVector{tensorParam, tensorWithTargetShapeParam},
+            params,
             "Expected");
     }
 };
@@ -119,9 +158,15 @@ INSTANTIATE_TEST_CASE_P(smoke_NGraph, DynamicToStaticShapeBroadcastExplicitTests, testing::Combine(
         ngraph::element::i32,
         ngraph::element::i64,
         ngraph::element::u8),
+    testing::Values(
+        ngraph::element::i32,
+        ngraph::element::i64),
     testing::Values(
         BroadcastShapes{TensorShape{16}, TensorShape{1, 16, 50, 50}, AxesMapping{1}},
-        BroadcastShapes{TensorShape{50, 50}, TensorShape{1, 50, 50, 16}, AxesMapping{1, 2}})
+        BroadcastShapes{TensorShape{50, 50}, TensorShape{1, 50, 50, 16}, AxesMapping{1, 2}}),
+    testing::Values(
+        BroadcastInputType::DYNAMIC,
+        BroadcastInputType::STATIC)
 ));
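Note on `vpu::shapeToConstant`, which comes from `vpu/ngraph/utilities.hpp` (the include added above) and replaces the hand-rolled shape constants both in the transformation above and in the bidirectional test reference below: judging from the `ngraph::opset5::Constant` construction it supersedes, it presumably reduces to something like the sketch below. The signature and body are inferred from usage in this diff, not taken from the actual header:

```cpp
#include <cstddef>
#include <memory>
#include <vector>
#include <ngraph/opsets/opset5.hpp>

// Hypothetical sketch of vpu::shapeToConstant, inferred from the removed
// Constant construction it replaces (actual signature unverified): a 1-D
// constant whose element count is the rank and whose values are the dims.
std::shared_ptr<ngraph::Node> shapeToConstant(const ngraph::element::Type& type,
                                              const ngraph::Shape& shape) {
    return ngraph::opset5::Constant::create(
        type, ngraph::Shape{shape.size()},
        std::vector<std::size_t>(shape.begin(), shape.end()));
}
```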
@@ -130,31 +175,46 @@ class DynamicToStaticShapeBroadcastBidirectionalTests : public CommonTestUtils::TestsCommon, public testing::WithParamInterface<BroadcastTestParams> {
 public:
     void SetUp() override {
         const auto& parameters = GetParam();
-        const auto& tensorType = std::get<0>(parameters);
-        const auto& tensorShape = std::get<1>(parameters).srcShape;
-        const auto& targetShape = std::get<1>(parameters).targetShape;
+        const auto& tensorType  = std::get<0>(parameters);
+        const auto& shapeType   = std::get<1>(parameters);
+        const auto& tensorShape = std::get<2>(parameters).srcShape;
+        const auto& targetShape = std::get<2>(parameters).targetShape;
+        const auto& broadcastInputType = std::get<3>(parameters);

         ngraph::helpers::CompareFunctions(
-                *transform(tensorType, tensorShape, targetShape),
-                *reference(tensorType, tensorShape, targetShape));
+                *transform(tensorType, shapeType, tensorShape, targetShape, broadcastInputType),
+                *reference(tensorType, shapeType, tensorShape, targetShape, broadcastInputType));
     }

 protected:
     std::shared_ptr<ngraph::Function> transform(
             const TensorType& tensorType,
+            const TensorType& shapeType,
             const TensorShape& tensorShape,
-            const TensorShape& targetShape) const {
+            const TensorShape& targetShape,
+            BroadcastInputType broadcastInputType) const {
         const auto tensorParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, tensorShape);
-        const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, targetShape);
+        const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(shapeType, targetShape);

-        const auto shapeOfNode = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam);
+        const auto shapeOfNode = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam, shapeType);
         shapeOfNode->set_is_foldable(false);

-        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(tensorParam, shapeOfNode, ngraph::op::BroadcastType::BIDIRECTIONAL);
+        ngraph::ParameterVector params{tensorParam, tensorWithTargetShapeParam};
+
+        std::shared_ptr<ngraph::Node> broadcastInput = tensorParam;
+        if (broadcastInputType == BroadcastInputType::DYNAMIC) {
+            const auto shapeParam = std::make_shared<ngraph::opset5::Parameter>(
+                shapeType,
+                ngraph::Shape{static_cast<size_t>(tensorShape.rank().get_length())});
+            params.push_back(shapeParam);
+            broadcastInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(tensorParam, shapeParam);
+        }
+
+        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(broadcastInput, shapeOfNode, ngraph::op::BroadcastType::BIDIRECTIONAL);

         auto function = std::make_shared<ngraph::Function>(
             ngraph::NodeVector{broadcast},
-            ngraph::ParameterVector{tensorParam, tensorWithTargetShapeParam},
+            params,
             "Actual");

         const auto transformations = vpu::Transformations{{ngraph::opset5::Broadcast::type_info, vpu::dynamicToStaticShapeBroadcast}};
@@ -164,29 +224,41 @@ protected:
     std::shared_ptr<ngraph::Function> reference(
             const TensorType& tensorType,
+            const TensorType& shapeType,
             const TensorShape& tensorShape,
-            const TensorShape& targetShape) const {
+            const TensorShape& targetShape,
+            BroadcastInputType broadcastInputType) const {
         const auto tensorParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, tensorShape);
-        const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, targetShape);
-        std::shared_ptr<ngraph::Node> shapeOf = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam);
+        const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(shapeType, targetShape);
+        std::shared_ptr<ngraph::Node> shapeOf = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam, shapeType);
+
+        ngraph::ParameterVector params{tensorParam, tensorWithTargetShapeParam};
+
+        std::shared_ptr<ngraph::Node> broadcastInput = tensorParam;
+        if (broadcastInputType == BroadcastInputType::DYNAMIC) {
+            const auto shapeParam = std::make_shared<ngraph::opset5::Parameter>(
+                shapeType,
+                ngraph::Shape{static_cast<size_t>(tensorShape.rank().get_length())});
+            params.push_back(shapeParam);
+            broadcastInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(tensorParam, shapeParam);
+        }

         const auto staticShapeBroadcast = std::make_shared<ngraph::vpu::op::StaticShapeBroadcast>(
-            tensorParam,
+            broadcastInput,
             shapeOf,
             ngraph::op::BroadcastType::BIDIRECTIONAL);

         const auto tensorShapeDimsCount = tensorShape.rank().get_length();
         const auto targetShapeDimsCount = targetShape.rank().get_length();

-        std::shared_ptr<ngraph::Node> tensorShapeConst = std::make_shared<ngraph::opset5::Constant>(
-            ngraph::element::i64,
-            ngraph::Shape{static_cast<size_t>(tensorShapeDimsCount)},
-            tensorShape.get_shape());
+        const auto tensorShapeNode = broadcastInputType == BroadcastInputType::DYNAMIC ?
+            staticShapeBroadcast->input_value(0).get_node_shared_ptr()->input_value(1) :
+            vpu::shapeToConstant(shapeType, tensorShape.get_shape());

-        const auto maxRankNode = tensorShapeDimsCount > targetShapeDimsCount ? tensorShapeConst : shapeOf;
-        const auto minRankNode = maxRankNode == tensorShapeConst ? shapeOf : tensorShapeConst;
-        const auto maxRank = maxRankNode == tensorShapeConst ? tensorShapeDimsCount : targetShapeDimsCount;
-        const auto minRank = minRankNode == tensorShapeConst ? tensorShapeDimsCount : targetShapeDimsCount;
+        const auto maxRankNode = tensorShapeDimsCount > targetShapeDimsCount ? tensorShapeNode : shapeOf;
+        const auto minRankNode = maxRankNode == tensorShapeNode ? shapeOf : tensorShapeNode;
+        const auto maxRank = maxRankNode == tensorShapeNode ? tensorShapeDimsCount : targetShapeDimsCount;
+        const auto minRank = minRankNode == tensorShapeNode ? tensorShapeDimsCount : targetShapeDimsCount;

         ngraph::NodeVector dims;
@@ -216,7 +288,7 @@ protected:
             staticShapeBroadcast->output(0), outShape);

         return std::make_shared<ngraph::Function>(
             ngraph::NodeVector{dsrOut},
-            ngraph::ParameterVector{tensorParam, tensorWithTargetShapeParam},
+            params,
             "Expected");
     }
 };
@@ -231,14 +303,19 @@ INSTANTIATE_TEST_CASE_P(smoke_NGraph, DynamicToStaticShapeBroadcastBidirectionalTests, testing::Combine(
         ngraph::element::i32,
         ngraph::element::i64,
         ngraph::element::u8),
+    testing::Values(
+        ngraph::element::i32,
+        ngraph::element::i64),
     testing::Values(
         BroadcastShapes{TensorShape{1, 1, 4}, TensorShape{300, 2, 4}, {}},
         BroadcastShapes{TensorShape{15, 1}, TensorShape{2, 16, 15, 14}, {}},
         BroadcastShapes{TensorShape{2, 16, 15, 14}, TensorShape{15, 14}, {}},
         BroadcastShapes{TensorShape{2, 16, 15, 14}, TensorShape{16, 1, 1}, {}},
         BroadcastShapes{TensorShape{2, 16, 15, 14}, TensorShape{16, 1, 14}, {}},
-        BroadcastShapes{TensorShape{16, 15, 1}, TensorShape{2, 1, 15, 14}, {}})
-
+        BroadcastShapes{TensorShape{16, 15, 1}, TensorShape{2, 1, 15, 14}, {}}),
+    testing::Values(
+        BroadcastInputType::DYNAMIC,
+        BroadcastInputType::STATIC)
 ));

 }  // namespace
diff --git a/inference-engine/tests/functional/plugin/myriad/subgraph_tests/dsr_tests_common.hpp b/inference-engine/tests/functional/plugin/myriad/subgraph_tests/dsr_tests_common.hpp
index dc0d6ab8993..6a68373c3db 100644
--- a/inference-engine/tests/functional/plugin/myriad/subgraph_tests/dsr_tests_common.hpp
+++ b/inference-engine/tests/functional/plugin/myriad/subgraph_tests/dsr_tests_common.hpp
@@ -29,6 +29,7 @@ class DSR_TestsCommon : virtual public LayerTestsUtils::LayerTestsCommon {
 protected:
     std::unordered_map<std::string, DataShape> m_shapes;
     ngraph::ParameterVector m_parameterVector;
+    ngraph::ResultVector m_additionalResults;

     std::shared_ptr<ngraph::opset3::Parameter> createParameter(
             const ngraph::element::Type& element_type,
@@ -69,6 +70,7 @@ protected:
         for (const auto& output : testedOp->outputs()) {
             results.emplace_back(std::make_shared<ngraph::opset3::Result>(output));
         }
+        results.insert(results.end(), m_additionalResults.begin(), m_additionalResults.end());

         function = std::make_shared<ngraph::Function>(
             results,
diff --git a/inference-engine/tests/functional/plugin/myriad/subgraph_tests/nonzero_broadcast.cpp b/inference-engine/tests/functional/plugin/myriad/subgraph_tests/nonzero_broadcast.cpp
index 5b62d681dd6..be8c4b4c1cd 100644
--- a/inference-engine/tests/functional/plugin/myriad/subgraph_tests/nonzero_broadcast.cpp
+++ b/inference-engine/tests/functional/plugin/myriad/subgraph_tests/nonzero_broadcast.cpp
@@ -15,7 +15,7 @@ using TensorType = ngraph::element::Type;
 using TensorShape = ngraph::Shape;

 struct BroadcastInputParams {
-    TensorShape inputShape;
+    DataShapeWithUpperBound inputShape;
     DataShapeWithUpperBound targetShape;
     InferenceEngine::SizeVector axesMapping;
 };
@@ -24,8 +24,8 @@ using BroadcastTestParams = std::tuple<
     BroadcastInputParams,
     TensorType,
     LayerTestsUtils::TargetDevice>;

-class NonZero_BroadcastBidirectional : public testing::WithParamInterface<BroadcastTestParams>,
-                                       virtual public LayerTestsUtils::LayerTestsCommon {
+class NonZero_Broadcast : public testing::WithParamInterface<BroadcastTestParams>,
+                          public DSR_TestsCommon {
 protected:
     size_t getDynamicAxis(const DataShape& shapeA, const DataShape& shapeB) const {
         size_t res = 0;
@@ -35,9 +35,7 @@ protected:
         return res;
     }

-    void prepareBroadcastInputs() {
-        SetRefMode(LayerTestsUtils::RefMode::CONSTANT_FOLDING);
-
+    std::shared_ptr<ngraph::Node> createTestedOp() override {
         const auto& parameters = GetParam();
         const auto& broadcastParams = std::get<0>(parameters);
         const auto& tensorType = std::get<1>(parameters);
@@ -48,133 +46,99 @@ protected:
         const auto dynamicAxis = getDynamicAxis(upperBoundShape, realShape);

-        m_param = std::make_shared<ngraph::opset5::Parameter>(tensorType, TensorShape{upperBoundShape[dynamicAxis]});
-        m_nonZero = std::make_shared<ngraph::opset5::NonZero>(m_param);
-        const auto shapeOfNonZero = std::make_shared<ngraph::opset5::ShapeOf>(m_nonZero);
+        const auto& nonZeroParam = createParameter(tensorType, TensorShape{upperBoundShape[dynamicAxis]});
+        const auto& nonZero = std::make_shared<ngraph::opset5::NonZero>(nonZeroParam, ngraph::element::i32);
+        m_additionalResults.push_back(std::make_shared<ngraph::opset5::Result>(nonZero->output(0)));
+        const auto shapeOfNonZero = std::make_shared<ngraph::opset5::ShapeOf>(nonZero, ngraph::element::i32);
         const auto numNonZeros = std::make_shared<ngraph::opset5::Gather>(
             shapeOfNonZero,
             ngraph::opset5::Constant::create(ngraph::element::i64, ngraph::Shape{1}, {1}),
             ngraph::opset5::Constant::create(ngraph::element::i64, ngraph::Shape{1}, {0}));

-        m_broadcastTargetShape = numNonZeros;
+        std::shared_ptr<ngraph::Node> broadcastTargetShape = numNonZeros;

         if (dynamicAxis > 0) {
-            m_broadcastTargetShape = std::make_shared<ngraph::opset5::Concat>(
+            broadcastTargetShape = std::make_shared<ngraph::opset5::Concat>(
                 ngraph::NodeVector{
                     ngraph::opset5::Constant::create(
-                        ngraph::element::i64,
+                        ngraph::element::i32,
                         ngraph::Shape{dynamicAxis},
                         std::vector<int64_t>{upperBoundShape.begin(), upperBoundShape.begin() + dynamicAxis}),
-                    m_broadcastTargetShape},
+                    broadcastTargetShape},
                 0);
         }
         if (dynamicAxis < upperBoundShape.size() - 1) {
-            m_broadcastTargetShape = std::make_shared<ngraph::opset5::Concat>(
+            broadcastTargetShape = std::make_shared<ngraph::opset5::Concat>(
                 ngraph::NodeVector{
-                    m_broadcastTargetShape,
+                    broadcastTargetShape,
                     ngraph::opset5::Constant::create(
-                        ngraph::element::i64,
+                        ngraph::element::i32,
                         ngraph::Shape{upperBoundShape.size() - dynamicAxis - 1},
                         std::vector<int64_t>{upperBoundShape.begin() + dynamicAxis + 1, upperBoundShape.end()})},
                 0);
         }

-        m_broadcastInput = ngraph::builder::makeConstant(tensorType, ngraph::Shape{broadcastParams.inputShape}, std::vector<float>{}, true);
-    }
+        const auto& broadcastInput = broadcastParams.inputShape.upperBoundShape.size() ?
+            createInputSubgraphWithDSR(tensorType, broadcastParams.inputShape) :
+            ngraph::builder::makeConstant(tensorType, ngraph::Shape{broadcastParams.inputShape.shape}, std::vector<float>{}, true);

-    void SetUp() override {
-        prepareBroadcastInputs();
+        if (broadcastParams.axesMapping.size() != 0) {
+            const auto& axesMapping = std::get<0>(GetParam()).axesMapping;
+            const auto axesMappingConst = ngraph::opset5::Constant::create(ngraph::element::i64, ngraph::Shape{axesMapping.size()}, axesMapping);

-        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(m_broadcastInput, m_broadcastTargetShape, ngraph::op::BroadcastType::BIDIRECTIONAL);
+            return std::make_shared<ngraph::opset5::Broadcast>(broadcastInput, broadcastTargetShape, axesMappingConst);
+        }

-        function = std::make_shared<ngraph::Function>(
-            ngraph::NodeVector{broadcast, m_nonZero},
-            ngraph::ParameterVector{m_param},
-            "NonZero-Broadcast");
+        return std::make_shared<ngraph::opset5::Broadcast>(broadcastInput, broadcastTargetShape, ngraph::op::BroadcastType::BIDIRECTIONAL);
     }

     InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override {
-        // We emulate dynamic shape through the number of non-zeros in NonZero input tensor
-        const auto& broadcastParams = std::get<0>(GetParam());
-        const auto numNonZeros = broadcastParams.targetShape.shape[getDynamicAxis(
-            broadcastParams.targetShape.upperBoundShape,
-            broadcastParams.targetShape.shape)];
+        if (info.name() == m_parameterVector.front()->get_friendly_name()) {
+            // We emulate dynamic target shape through the number of non-zeros in NonZero input tensor
+            const auto& broadcastParams = std::get<0>(GetParam());
+            const auto numNonZeros = broadcastParams.targetShape.shape[getDynamicAxis(
+                broadcastParams.targetShape.upperBoundShape,
+                broadcastParams.targetShape.shape)];

-        auto tensorDesc = info.getTensorDesc();
-        auto blob = make_blob_with_precision(tensorDesc);
-        blob->allocate();
-        CommonTestUtils::fill_data_const(blob, 0);
+            auto tensorDesc = info.getTensorDesc();
+            auto blob = make_blob_with_precision(tensorDesc);
+            blob->allocate();
+            CommonTestUtils::fill_data_const(blob, 0);

-        InferenceEngine::SizeVector newDims = {numNonZeros};
-        blob->getTensorDesc().setDims(newDims);
-        CommonTestUtils::fill_data_const(blob, 1);
+            InferenceEngine::SizeVector newDims = {numNonZeros};
+            blob->getTensorDesc().setDims(newDims);
+            CommonTestUtils::fill_data_const(blob, 1);

-        blob->getTensorDesc().setDims(tensorDesc.getDims());
+            blob->getTensorDesc().setDims(tensorDesc.getDims());

-        return blob;
-    }
+            return blob;
+        }

-protected:
-    std::shared_ptr<ngraph::Node> m_broadcastInput;
-    std::shared_ptr<ngraph::Node> m_broadcastTargetShape;
-    std::shared_ptr<ngraph::opset5::NonZero> m_nonZero;
-    std::shared_ptr<ngraph::opset5::Parameter> m_param;
-};
-
-TEST_P(NonZero_BroadcastBidirectional, CompareWithReference) {
-    Run();
-}
-
-std::vector<BroadcastInputParams> broadcastBidirectionalTestParams = {
-    { {1, 1, 4}, DataShapeWithUpperBound{ {200, 2, 4}, {300, 2, 4} }, {} },
-    { {15, 14}, DataShapeWithUpperBound{ {2, 16, 1, 14}, {2, 16, 15, 14} }, {} },
-    { {15, 1}, DataShapeWithUpperBound{ {1, 16, 15, 14}, {2, 16, 15, 14} }, {} },
-    { {2, 16, 15, 14}, DataShapeWithUpperBound{ {1, 15, 14}, {16, 15, 14} }, {} },
-    { {2, 16, 15, 14}, DataShapeWithUpperBound{ {16, 1, 1}, {16, 1, 14} }, {} },
-    { {16, 15, 1}, DataShapeWithUpperBound{ {2, 1, 15, 14}, {2, 16, 15, 14} }, {} },
-};
-
-INSTANTIATE_TEST_CASE_P(smoke_DynamicBroadcast, NonZero_BroadcastBidirectional,
-    ::testing::Combine(
-        ::testing::ValuesIn(broadcastBidirectionalTestParams),
-        ::testing::Values(ngraph::element::f16, ngraph::element::f32, ngraph::element::i32),
-        ::testing::Values(CommonTestUtils::DEVICE_MYRIAD)));
-
-using BroadcastExplicitTestParams = std::tuple<
-    BroadcastTestParams, TensorShape, TensorType, LayerTestsUtils::TargetDevice>;
-
-class NonZero_BroadcastExplicit : public NonZero_BroadcastBidirectional {
-protected:
-    void SetUp() override {
-        prepareBroadcastInputs();
-
-        const auto& axesMapping = std::get<0>(GetParam()).axesMapping;
-        const auto axesMappingConst = ngraph::opset5::Constant::create(ngraph::element::i64, ngraph::Shape{axesMapping.size()}, axesMapping);
-
-        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(m_broadcastInput, m_broadcastTargetShape, axesMappingConst);
-
-        function = std::make_shared<ngraph::Function>(
-            ngraph::NodeVector{broadcast, m_nonZero},
-            ngraph::ParameterVector{m_param},
-            "NonZero-Broadcast");
+        return DSR_TestsCommon::GenerateInput(info);
     }
 };

-TEST_P(NonZero_BroadcastExplicit, CompareWithReference) {
+TEST_P(NonZero_Broadcast, CompareWithReference) {
     Run();
 }

-std::vector<BroadcastInputParams> broadcastExplicitTestParams = {
-    { {1}, DataShapeWithUpperBound{ {1, 800}, {1, 1000} }, {0} },
-    { {4}, DataShapeWithUpperBound{ {100, 4}, {1000, 4} }, {1} },
-    { {128, 256}, DataShapeWithUpperBound{ {1, 128, 256}, {3, 128, 256} }, {1, 2} },
+std::vector<BroadcastInputParams> broadcastTestParams = {
+    { DataShapeWithUpperBound{ {1, 1, 4}, {} }, DataShapeWithUpperBound{ {200, 2, 4}, {300, 2, 4} }, {} },
+    { DataShapeWithUpperBound{ {15, 14}, {} }, DataShapeWithUpperBound{ {2, 16, 1, 14}, {2, 16, 15, 14} }, {} },
+    { DataShapeWithUpperBound{ {15, 1}, {} }, DataShapeWithUpperBound{ {1, 16, 15, 14}, {2, 16, 15, 14} }, {} },
+    { DataShapeWithUpperBound{ {2, 16, 15, 14}, {} }, DataShapeWithUpperBound{ {1, 15, 14}, {16, 15, 14} }, {} },
+    { DataShapeWithUpperBound{ {2, 16, 15, 14}, {} }, DataShapeWithUpperBound{ {16, 1, 1}, {16, 1, 14} }, {} },
+    { DataShapeWithUpperBound{ {16, 15, 1}, {} }, DataShapeWithUpperBound{ {2, 1, 15, 14}, {2, 16, 15, 14} }, {} },
+    { DataShapeWithUpperBound{ {142, 1, 1, 64}, {300, 1, 1, 64} }, DataShapeWithUpperBound{ {142, 3, 64, 64}, {300, 3, 64, 64} }, {} },
+    { DataShapeWithUpperBound{ {1}, {} }, DataShapeWithUpperBound{ {1, 800}, {1, 1000} }, {0} },
+    { DataShapeWithUpperBound{ {4}, {} }, DataShapeWithUpperBound{ {100, 4}, {1000, 4} }, {1} },
+    { DataShapeWithUpperBound{ {128, 256}, {} }, DataShapeWithUpperBound{ {1, 128, 256}, {3, 128, 256} }, {1, 2} },
 };

-INSTANTIATE_TEST_CASE_P(smoke_DynamicBroadcast, NonZero_BroadcastExplicit,
+INSTANTIATE_TEST_CASE_P(smoke_DynamicBroadcast, NonZero_Broadcast,
     ::testing::Combine(
-        ::testing::ValuesIn(broadcastExplicitTestParams),
+        ::testing::ValuesIn(broadcastTestParams),
         ::testing::Values(ngraph::element::f16, ngraph::element::f32, ngraph::element::i32),
         ::testing::Values(CommonTestUtils::DEVICE_MYRIAD)));
-
 }  // namespace
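For readers tracing the transformation: in BIDIRECTIONAL mode, the Gather/Maximum subgraph that `dynamicToStaticShapeBroadcast` emits evaluates the standard bidirectional broadcast rule at runtime. A minimal standalone sketch of that rule follows (illustrative names, not code from this patch):

```cpp
#include <algorithm>
#include <cstddef>
#include <vector>

// The per-dimension rule encoded by the Gather/Maximum subgraph above:
// right-align the two shapes, copy the leading dims of the higher-rank
// shape unchanged, and take the elementwise maximum where they overlap.
std::vector<std::size_t> bidirectionalBroadcastShape(std::vector<std::size_t> a,
                                                     std::vector<std::size_t> b) {
    if (a.size() < b.size()) std::swap(a, b);  // make `a` the maxRank shape
    std::vector<std::size_t> out = a;          // dims [0, maxRank - minRank) come straight from `a`
    const std::size_t offset = a.size() - b.size();
    for (std::size_t i = 0; i < b.size(); ++i) {
        out[offset + i] = std::max(a[offset + i], b[i]);  // Maximum(minRankDim, maxRankDim)
    }
    return out;
}

// Example matching one of the test cases above:
// bidirectionalBroadcastShape({16, 15, 1}, {2, 1, 15, 14}) == {2, 16, 15, 14}
```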