[IE][VPU]: Fixes BinaryEltwise DTS on empty input (#3879)

Makes DTS for BinaryEltwise produce an empty output tensor if at least one input is empty. As the criterion for an empty tensor, ReduceMin is used (assuming all shape values are non-negative).

Tests are changed accordingly. While trying to add a new test case for inference with an empty input, the reference implementation failed, so those tests are left unchanged.
This commit is contained in:
Gladilov, Gleb
2021-02-08 10:28:57 +03:00
committed by GitHub
parent 132b47394c
commit 785828d321
4 changed files with 321 additions and 73 deletions

View File

@@ -6,14 +6,14 @@ include_guard(GLOBAL)
set(VPU_SUPPORTED_FIRMWARES usb-ma2x8x pcie-ma2x8x)
set(VPU_SUPPORTED_FIRMWARES_HASH
"7892e82f8ba90b487c4b115bfc266265d8ceb6f3cfc3e7e203ec6150d041fa2c"
"bec36fa7a8b64cd50df8b7782c594df32267c5081d7aa2e77a701dcfa18b3ec6")
"c21f14cf8ee215f5fccf6b50de87e413b4c1ed8331f0c8fddb6c4d5746d884d7"
"f2521913ee6a024cf07bc823c4ed88e265b1c369666f027279f51d6a89e9e7de")
#
# Default packages
#
set(FIRMWARE_PACKAGE_VERSION 1599)
set(FIRMWARE_PACKAGE_VERSION 1606)
set(VPU_CLC_MA2X8X_VERSION "movi-cltools-20.09.2")
#

View File

@@ -1,4 +1,4 @@
// Copyright (C) 2020 Intel Corporation
// Copyright (C) 2020-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
@@ -9,11 +9,10 @@
#include <vpu/utils/error.hpp>
#include "ngraph/graph_util.hpp"
#include "ngraph/opsets/opset3.hpp"
#include "ngraph/opsets/opset5.hpp"
#include <ngraph/ops.hpp>
#include <ngraph/opsets/opset6.hpp>
#include <memory>
#include <numeric>
namespace vpu {
@@ -44,13 +43,35 @@ void processBinaryEltwise(std::shared_ptr<ngraph::Node> eltwise, size_t lhsIndex
const auto diff = std::abs(lhsRank.get_length() - rhsRank.get_length());
if (diff) {
auto & broadcastInput = lhsRank.get_length() < rhsRank.get_length() ? lhsInput : rhsInput;
const auto broadcastConst = ngraph::opset3::Constant::create(broadcastInput.get_element_type(), {static_cast<size_t>(diff)}, {1});
broadcastInput = std::make_shared<ngraph::opset3::Concat>(ngraph::OutputVector{broadcastConst, broadcastInput}, 0);
const auto broadcastConst = ngraph::opset6::Constant::create(broadcastInput.get_element_type(), {static_cast<size_t>(diff)}, {1});
broadcastInput = std::make_shared<ngraph::opset6::Concat>(ngraph::OutputVector{broadcastConst, broadcastInput}, 0);
}
const auto shape = std::make_shared<ngraph::opset3::Maximum>(lhsInput, rhsInput);
const auto& lhsInputShape = lhsInput.get_partial_shape();
const auto& rhsInputShape = rhsInput.get_partial_shape();
auto outDSR = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(copied, shape);
VPU_THROW_UNLESS(lhsInputShape.is_static() && lhsInputShape.rank().get_length() == 1,
"DynamicToStaticShape transformation for {} of type {} expects lhs input shape to be static 1D vector, actual is {}",
eltwise->get_friendly_name(), eltwise->get_type_info(), lhsInputShape);
VPU_THROW_UNLESS(rhsInputShape.is_static() && rhsInputShape.rank().get_length() == 1,
"DynamicToStaticShape transformation for {} of type {} expects rhs input shape to be static 1D vector, actual is {}",
eltwise->get_friendly_name(), eltwise->get_type_info(), rhsInputShape);
std::shared_ptr<ngraph::Node> maxShape = std::make_shared<ngraph::opset6::Maximum>(lhsInput, rhsInput);
const auto& updateOutputShapeOnZerosFrom = [&maxShape, &shapeElementType](const ngraph::Output<ngraph::Node>& input) {
const auto& shapeValue = input.get_partial_shape();
const auto& rank = ngraph::shape_size(shapeValue.to_shape());
const auto& zeros = ngraph::opset6::Constant::create(shapeElementType, {rank}, std::vector<std::int64_t>(rank, 0));
const auto& isZero = std::make_shared<ngraph::opset6::Equal>(input, zeros);
maxShape = std::make_shared<ngraph::opset6::Select>(isZero, zeros, maxShape);
};
updateOutputShapeOnZerosFrom(lhsInput);
updateOutputShapeOnZerosFrom(rhsInput);
auto outDSR = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(copied, maxShape);
outDSR->set_friendly_name(eltwise->get_friendly_name());
ngraph::replace_node(std::move(eltwise), std::move(outDSR));
}
@@ -58,7 +79,7 @@ void processBinaryEltwise(std::shared_ptr<ngraph::Node> eltwise, size_t lhsIndex
} // namespace
void dynamicToStaticShapeBinaryEltwise(std::shared_ptr<ngraph::Node> eltwise) {
if (eltwise->get_type_info() == ngraph::opset5::Select::type_info) {
if (eltwise->get_type_info() == ngraph::opset6::Select::type_info) {
processBinaryEltwise(eltwise, 1, 2);
} else {
VPU_THROW_UNLESS(eltwise->get_input_size() == 2,

View File

@@ -1,9 +1,8 @@
// Copyright (C) 2020 Intel Corporation
// Copyright (C) 2020-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <ngraph/opsets/opset3.hpp>
#include <ngraph/opsets/opset5.hpp>
#include <ngraph/opsets/opset6.hpp>
#include <ngraph/shape.hpp>
#include <ngraph/type/element_type.hpp>
@@ -15,6 +14,7 @@
#include <vpu/utils/error.hpp>
#include <ngraph_functions/utils/ngraph_helpers.hpp>
#include <numeric>
namespace {
@@ -52,17 +52,17 @@ protected:
const ngraph::Shape& dataDims0,
const ngraph::Shape& dataDims1,
TestShapeTypes testShapeTypes) const {
const auto input0 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims1);
const auto input0 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims1);
const auto input0Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto input0Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto dsr0 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input0, input0Dims);
ngraph::ParameterVector params{input0, input1, input0Dims};
std::shared_ptr<ngraph::Node> eltwiseInput1 = input1;
if (testShapeTypes == TestShapeTypes::ALL_DYNAMIC) {
const auto input1Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64,
const auto input1Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64,
ngraph::Shape{dataDims1.size()});
eltwiseInput1 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input1, input1Dims);
params.push_back(input1Dims);
@@ -92,20 +92,20 @@ public:
const ngraph::Shape& dataDims1,
TestShapeTypes testShapeTypes) {
// Data flow subgraph
const auto input0 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims1);
const auto input0 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims1);
const auto input0Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto input0Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto dsr0 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input0, input0Dims);
ngraph::ParameterVector params{input0, input1, input0Dims};
std::shared_ptr<ngraph::Node> dims;
if (testShapeTypes == TestShapeTypes:: ALL_DYNAMIC) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
params.push_back(std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
dims = params.back();
} else {
dims = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
dims = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
}
std::shared_ptr<ngraph::Node> eltwiseInput1 = input1;
@@ -116,8 +116,11 @@ public:
const auto eltwise = buildEltwise(eltwiseType, {dsr0, eltwiseInput1}, params, testShapeTypes);
// Shape infer subgraph
const auto maximum = std::make_shared<ngraph::opset3::Maximum>(input0Dims, dims);
const auto dsr_final = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maximum);
std::shared_ptr<ngraph::Node> maxShape = std::make_shared<ngraph::opset6::Maximum>(input0Dims, dims);
maxShape = updateOutputShapeOnZerosFrom(maxShape, input0Dims);
maxShape = updateOutputShapeOnZerosFrom(maxShape, dims);
const auto dsr_final = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maxShape);
const auto function = std::make_shared<ngraph::Function>(
ngraph::NodeVector{dsr_final},
@@ -135,20 +138,20 @@ public:
const ngraph::Shape& dataDims1,
TestShapeTypes testShapeTypes) {
// Data flow subgraph
const auto input0 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims1);
const auto input0 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims1);
const auto input0Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto input0Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto dsr0 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input0, input0Dims);
ngraph::ParameterVector params{input0, input1, input0Dims};
std::shared_ptr<ngraph::Node> dims;
if (testShapeTypes == TestShapeTypes::ALL_DYNAMIC) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
params.push_back(std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
dims = params.back();
} else {
dims = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
dims = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
}
std::shared_ptr<ngraph::Node> eltwiseInput1 = input1;
@@ -159,10 +162,14 @@ public:
const auto eltwise = buildEltwise(eltwiseType, {dsr0, eltwiseInput1}, params, testShapeTypes);
// Shape infer subgraph
const auto broadcastConst = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims1.size() - dataDims0.size()}, {1});
const auto concat = std::make_shared<ngraph::opset3::Concat>(ngraph::OutputVector{broadcastConst, input0Dims}, 0);
const auto maximum = std::make_shared<ngraph::opset3::Maximum>(concat, dims);
const auto dsrFinal = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maximum);
const auto broadcastConst = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims1.size() - dataDims0.size()}, {1});
const auto concat = std::make_shared<ngraph::opset6::Concat>(ngraph::OutputVector{broadcastConst, input0Dims}, 0);
std::shared_ptr<ngraph::Node> maxShape = std::make_shared<ngraph::opset6::Maximum>(concat, dims);
maxShape = updateOutputShapeOnZerosFrom(maxShape, concat);
maxShape = updateOutputShapeOnZerosFrom(maxShape, dims);
const auto dsrFinal = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maxShape);
const auto function = std::make_shared<ngraph::Function>(
ngraph::NodeVector{dsrFinal},
@@ -180,20 +187,20 @@ public:
const ngraph::Shape& dataDims1,
TestShapeTypes testShapeTypes) {
// Data flow subgraph
const auto input0 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims1);
const auto input0 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims1);
const auto input0Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto input0Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto dsr0 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input0, input0Dims);
ngraph::ParameterVector params{input0, input1, input0Dims};
std::shared_ptr<ngraph::Node> dims;
if (testShapeTypes == TestShapeTypes::ALL_DYNAMIC) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
params.push_back(std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
dims = params.back();
} else {
dims = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
dims = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
}
std::shared_ptr<ngraph::Node> eltwiseInput1 = input1;
@@ -204,10 +211,14 @@ public:
const auto eltwise = buildEltwise(eltwiseType, {dsr0, eltwiseInput1}, params, testShapeTypes);
// Shape infer subgraph
const auto broadcastConst = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims0.size() - dataDims1.size()}, {1});
const auto concat = std::make_shared<ngraph::opset3::Concat>(ngraph::OutputVector{broadcastConst, dims}, 0);
const auto maximum = std::make_shared<ngraph::opset3::Maximum>(input0Dims, concat);
const auto dsrFinal = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maximum);
const auto broadcastConst = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims0.size() - dataDims1.size()}, {1});
const auto concat = std::make_shared<ngraph::opset6::Concat>(ngraph::OutputVector{broadcastConst, dims}, 0);
std::shared_ptr<ngraph::Node> maxShape = std::make_shared<ngraph::opset6::Maximum>(input0Dims, concat);
maxShape = updateOutputShapeOnZerosFrom(maxShape, input0Dims);
maxShape = updateOutputShapeOnZerosFrom(maxShape, concat);
const auto dsrFinal = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maxShape);
const auto function = std::make_shared<ngraph::Function>(
ngraph::NodeVector{dsrFinal},
@@ -224,13 +235,13 @@ private:
const ngraph::OutputVector& inputs,
ngraph::ParameterVector& params,
TestShapeTypes testShapeTypes) {
if (eltwiseType == ngraph::opset5::Select::type_info) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(
if (eltwiseType == ngraph::opset6::Select::type_info) {
params.push_back(std::make_shared<ngraph::opset6::Parameter>(
ngraph::element::boolean,
ngraph::Shape{inputs.front().get_shape()}));
std::shared_ptr<ngraph::Node> condInput = params.back();
if (testShapeTypes == TestShapeTypes::ALL_DYNAMIC) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(
params.push_back(std::make_shared<ngraph::opset6::Parameter>(
ngraph::element::i64,
ngraph::Shape{static_cast<size_t>(inputs.front().get_partial_shape().rank().get_length())}));
condInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(condInput, params.back());
@@ -240,6 +251,16 @@ private:
return ngraph::helpers::getNodeSharedPtr(eltwiseType, inputs);
}
}
// Builds a shape-infer subgraph that forces the output shape to zero in every
// dimension where the given input shape is zero: an empty input must produce an
// empty output, regardless of what the broadcasted "maximum" shape says.
// outputShape - the 1D shape tensor computed so far (e.g. element-wise Maximum)
// inputShape  - the 1D shape tensor of one eltwise input
// Returns Select(inputShape == 0, 0, outputShape), applied element-wise.
// NOTE(review): to_shape() requires inputShape's partial shape to be static —
// assumed to hold for the 1D shape tensors built by these tests; confirm if reused.
static std::shared_ptr<ngraph::Node> updateOutputShapeOnZerosFrom(
const std::shared_ptr<ngraph::Node>& outputShape, const ngraph::Output<ngraph::Node>& inputShape) {
const auto& shapeValue = inputShape.get_partial_shape();
// rank = number of elements in the 1D shape tensor, i.e. one per dimension
const auto& rank = ngraph::shape_size(shapeValue.to_shape());
const auto& zeros = ngraph::opset6::Constant::create(ngraph::element::i64, {rank}, std::vector<std::int64_t>(rank, 0));
// Per-dimension mask: true where the input dimension is 0
const auto& isZero = std::make_shared<ngraph::opset6::Equal>(inputShape, zeros);
// Where the mask is true pick 0, otherwise keep the previously computed dimension
return std::make_shared<ngraph::opset6::Select>(isZero, zeros, outputShape);
}
};
TEST_P(DynamicToStaticShapeEltwise, CompareFunctions) {
@@ -253,17 +274,17 @@ INSTANTIATE_TEST_CASE_P(smoke_EltwiseBroadcast, DynamicToStaticShapeEltwise, tes
ngraph::element::i64,
ngraph::element::u8),
testing::Values(
ngraph::opset3::Add::type_info,
ngraph::opset3::Divide::type_info,
ngraph::opset3::Equal::type_info,
ngraph::opset3::Greater::type_info,
ngraph::opset3::Power::type_info,
ngraph::opset3::Multiply::type_info,
ngraph::opset3::Subtract::type_info,
ngraph::opset3::Maximum::type_info,
ngraph::opset3::Minimum::type_info,
ngraph::opset3::Less::type_info,
ngraph::opset5::Select::type_info),
ngraph::opset6::Add::type_info,
ngraph::opset6::Divide::type_info,
ngraph::opset6::Equal::type_info,
ngraph::opset6::Greater::type_info,
ngraph::opset6::Power::type_info,
ngraph::opset6::Multiply::type_info,
ngraph::opset6::Subtract::type_info,
ngraph::opset6::Maximum::type_info,
ngraph::opset6::Minimum::type_info,
ngraph::opset6::Less::type_info,
ngraph::opset6::Select::type_info),
testing::Values(
EltwiseParams{DataDims{1000}, DataDims{1}, DynamicToStaticShapeEltwise::reference_simple},
EltwiseParams{DataDims{1000, 1, 1}, DataDims{1000, 1, 1}, DynamicToStaticShapeEltwise::reference_simple},
@@ -272,4 +293,4 @@ INSTANTIATE_TEST_CASE_P(smoke_EltwiseBroadcast, DynamicToStaticShapeEltwise, tes
testing::Values(TestShapeTypes::ALL_DYNAMIC, TestShapeTypes::SINGLE_DSR)
));
} // namespace
} // namespace

View File

@@ -1,4 +1,4 @@
// Copyright (C) 2020 Intel Corporation
// Copyright (C) 2020-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
@@ -8,6 +8,8 @@
#include <ngraph_functions/builders.hpp>
#include <vpu/ngraph/operations/dynamic_shape_resolver.hpp>
#include <ngraph/opsets/opset6.hpp>
namespace {
using namespace LayerTestsUtils::vpu;
@@ -37,7 +39,7 @@ protected:
const auto inputSubgraph0 = createInputSubgraphWithDSR(inDataType, inDataShapes.lhs);
const auto inputSubgraph1 = createInputSubgraphWithDSR(inDataType, inDataShapes.rhs);
const auto eltwise = eltwiseType == ngraph::opset5::Select::type_info ?
const auto eltwise = eltwiseType == ngraph::opset6::Select::type_info ?
ngraph::helpers::getNodeSharedPtr(eltwiseType, {createInputSubgraphWithDSR(
ngraph::element::boolean, inDataShapes.lhs), inputSubgraph0, inputSubgraph1}) :
ngraph::helpers::getNodeSharedPtr(eltwiseType, {inputSubgraph0, inputSubgraph1});
@@ -59,7 +61,7 @@ protected:
const auto inputSubgraph0 = createInputSubgraphWithDSR(inDataType, inDataShapes.lhs);
const auto input1 = createParameter(inDataType, inDataShapes.rhs.shape);
const auto eltwise = eltwiseType == ngraph::opset5::Select::type_info ?
const auto eltwise = eltwiseType == ngraph::opset6::Select::type_info ?
ngraph::helpers::getNodeSharedPtr(eltwiseType, {createParameter(
ngraph::element::boolean, inDataShapes.rhs.shape), inputSubgraph0, input1}) :
ngraph::helpers::getNodeSharedPtr(eltwiseType, {inputSubgraph0, input1});
@@ -69,20 +71,20 @@ protected:
};
static const std::vector<ngraph::NodeTypeInfo> binaryEltwiseTypeVector = {
ngraph::opset3::Add::type_info,
ngraph::opset3::Multiply::type_info,
ngraph::opset3::Divide::type_info,
ngraph::opset3::Subtract::type_info,
ngraph::opset3::Equal::type_info,
ngraph::opset3::Greater::type_info,
ngraph::opset3::Power::type_info,
ngraph::opset5::Select::type_info,
ngraph::opset6::Add::type_info,
ngraph::opset6::Multiply::type_info,
ngraph::opset6::Divide::type_info,
ngraph::opset6::Subtract::type_info,
ngraph::opset6::Equal::type_info,
ngraph::opset6::Greater::type_info,
ngraph::opset6::Power::type_info,
ngraph::opset6::Select::type_info,
};
static const std::set<ngraph::NodeTypeInfo> doNotSupportI32 = {
ngraph::opset3::Power::type_info,
ngraph::opset3::Equal::type_info,
ngraph::opset3::Greater::type_info,
ngraph::opset6::Power::type_info,
ngraph::opset6::Equal::type_info,
ngraph::opset6::Greater::type_info,
};
TEST_P(DSR_BinaryElementwiseBothDSR, CompareWithReference) {
@@ -135,4 +137,208 @@ INSTANTIATE_TEST_CASE_P(smoke_DynamicBinaryElementwiseSingleDSR, DSR_BinaryEleme
::testing::ValuesIn(binaryEltwiseTypeVector),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD)));
// Test fixture that, instead of comparing output values against a reference
// (which fails on empty inputs — see the commit message), only validates the
// inferred output SHAPE of a binary eltwise with two dynamic (DSR) inputs.
class DSR_BinaryElementwiseBothDSRCheckOutputShape : public DSR_BinaryElementwiseBothDSR {
protected:
void Validate() override {
const auto& actualOutputs = GetOutputs();
ASSERT_EQ(actualOutputs.size(), 1);
const auto& output = actualOutputs.front();
const auto& outputShape = output->getTensorDesc().getDims();
const auto& parameters = GetParam();
const auto& inputShapes = std::get<1>(parameters);
const auto& lhsShape = inputShapes.lhs.shape;
const auto& rhsShape = inputShapes.rhs.shape;
// Reference shape: NumPy-style broadcast of the two runtime input shapes
auto broadcastedPartialShape = ngraph::PartialShape{lhsShape};
ngraph::PartialShape::broadcast_merge_into(broadcastedPartialShape, ngraph::PartialShape{rhsShape}, ngraph::op::AutoBroadcastSpec::NUMPY);
const auto& broadcasted = broadcastedPartialShape.to_shape();
ASSERT_EQ(broadcasted, outputShape);
// Every parameterized case has at least one zero dimension, so both the
// reference and the actual output must describe an EMPTY tensor.
const auto& refTotal = ngraph::shape_size(broadcasted);
const auto& actTotal = std::accumulate(outputShape.cbegin(), outputShape.cend(), static_cast<std::size_t>(1), std::multiplies<std::size_t>());
ASSERT_EQ(refTotal, 0);
ASSERT_EQ(actTotal, 0);
}
};
// Runs the shape-only validation above for every (dtype, shapes, eltwise type)
// combination, skipping eltwise types that do not support int32_t inputs.
TEST_P(DSR_BinaryElementwiseBothDSRCheckOutputShape, CheckOutputShape) {
const auto& inDataType = std::get<0>(GetParam());
const auto& eltwiseType = std::get<2>(GetParam());
if (doNotSupportI32.count(eltwiseType) && inDataType == ngraph::element::i32) {
SKIP() << eltwiseType.name << " doesn't support int32_t inputs" << std::endl;
}
Run();
}
// Test shapes for the empty-tensor case: each pair is {lhs, rhs}, each side a
// {runtime shape, upper-bound shape}. Every pair has at least one zero runtime
// dimension, so the broadcasted eltwise output must be empty.
std::vector<BinaryEltwiseShapes> dataShapesWithUpperBoundBothDSREmpty = {
// 1D: empty on the left, the right, or both
{
DataShapeWithUpperBound{DataShape{0}, DataShape{1}},
DataShapeWithUpperBound{DataShape{1}, DataShape{2}},
},
{
DataShapeWithUpperBound{DataShape{1}, DataShape{2}},
DataShapeWithUpperBound{DataShape{0}, DataShape{1}},
},
{
DataShapeWithUpperBound{DataShape{0}, DataShape{1}},
DataShapeWithUpperBound{DataShape{0}, DataShape{1}},
},
// 2D empty lhs broadcast against 1D rhs
{
DataShapeWithUpperBound{DataShape{0, 2}, DataShape{1, 3}},
DataShapeWithUpperBound{DataShape{1}, DataShape{3}},
},
{
DataShapeWithUpperBound{DataShape{2, 0}, DataShape{3, 2}},
DataShapeWithUpperBound{DataShape{1}, DataShape{2}},
},
{
DataShapeWithUpperBound{DataShape{0, 0}, DataShape{1, 2}},
DataShapeWithUpperBound{DataShape{1}, DataShape{2}},
},
// Same cases mirrored: 1D lhs against 2D empty rhs
{
DataShapeWithUpperBound{DataShape{1}, DataShape{3}},
DataShapeWithUpperBound{DataShape{0, 2}, DataShape{1, 3}},
},
{
DataShapeWithUpperBound{DataShape{1}, DataShape{2}},
DataShapeWithUpperBound{DataShape{2, 0}, DataShape{3, 2}},
},
{
DataShapeWithUpperBound{DataShape{1}, DataShape{2}},
DataShapeWithUpperBound{DataShape{0, 0}, DataShape{1, 2}},
},
// Both sides 2D and empty
{
DataShapeWithUpperBound{DataShape{0, 2}, DataShape{1, 3}},
DataShapeWithUpperBound{DataShape{0, 2}, DataShape{1, 3}},
},
{
DataShapeWithUpperBound{DataShape{2, 0}, DataShape{3, 1}},
DataShapeWithUpperBound{DataShape{2, 0}, DataShape{3, 1}},
},
{
DataShapeWithUpperBound{DataShape{0, 0}, DataShape{1, 1}},
DataShapeWithUpperBound{DataShape{0, 0}, DataShape{1, 1}},
},
// 3D empty lhs broadcast against 2D rhs
{
DataShapeWithUpperBound{DataShape{0, 2, 3}, DataShape{1, 3, 4}},
DataShapeWithUpperBound{DataShape{2, 3}, DataShape{3, 4}},
},
{
DataShapeWithUpperBound{DataShape{4, 0, 3}, DataShape{5, 2, 4}},
DataShapeWithUpperBound{DataShape{1, 3}, DataShape{2, 4}},
},
{
DataShapeWithUpperBound{DataShape{4, 5, 0}, DataShape{5, 6, 2}},
DataShapeWithUpperBound{DataShape{5, 1}, DataShape{6, 2}},
},
// Mirrored: 2D lhs against 3D empty rhs
{
DataShapeWithUpperBound{DataShape{2, 3}, DataShape{3, 4}},
DataShapeWithUpperBound{DataShape{0, 2, 3}, DataShape{1, 3, 4}},
},
{
DataShapeWithUpperBound{DataShape{1, 3}, DataShape{2, 4}},
DataShapeWithUpperBound{DataShape{4, 0, 3}, DataShape{5, 2, 4}},
},
{
DataShapeWithUpperBound{DataShape{5, 1}, DataShape{6, 2}},
DataShapeWithUpperBound{DataShape{4, 5, 0}, DataShape{5, 6, 2}},
},
// Fully-zero shapes of different ranks
{
DataShapeWithUpperBound{DataShape{0, 0}, DataShape{1, 1}},
DataShapeWithUpperBound{DataShape{0, 0, 0}, DataShape{1, 1, 1}},
},
// 4D empty lhs against various broadcastable 3D rhs
{
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
DataShapeWithUpperBound{DataShape{7, 5, 6}, DataShape{8, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
DataShapeWithUpperBound{DataShape{1, 5, 6}, DataShape{8, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
DataShapeWithUpperBound{DataShape{7, 1, 6}, DataShape{8, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
DataShapeWithUpperBound{DataShape{7, 5, 1}, DataShape{8, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
DataShapeWithUpperBound{DataShape{1, 1, 1}, DataShape{8, 6, 7}},
},
// Zero in an inner (non-leading) dimension of the 4D lhs
{
DataShapeWithUpperBound{DataShape{8, 0, 5, 6}, DataShape{9, 1, 6, 7}},
DataShapeWithUpperBound{DataShape{1, 5, 6}, DataShape{1, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{8, 0, 5, 6}, DataShape{9, 2, 6, 7}},
DataShapeWithUpperBound{DataShape{1, 1, 6}, DataShape{2, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{8, 0, 5, 6}, DataShape{9, 2, 6, 7}},
DataShapeWithUpperBound{DataShape{1, 5, 1}, DataShape{2, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{8, 0, 5, 6}, DataShape{9, 2, 6, 7}},
DataShapeWithUpperBound{DataShape{1, 1, 1}, DataShape{2, 6, 7}},
},
// Mirrored: 3D lhs against 4D empty rhs
{
DataShapeWithUpperBound{DataShape{7, 5, 6}, DataShape{8, 6, 7}},
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{1, 5, 6}, DataShape{8, 6, 7}},
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{7, 1, 6}, DataShape{8, 6, 7}},
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{7, 5, 1}, DataShape{8, 6, 7}},
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{1, 1, 1}, DataShape{8, 6, 7}},
DataShapeWithUpperBound{DataShape{0, 7, 5, 6}, DataShape{1, 8, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{1, 5, 6}, DataShape{2, 6, 7}},
DataShapeWithUpperBound{DataShape{8, 0, 5, 6}, DataShape{9, 2, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{1, 1, 6}, DataShape{2, 6, 7}},
DataShapeWithUpperBound{DataShape{8, 0, 5, 6}, DataShape{9, 2, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{1, 5, 1}, DataShape{2, 6, 7}},
DataShapeWithUpperBound{DataShape{8, 0, 5, 6}, DataShape{9, 2, 6, 7}},
},
{
DataShapeWithUpperBound{DataShape{1, 1, 1}, DataShape{2, 6, 7}},
DataShapeWithUpperBound{DataShape{8, 0, 5, 6}, DataShape{9, 2, 6, 7}},
},
// 6D case with zeros scattered across both sides
{
DataShapeWithUpperBound{DataShape{2, 3, 1, 0, 1, 0}, DataShape{3, 4, 5, 2, 2, 1}},
DataShapeWithUpperBound{DataShape{2, 1, 4, 1, 0, 0}, DataShape{3, 4, 5, 2, 2, 1}},
},
};
// Instantiates the shape-only test over all supported data types, the empty
// shape pairs above, every binary eltwise type, and the MYRIAD device.
INSTANTIATE_TEST_CASE_P(smoke_BinaryElementwiseBothDSRCheckOutputShape, DSR_BinaryElementwiseBothDSRCheckOutputShape,
::testing::Combine(
::testing::Values(ngraph::element::f16, ngraph::element::f32, ngraph::element::i32),
::testing::ValuesIn(dataShapesWithUpperBoundBothDSREmpty),
::testing::ValuesIn(binaryEltwiseTypeVector),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD)));
} // namespace