[IE][VPU]: Support dynamic data in Broadcast DTS (#3548)

Ticket: #-44546
Changes:

* Support dynamic data as the broadcast input in the Broadcast DTS (dynamic-to-static shape) transformation (see the sketch below, after the commit metadata)
* Update the DTS tests to cover both dynamic and static inputs
* Update the inference tests:
  a) Refactor the tests into a single test class, NonZero_Broadcast
  b) Make DSR_TestsCommon the base class so that createInputSubgraphWithDSR and the input-generation utilities are reused
  c) Add the ability to register additional results in DSR_TestsCommon: NonZero does not support having both of its outputs unused, so at least one of them has to be added to the function results
Andrew Bakalin authored on 2020-12-11 12:29:16 +03:00 (committed via GitHub)
parent b6f311b463
commit 578ea2fc3c
4 changed files with 190 additions and 151 deletions
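
For context: the VPU plugin models a dynamic tensor as a DynamicShapeResolver (DSR) node that pairs the data tensor with a second input carrying its actual runtime shape. Previously the Broadcast DTS pass assumed the data input of Broadcast had a static shape; with this change it also recognizes a DSR-wrapped data input and takes the shape from the DSR's shape input. A minimal sketch of the pattern the pass now handles, using the same types and constructors that appear in the diff below (the surrounding setup is assumed):

    // Dynamic data input: the real shape arrives at runtime through dataShape.
    const auto data = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f16, ngraph::Shape{2, 16, 15, 14});
    const auto dataShape = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::i32, ngraph::Shape{4});
    const auto dsr = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(data, dataShape);
    // Bidirectional Broadcast over the dynamic input; the pass rewrites it into
    // StaticShapeBroadcast and re-attaches a DSR that carries the output shape.
    const auto targetShape = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::i32, ngraph::Shape{4});
    const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(dsr, targetShape, ngraph::op::BroadcastType::BIDIRECTIONAL);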

View File

@@ -45,23 +45,19 @@ void dynamicToStaticShapeBroadcast(std::shared_ptr<ngraph::Node> target) {
     std::shared_ptr<ngraph::Node> dsr;
     if (broadcast->get_broadcast_spec() == ngraph::op::BroadcastType::BIDIRECTIONAL) {
-        const auto inputShape = broadcast->get_input_shape(0);
+        const auto dataDSR = ngraph::as_type_ptr<ngraph::vpu::op::DynamicShapeResolver>(broadcast->input_value(0).get_node_shared_ptr());
+        const auto shapeElementType = dataDSR ? dataDSR->get_input_element_type(1) : broadcast->get_input_element_type(1);
+        const auto dataShape = dataDSR ? dataDSR->input_value(1) : shapeToConstant(shapeElementType, broadcast->get_input_shape(0));
         const auto targetShape = broadcast->input_value(1).get_node_shared_ptr();
-        const auto shapeType = targetShape->get_element_type();
-        const auto inputShapeDimsCount = inputShape.size();
+        const auto dataShapeDimsCount = ngraph::shape_size(dataShape.get_shape());
         const auto targetShapeDimsCount = ngraph::shape_size(broadcast->get_input_partial_shape(1).get_shape());
-        const auto inputShapeConst = std::make_shared<ngraph::opset5::Constant>(
-            shapeType,
-            ngraph::Shape{static_cast<size_t>(inputShapeDimsCount)},
-            inputShape);
-        const auto minRank = std::min(inputShapeDimsCount, targetShapeDimsCount);
-        const auto maxRank = std::max(inputShapeDimsCount, targetShapeDimsCount);
-        const auto minRankNode = minRank == inputShapeDimsCount ? inputShapeConst : targetShape;
-        const auto maxRankNode = minRank == inputShapeDimsCount ? targetShape : inputShapeConst;
+        const auto minRank = std::min(dataShapeDimsCount, targetShapeDimsCount);
+        const auto maxRank = std::max(dataShapeDimsCount, targetShapeDimsCount);
+        const auto minRankNode = minRank == dataShapeDimsCount ? dataShape : targetShape;
+        const auto maxRankNode = minRank == dataShapeDimsCount ? targetShape : dataShape;
         ngraph::NodeVector dims;
@@ -69,19 +65,19 @@ void dynamicToStaticShapeBroadcast(std::shared_ptr<ngraph::Node> target) {
             dims.push_back(
                 std::make_shared<ngraph::opset5::Gather>(
                     maxRankNode,
-                    ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {i}),
-                    ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {0})));
+                    ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {i}),
+                    ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {0})));
         }
         for (int i = 0; i < minRank; i++) {
             const auto minRankDim = std::make_shared<ngraph::opset5::Gather>(
                 minRankNode,
-                ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {i}),
-                ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {0}));
+                ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {i}),
+                ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {0}));
             const auto maxRankDim = std::make_shared<ngraph::opset5::Gather>(
                 maxRankNode,
-                ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {maxRank - minRank + i}),
-                ngraph::opset5::Constant::create(shapeType, ngraph::Shape{1}, {0}));
+                ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {maxRank - minRank + i}),
+                ngraph::opset5::Constant::create(shapeElementType, ngraph::Shape{1}, {0}));
             dims.push_back(std::make_shared<ngraph::opset5::Maximum>(minRankDim, maxRankDim));
         }
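
The rewritten loop computes the static output shape of a bidirectional broadcast: the two shape vectors are right-aligned, the extra leading dimensions of the higher-rank shape are copied through, and every overlapping dimension becomes an elementwise Maximum. A scalar model of the same computation, using one of the shape pairs from the tests below (illustration only, not part of the pass):

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    int main() {
        const std::vector<int64_t> minShape{16, 15, 1};     // lower-rank data shape
        const std::vector<int64_t> maxShape{2, 1, 15, 14};  // higher-rank target shape
        const std::size_t offset = maxShape.size() - minShape.size();
        // Leading dims come from the higher-rank shape unchanged.
        std::vector<int64_t> out(maxShape.begin(), maxShape.begin() + offset);
        // Overlapping dims are the elementwise maximum of the two shapes.
        for (std::size_t i = 0; i < minShape.size(); ++i)
            out.push_back(std::max(minShape[i], maxShape[offset + i]));
        // out == {2, 16, 15, 14}: the bidirectional broadcast output shape.
        return 0;
    }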

View File

@@ -6,6 +6,7 @@
 #include "vpu/ngraph/transformations/dynamic_to_static_shape.hpp"
 #include "vpu/ngraph/operations/static_shape_broadcast.hpp"
 #include "vpu/ngraph/operations/dynamic_shape_resolver.hpp"
+#include "vpu/ngraph/utilities.hpp"
 #include <ngraph_functions/utils/ngraph_helpers.hpp>
 #include <ngraph/function.hpp>
@@ -26,12 +27,22 @@ using TensorType = ngraph::element::Type;
 using TensorShape = ngraph::PartialShape;
 using AxesMapping = std::vector<size_t>;
+enum class BroadcastInputType {
+    DYNAMIC,
+    STATIC
+};
 struct BroadcastShapes {
     TensorShape srcShape;
     TensorShape targetShape;
     AxesMapping axesMapping;
 };
-using BroadcastTestParams = std::tuple<TensorType, BroadcastShapes>;
+using BroadcastTestParams = std::tuple<
+    TensorType,
+    TensorType,
+    BroadcastShapes,
+    BroadcastInputType>;
 class DynamicToStaticShapeBroadcastExplicitTests
         : public CommonTestUtils::TestsCommon,
@@ -40,37 +51,52 @@ public:
     void SetUp() override {
         const auto& parameters = GetParam();
         const auto& tensorType = std::get<0>(parameters);
-        const auto& tensorShape = std::get<1>(parameters).srcShape;
-        const auto& targetShape = std::get<1>(parameters).targetShape;
-        const auto& axesMapping = std::get<1>(parameters).axesMapping;
+        const auto& shapeType = std::get<1>(parameters);
+        const auto& tensorShape = std::get<2>(parameters).srcShape;
+        const auto& targetShape = std::get<2>(parameters).targetShape;
+        const auto& axesMapping = std::get<2>(parameters).axesMapping;
+        const auto& broadcastInputType = std::get<3>(parameters);
         ngraph::helpers::CompareFunctions(
-            *transform(tensorType, tensorShape, targetShape, axesMapping),
-            *reference(tensorType, tensorShape, targetShape, axesMapping));
+            *transform(tensorType, shapeType, tensorShape, targetShape, axesMapping, broadcastInputType),
+            *reference(tensorType, shapeType, tensorShape, targetShape, axesMapping, broadcastInputType));
     }
 protected:
     std::shared_ptr<const ngraph::Function> transform(
             const TensorType& tensorType,
+            const TensorType& shapeType,
             const TensorShape& tensorShape,
             const TensorShape& targetShape,
-            const AxesMapping& axesMapping) const {
+            const AxesMapping& axesMapping,
+            BroadcastInputType broadcastInputType) const {
         const auto tensorParam = std::make_shared<ngraph::opset3::Parameter>(tensorType, tensorShape);
         const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset3::Parameter>(tensorType, targetShape);
-        const auto shapeOfNode = std::make_shared<ngraph::opset3::ShapeOf>(tensorWithTargetShapeParam);
+        const auto shapeOfNode = std::make_shared<ngraph::opset3::ShapeOf>(tensorWithTargetShapeParam, shapeType);
         shapeOfNode->set_is_foldable(false);
+        ngraph::ParameterVector params{tensorParam, tensorWithTargetShapeParam};
+        std::shared_ptr<ngraph::Node> broadcastInput = tensorParam;
+        if (broadcastInputType == BroadcastInputType::DYNAMIC) {
+            const auto shapeParam = std::make_shared<ngraph::opset5::Parameter>(
+                shapeType,
+                ngraph::Shape{static_cast<size_t>(tensorShape.rank().get_length())});
+            params.push_back(shapeParam);
+            broadcastInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(tensorParam, shapeParam);
+        }
         const auto axesMappingConstant = std::make_shared<ngraph::opset3::Constant>(
             ngraph::element::u64,
             ngraph::Shape{axesMapping.size()},
             axesMapping);
-        const auto broadcast = std::make_shared<ngraph::opset3::Broadcast>(tensorParam, shapeOfNode, axesMappingConstant);
+        const auto broadcast = std::make_shared<ngraph::opset3::Broadcast>(broadcastInput, shapeOfNode, axesMappingConstant);
         auto function = std::make_shared<ngraph::Function>(
             ngraph::NodeVector{broadcast},
-            ngraph::ParameterVector{tensorParam, tensorWithTargetShapeParam},
+            params,
             "Actual");
         // We need to set broadcast output shape to make its rank static.
@@ -87,24 +113,37 @@ protected:
     std::shared_ptr<const ngraph::Function> reference(
             const TensorType& tensorType,
+            const TensorType& shapeType,
             const TensorShape& tensorShape,
             const TensorShape& targetShape,
-            const AxesMapping& axesMapping) const {
+            const AxesMapping& axesMapping,
+            BroadcastInputType broadcastInputType) const {
         const auto tensorParam = std::make_shared<ngraph::opset3::Parameter>(tensorType, tensorShape);
         const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset3::Parameter>(tensorType, targetShape);
-        const auto shapeOf = std::make_shared<ngraph::opset3::ShapeOf>(tensorWithTargetShapeParam);
+        const auto shapeOf = std::make_shared<ngraph::opset3::ShapeOf>(tensorWithTargetShapeParam, shapeType);
+        ngraph::ParameterVector params{tensorParam, tensorWithTargetShapeParam};
+        std::shared_ptr<ngraph::Node> broadcastInput = tensorParam;
+        if (broadcastInputType == BroadcastInputType::DYNAMIC) {
+            const auto shapeParam = std::make_shared<ngraph::opset5::Parameter>(
+                shapeType,
+                ngraph::Shape{static_cast<size_t>(tensorShape.rank().get_length())});
+            params.push_back(shapeParam);
+            broadcastInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(tensorParam, shapeParam);
+        }
         const auto axesMappingConstant = std::make_shared<ngraph::opset3::Constant>(
-            ngraph::element::u64,
+            ngraph::element::i64,
             ngraph::Shape{axesMapping.size()},
             axesMapping);
-        const auto staticShapeBroadcast = std::make_shared<ngraph::vpu::op::StaticShapeBroadcast>(tensorParam, shapeOf, axesMappingConstant);
+        const auto staticShapeBroadcast = std::make_shared<ngraph::vpu::op::StaticShapeBroadcast>(broadcastInput, shapeOf, axesMappingConstant);
         const auto dsrOut = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(staticShapeBroadcast, shapeOf);
         return std::make_shared<ngraph::Function>(
             ngraph::NodeVector{dsrOut},
-            ngraph::ParameterVector{tensorParam, tensorWithTargetShapeParam},
+            params,
             "Expected");
     }
 };
@@ -119,9 +158,15 @@ INSTANTIATE_TEST_CASE_P(smoke_NGraph, DynamicToStaticShapeBroadcastExplicitTests
         ngraph::element::i32,
         ngraph::element::i64,
         ngraph::element::u8),
+    testing::Values(
+        ngraph::element::i32,
+        ngraph::element::i64),
     testing::Values(
         BroadcastShapes{TensorShape{16}, TensorShape{1, 16, 50, 50}, AxesMapping{1}},
-        BroadcastShapes{TensorShape{50, 50}, TensorShape{1, 50, 50, 16}, AxesMapping{1, 2}})
+        BroadcastShapes{TensorShape{50, 50}, TensorShape{1, 50, 50, 16}, AxesMapping{1, 2}}),
+    testing::Values(
+        BroadcastInputType::DYNAMIC,
+        BroadcastInputType::STATIC)
 ));
@@ -131,30 +176,45 @@ public:
     void SetUp() override {
         const auto& parameters = GetParam();
         const auto& tensorType = std::get<0>(parameters);
-        const auto& tensorShape = std::get<1>(parameters).srcShape;
-        const auto& targetShape = std::get<1>(parameters).targetShape;
+        const auto& shapeType = std::get<1>(parameters);
+        const auto& tensorShape = std::get<2>(parameters).srcShape;
+        const auto& targetShape = std::get<2>(parameters).targetShape;
+        const auto& broadcastInputType = std::get<3>(parameters);
         ngraph::helpers::CompareFunctions(
-            *transform(tensorType, tensorShape, targetShape),
-            *reference(tensorType, tensorShape, targetShape));
+            *transform(tensorType, shapeType, tensorShape, targetShape, broadcastInputType),
+            *reference(tensorType, shapeType, tensorShape, targetShape, broadcastInputType));
     }
 protected:
     std::shared_ptr<const ngraph::Function> transform(
             const TensorType& tensorType,
+            const TensorType& shapeType,
             const TensorShape& tensorShape,
-            const TensorShape& targetShape) const {
+            const TensorShape& targetShape,
+            BroadcastInputType broadcastInputType) const {
         const auto tensorParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, tensorShape);
-        const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, targetShape);
-        const auto shapeOfNode = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam);
+        const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(shapeType, targetShape);
+        const auto shapeOfNode = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam, shapeType);
         shapeOfNode->set_is_foldable(false);
-        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(tensorParam, shapeOfNode, ngraph::op::BroadcastType::BIDIRECTIONAL);
+        ngraph::ParameterVector params{tensorParam, tensorWithTargetShapeParam};
+        std::shared_ptr<ngraph::Node> broadcastInput = tensorParam;
+        if (broadcastInputType == BroadcastInputType::DYNAMIC) {
+            const auto shapeParam = std::make_shared<ngraph::opset5::Parameter>(
+                shapeType,
+                ngraph::Shape{static_cast<size_t>(tensorShape.rank().get_length())});
+            params.push_back(shapeParam);
+            broadcastInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(tensorParam, shapeParam);
+        }
+        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(broadcastInput, shapeOfNode, ngraph::op::BroadcastType::BIDIRECTIONAL);
         auto function = std::make_shared<ngraph::Function>(
             ngraph::NodeVector{broadcast},
-            ngraph::ParameterVector{tensorParam, tensorWithTargetShapeParam},
+            params,
             "Actual");
         const auto transformations = vpu::Transformations{{ngraph::opset5::Broadcast::type_info, vpu::dynamicToStaticShapeBroadcast}};
@@ -164,29 +224,41 @@ protected:
     std::shared_ptr<const ngraph::Function> reference(
             const TensorType& tensorType,
+            const TensorType& shapeType,
             const TensorShape& tensorShape,
-            const TensorShape& targetShape) const {
+            const TensorShape& targetShape,
+            BroadcastInputType broadcastInputType) const {
         const auto tensorParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, tensorShape);
-        const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(tensorType, targetShape);
-        std::shared_ptr<ngraph::Node> shapeOf = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam);
+        const auto tensorWithTargetShapeParam = std::make_shared<ngraph::opset5::Parameter>(shapeType, targetShape);
+        std::shared_ptr<ngraph::Node> shapeOf = std::make_shared<ngraph::opset5::ShapeOf>(tensorWithTargetShapeParam, shapeType);
+        ngraph::ParameterVector params{tensorParam, tensorWithTargetShapeParam};
+        std::shared_ptr<ngraph::Node> broadcastInput = tensorParam;
+        if (broadcastInputType == BroadcastInputType::DYNAMIC) {
+            const auto shapeParam = std::make_shared<ngraph::opset5::Parameter>(
+                shapeType,
+                ngraph::Shape{static_cast<size_t>(tensorShape.rank().get_length())});
+            params.push_back(shapeParam);
+            broadcastInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(tensorParam, shapeParam);
+        }
         const auto staticShapeBroadcast = std::make_shared<ngraph::vpu::op::StaticShapeBroadcast>(
-            tensorParam,
+            broadcastInput,
             shapeOf,
             ngraph::op::BroadcastType::BIDIRECTIONAL);
         const auto tensorShapeDimsCount = tensorShape.rank().get_length();
         const auto targetShapeDimsCount = targetShape.rank().get_length();
-        std::shared_ptr<ngraph::Node> tensorShapeConst = std::make_shared<ngraph::opset5::Constant>(
-            ngraph::element::i64,
-            ngraph::Shape{static_cast<size_t>(tensorShapeDimsCount)},
-            tensorShape.get_shape());
+        const auto tensorShapeNode = broadcastInputType == BroadcastInputType::DYNAMIC ?
+            staticShapeBroadcast->input_value(0).get_node_shared_ptr()->input_value(1) :
+            vpu::shapeToConstant(shapeType, tensorShape.get_shape());
-        const auto maxRankNode = tensorShapeDimsCount > targetShapeDimsCount ? tensorShapeConst : shapeOf;
-        const auto minRankNode = maxRankNode == tensorShapeConst ? shapeOf : tensorShapeConst;
-        const auto maxRank = maxRankNode == tensorShapeConst ? tensorShapeDimsCount : targetShapeDimsCount;
-        const auto minRank = minRankNode == tensorShapeConst ? tensorShapeDimsCount : targetShapeDimsCount;
+        const auto maxRankNode = tensorShapeDimsCount > targetShapeDimsCount ? tensorShapeNode : shapeOf;
+        const auto minRankNode = maxRankNode == tensorShapeNode ? shapeOf : tensorShapeNode;
+        const auto maxRank = maxRankNode == tensorShapeNode ? tensorShapeDimsCount : targetShapeDimsCount;
+        const auto minRank = minRankNode == tensorShapeNode ? tensorShapeDimsCount : targetShapeDimsCount;
         ngraph::NodeVector dims;
@@ -216,7 +288,7 @@ protected:
             staticShapeBroadcast->output(0), outShape);
         return std::make_shared<ngraph::Function>(
             ngraph::NodeVector{dsrOut},
-            ngraph::ParameterVector{tensorParam, tensorWithTargetShapeParam},
+            params,
             "Expected");
     }
 };
@@ -231,14 +303,19 @@ INSTANTIATE_TEST_CASE_P(smoke_NGraph, DynamicToStaticShapeBroadcastBidirectional
         ngraph::element::i32,
         ngraph::element::i64,
         ngraph::element::u8),
+    testing::Values(
+        ngraph::element::i32,
+        ngraph::element::i64),
     testing::Values(
         BroadcastShapes{TensorShape{1, 1, 4}, TensorShape{300, 2, 4}, {}},
         BroadcastShapes{TensorShape{15, 1}, TensorShape{2, 16, 15, 14}, {}},
         BroadcastShapes{TensorShape{2, 16, 15, 14}, TensorShape{15, 14}, {}},
         BroadcastShapes{TensorShape{2, 16, 15, 14}, TensorShape{16, 1, 1}, {}},
         BroadcastShapes{TensorShape{2, 16, 15, 14}, TensorShape{16, 1, 14}, {}},
-        BroadcastShapes{TensorShape{16, 15, 1}, TensorShape{2, 1, 15, 14}, {}})
+        BroadcastShapes{TensorShape{16, 15, 1}, TensorShape{2, 1, 15, 14}, {}}),
+    testing::Values(
+        BroadcastInputType::DYNAMIC,
+        BroadcastInputType::STATIC)
 ));
 } // namespace

View File

@@ -29,6 +29,7 @@ class DSR_TestsCommon : virtual public LayerTestsUtils::LayerTestsCommon {
 protected:
     std::unordered_map<std::string, DataShape> m_shapes;
     ngraph::ParameterVector m_parameterVector;
+    ngraph::ResultVector m_additionalResults;
     std::shared_ptr<ngraph::opset3::Parameter> createParameter(
             const ngraph::element::Type& element_type,
@@ -69,6 +70,7 @@ protected:
         for (const auto& output : testedOp->outputs()) {
             results.emplace_back(std::make_shared<ngraph::opset3::Result>(output));
         }
+        results.insert(results.end(), m_additionalResults.begin(), m_additionalResults.end());
         function = std::make_shared<ngraph::Function>(
             results,
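
With this hook, a derived test can keep an otherwise-unused op output alive: the test registers that output as an extra Result inside its createTestedOp, and DSR_TestsCommon appends m_additionalResults when it assembles the ngraph::Function. A minimal sketch mirroring the NonZero usage from the next file (nonZeroParam is assumed to be created via createParameter):

    // NonZero must have at least one consumed output, so publish output 0
    // as an additional function result; the base class picks it up above.
    const auto nonZero = std::make_shared<ngraph::opset5::NonZero>(nonZeroParam, ngraph::element::i32);
    m_additionalResults.push_back(std::make_shared<ngraph::opset3::Result>(nonZero->output(0)));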

View File

@@ -15,7 +15,7 @@ using TensorType = ngraph::element::Type;
 using TensorShape = ngraph::Shape;
 struct BroadcastInputParams {
-    TensorShape inputShape;
+    DataShapeWithUpperBound inputShape;
     DataShapeWithUpperBound targetShape;
     InferenceEngine::SizeVector axesMapping;
 };
@@ -24,8 +24,8 @@ using BroadcastTestParams = std::tuple<
     BroadcastInputParams, TensorType, LayerTestsUtils::TargetDevice>;
-class NonZero_BroadcastBidirectional : public testing::WithParamInterface<BroadcastTestParams>,
-        virtual public LayerTestsUtils::LayerTestsCommon {
+class NonZero_Broadcast : public testing::WithParamInterface<BroadcastTestParams>,
+        public DSR_TestsCommon {
 protected:
     size_t getDynamicAxis(const DataShape& shapeA, const DataShape& shapeB) const {
         size_t res = 0;
@@ -35,9 +35,7 @@ protected:
         return res;
     }
-    void prepareBroadcastInputs() {
-        SetRefMode(LayerTestsUtils::RefMode::CONSTANT_FOLDING);
+    std::shared_ptr<ngraph::Node> createTestedOp() override {
         const auto& parameters = GetParam();
         const auto& broadcastParams = std::get<0>(parameters);
         const auto& tensorType = std::get<1>(parameters);
@@ -48,54 +46,56 @@ protected:
         const auto dynamicAxis = getDynamicAxis(upperBoundShape, realShape);
-        m_param = std::make_shared<ngraph::opset5::Parameter>(tensorType, TensorShape{upperBoundShape[dynamicAxis]});
-        m_nonZero = std::make_shared<ngraph::opset5::NonZero>(m_param);
-        const auto shapeOfNonZero = std::make_shared<ngraph::opset5::ShapeOf>(m_nonZero);
+        const auto& nonZeroParam = createParameter(tensorType, TensorShape{upperBoundShape[dynamicAxis]});
+        const auto& nonZero = std::make_shared<ngraph::opset5::NonZero>(nonZeroParam, ngraph::element::i32);
+        m_additionalResults.push_back(std::make_shared<ngraph::opset3::Result>(nonZero->output(0)));
+        const auto shapeOfNonZero = std::make_shared<ngraph::opset5::ShapeOf>(nonZero, ngraph::element::i32);
         const auto numNonZeros = std::make_shared<ngraph::opset5::Gather>(
             shapeOfNonZero,
             ngraph::opset5::Constant::create(ngraph::element::i64, ngraph::Shape{1}, {1}),
             ngraph::opset5::Constant::create(ngraph::element::i64, ngraph::Shape{1}, {0}));
-        m_broadcastTargetShape = numNonZeros;
+        std::shared_ptr<ngraph::Node> broadcastTargetShape = numNonZeros;
         if (dynamicAxis > 0) {
-            m_broadcastTargetShape = std::make_shared<ngraph::opset5::Concat>(
+            broadcastTargetShape = std::make_shared<ngraph::opset5::Concat>(
                 ngraph::NodeVector{
                     ngraph::opset5::Constant::create(
-                        ngraph::element::i64,
+                        ngraph::element::i32,
                         ngraph::Shape{dynamicAxis},
                         std::vector<size_t>{upperBoundShape.begin(), upperBoundShape.begin() + dynamicAxis}),
-                    m_broadcastTargetShape},
+                    broadcastTargetShape},
                 0);
         }
         if (dynamicAxis < upperBoundShape.size() - 1) {
-            m_broadcastTargetShape = std::make_shared<ngraph::opset5::Concat>(
+            broadcastTargetShape = std::make_shared<ngraph::opset5::Concat>(
                 ngraph::NodeVector{
-                    m_broadcastTargetShape,
+                    broadcastTargetShape,
                     ngraph::opset5::Constant::create(
-                        ngraph::element::i64,
+                        ngraph::element::i32,
                         ngraph::Shape{upperBoundShape.size() - dynamicAxis - 1},
                         std::vector<size_t>{upperBoundShape.begin() + dynamicAxis + 1, upperBoundShape.end()})},
                 0);
         }
-        m_broadcastInput = ngraph::builder::makeConstant(tensorType, ngraph::Shape{broadcastParams.inputShape}, std::vector<int64_t>{}, true);
-    }
-    void SetUp() override {
-        prepareBroadcastInputs();
-        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(m_broadcastInput, m_broadcastTargetShape, ngraph::op::BroadcastType::BIDIRECTIONAL);
-        function = std::make_shared<ngraph::Function>(
-            ngraph::NodeVector{broadcast, m_nonZero},
-            ngraph::ParameterVector{m_param},
-            "NonZero-Broadcast");
+        const auto& broadcastInput = broadcastParams.inputShape.upperBoundShape.size() ?
+            createInputSubgraphWithDSR(tensorType, broadcastParams.inputShape) :
+            ngraph::builder::makeConstant(tensorType, ngraph::Shape{broadcastParams.inputShape.shape}, std::vector<int64_t>{}, true);
+        if (broadcastParams.axesMapping.size() != 0) {
+            const auto& axesMapping = std::get<0>(GetParam()).axesMapping;
+            const auto axesMappingConst = ngraph::opset5::Constant::create(ngraph::element::i64, ngraph::Shape{axesMapping.size()}, axesMapping);
+            return std::make_shared<ngraph::opset5::Broadcast>(broadcastInput, broadcastTargetShape, axesMappingConst);
+        }
+        return std::make_shared<ngraph::opset5::Broadcast>(broadcastInput, broadcastTargetShape, ngraph::op::BroadcastType::BIDIRECTIONAL);
     }
     InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override {
-        // We emulate dynamic shape through the number of non-zeros in NonZero input tensor
-        const auto &broadcastParams = std::get<0>(GetParam());
-        const auto numNonZeros = broadcastParams.targetShape.shape[getDynamicAxis(
-            broadcastParams.targetShape.upperBoundShape,
+        if (info.name() == m_parameterVector.front()->get_friendly_name()) {
+            // We emulate dynamic target shape through the number of non-zeros in NonZero input tensor
+            const auto &broadcastParams = std::get<0>(GetParam());
+            const auto numNonZeros = broadcastParams.targetShape.shape[getDynamicAxis(
+                broadcastParams.targetShape.upperBoundShape,
@@ -115,66 +115,30 @@
             return blob;
         }
-protected:
-    std::shared_ptr<ngraph::Node> m_broadcastInput;
-    std::shared_ptr<ngraph::Node> m_broadcastTargetShape;
-    std::shared_ptr<ngraph::opset5::NonZero> m_nonZero;
-    std::shared_ptr<ngraph::opset5::Parameter> m_param;
+        return DSR_TestsCommon::GenerateInput(info);
+    }
 };
-TEST_P(NonZero_BroadcastBidirectional, CompareWithReference) {
+TEST_P(NonZero_Broadcast, CompareWithReference) {
     Run();
 }
-std::vector<BroadcastInputParams> broadcastBidirectionalTestParams = {
-    { {1, 1, 4}, DataShapeWithUpperBound{ {200, 2, 4}, {300, 2, 4} }, {} },
-    { {15, 14}, DataShapeWithUpperBound{ {2, 16, 1, 14}, {2, 16, 15, 14} }, {} },
-    { {15, 1}, DataShapeWithUpperBound{ {1, 16, 15, 14}, {2, 16, 15, 14} }, {} },
-    { {2, 16, 15, 14}, DataShapeWithUpperBound{ {1, 15, 14}, {16, 15, 14} }, {} },
-    { {2, 16, 15, 14}, DataShapeWithUpperBound{ {16, 1, 1}, {16, 1, 14} }, {} },
-    { {16, 15, 1}, DataShapeWithUpperBound{ {2, 1, 15, 14}, {2, 16, 15, 14} }, {} },
+std::vector<BroadcastInputParams> broadcastTestParams = {
+    { DataShapeWithUpperBound{ {1, 1, 4}, {} }, DataShapeWithUpperBound{ {200, 2, 4}, {300, 2, 4} }, {} },
+    { DataShapeWithUpperBound{ {15, 14}, {} }, DataShapeWithUpperBound{ {2, 16, 1, 14}, {2, 16, 15, 14} }, {} },
+    { DataShapeWithUpperBound{ {15, 1}, {} }, DataShapeWithUpperBound{ {1, 16, 15, 14}, {2, 16, 15, 14} }, {} },
+    { DataShapeWithUpperBound{ {2, 16, 15, 14}, {} }, DataShapeWithUpperBound{ {1, 15, 14}, {16, 15, 14} }, {} },
+    { DataShapeWithUpperBound{ {2, 16, 15, 14}, {} }, DataShapeWithUpperBound{ {16, 1, 1}, {16, 1, 14} }, {} },
+    { DataShapeWithUpperBound{ {16, 15, 1}, {} }, DataShapeWithUpperBound{ {2, 1, 15, 14}, {2, 16, 15, 14} }, {} },
+    { DataShapeWithUpperBound{ {142, 1, 1, 64}, {300, 1, 1, 64} }, DataShapeWithUpperBound{ {142, 3, 64, 64}, {300, 3, 64, 64} }, {} },
+    { DataShapeWithUpperBound{ {1}, {} }, DataShapeWithUpperBound{ {1, 800}, {1, 1000} }, {0} },
+    { DataShapeWithUpperBound{ {4}, {} }, DataShapeWithUpperBound{ {100, 4}, {1000, 4} }, {1} },
+    { DataShapeWithUpperBound{ {128, 256}, {} }, DataShapeWithUpperBound{ {1, 128, 256}, {3, 128, 256} }, {1, 2} },
 };
-INSTANTIATE_TEST_CASE_P(smoke_DynamicBroadcast, NonZero_BroadcastBidirectional,
+INSTANTIATE_TEST_CASE_P(smoke_DynamicBroadcast, NonZero_Broadcast,
     ::testing::Combine(
-        ::testing::ValuesIn(broadcastBidirectionalTestParams),
+        ::testing::ValuesIn(broadcastTestParams),
         ::testing::Values(ngraph::element::f16, ngraph::element::f32, ngraph::element::i32),
         ::testing::Values(CommonTestUtils::DEVICE_MYRIAD)));
-using BroadcastExplicitTestParams = std::tuple<
-    BroadcastTestParams, TensorShape, TensorType, LayerTestsUtils::TargetDevice>;
-class NonZero_BroadcastExplicit : public NonZero_BroadcastBidirectional {
-protected:
-    void SetUp() override {
-        prepareBroadcastInputs();
-        const auto& axesMapping = std::get<0>(GetParam()).axesMapping;
-        const auto axesMappingConst = ngraph::opset5::Constant::create(ngraph::element::i64, ngraph::Shape{axesMapping.size()}, axesMapping);
-        const auto broadcast = std::make_shared<ngraph::opset5::Broadcast>(m_broadcastInput, m_broadcastTargetShape, axesMappingConst);
-        function = std::make_shared<ngraph::Function>(
-            ngraph::NodeVector{broadcast, m_nonZero},
-            ngraph::ParameterVector{m_param},
-            "NonZero-Broadcast");
-    }
-};
-TEST_P(NonZero_BroadcastExplicit, CompareWithReference) {
-    Run();
-}
-std::vector<BroadcastInputParams> broadcastExplicitTestParams = {
-    { {1}, DataShapeWithUpperBound{ {1, 800}, {1, 1000} }, {0} },
-    { {4}, DataShapeWithUpperBound{ {100, 4}, {1000, 4} }, {1} },
-    { {128, 256}, DataShapeWithUpperBound{ {1, 128, 256}, {3, 128, 256} }, {1, 2} },
-};
-INSTANTIATE_TEST_CASE_P(smoke_DynamicBroadcast, NonZero_BroadcastExplicit,
-    ::testing::Combine(
-        ::testing::ValuesIn(broadcastExplicitTestParams),
-        ::testing::Values(ngraph::element::f16, ngraph::element::f32, ngraph::element::i32),
-        ::testing::Values(CommonTestUtils::DEVICE_MYRIAD)));
 } // namespace
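
Two notes on the test harness above. First, each DataShapeWithUpperBound entry pairs the actual runtime shape with its compile-time upper bound (field order assumed from the GenerateInput code); an empty upper bound on inputShape means the broadcast input is static and is lowered to a constant instead of a DSR subgraph. Second, the dynamic target dimension is emulated through NonZero: GenerateInput fills the NonZero input so that exactly numNonZeros elements are non-zero, which makes the runtime value of the dynamic axis equal to numNonZeros. A hypothetical sketch of both points (not part of the diff):

    // One dynamic-input entry: runtime shape {142, 1, 1, 64} with upper bound
    // {300, 1, 1, 64}; the empty axesMapping selects BIDIRECTIONAL mode.
    const BroadcastInputParams example{
        DataShapeWithUpperBound{ {142, 1, 1, 64}, {300, 1, 1, 64} },   // inputShape
        DataShapeWithUpperBound{ {142, 3, 64, 64}, {300, 3, 64, 64} }, // targetShape
        {} };

    // Hypothetical helper showing the NonZero trick: the first numNonZeros
    // elements are non-zero, the rest are zero.
    void fillWithNonZeros(float* data, std::size_t size, std::size_t numNonZeros) {
        for (std::size_t i = 0; i < size; ++i)
            data[i] = i < numNonZeros ? 1.0f : 0.0f;
    }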