Apply Eltwise input shape to templateFuncTests

Modify GenerateInputs() to take parameters from functionRefs instead of function
This commit is contained in:
Steve Yoo
2021-09-27 14:59:00 +09:00
parent 25c399d922
commit 0802282972
10 changed files with 136 additions and 89 deletions

View File

@@ -66,8 +66,8 @@ INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30, 30}})),
::testing::Values(std::vector<std::vector<std::pair<size_t, size_t>>>(NULL_RANGE)),
::testing::Values(std::vector<std::vector<std::vector<size_t>>>({{{1, 3, 30, 30}}})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
@@ -79,10 +79,12 @@ INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPaddingDynamicShape, ConvolutionL
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<std::pair<size_t, size_t>>({{1, 10}, {3, 30}, {30, 300}, {30, 300}})),
::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30, 30}}),
std::vector<std::vector<size_t>>({{2, 4, 31, 31}}),
std::vector<std::vector<size_t>>({{1, 3, 30, 30}, {2, 4, 31, 31}})),
::testing::Values(std::vector<std::vector<std::pair<size_t, size_t>>>(
{{{1, 10}, {3, 30}, {30, 300}, {30, 300}}})),
::testing::Values(std::vector<std::vector<std::vector<size_t>>>({{{1, 3, 30, 30}}}),
std::vector<std::vector<std::vector<size_t>>>({{{2, 4, 31, 31}}}),
std::vector<std::vector<std::vector<size_t>>>({{{1, 3, 30, 30}},
{{2, 4, 31, 31}}})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
// ! [test_convolution:instantiate]
@@ -95,8 +97,8 @@ INSTANTIATE_TEST_SUITE_P(Convolution2D_AutoPadValid, ConvolutionLayerTest,
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30, 30}})),
::testing::Values(std::vector<std::vector<std::pair<size_t, size_t>>>(NULL_RANGE)),
::testing::Values(std::vector<std::vector<std::vector<size_t>>>({{{1, 3, 30, 30}}})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
@@ -139,8 +141,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Convolution3D_ExplicitPadding, ConvolutionLayerTe
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
::testing::Values(std::vector<std::vector<size_t> >({{1, 3, 10, 10, 10}})),
::testing::Values(std::vector<std::vector<std::pair<size_t, size_t>>>(NULL_RANGE)),
::testing::Values(std::vector<std::vector<std::vector<size_t>>>({{{1, 3, 10, 10, 10}}})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
@@ -152,8 +154,8 @@ INSTANTIATE_TEST_SUITE_P(nightly_Convolution3D_AutoPadValid, ConvolutionLayerTes
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
::testing::Values(std::vector<std::vector<size_t> >({{1, 3, 10, 10, 10}})),
::testing::Values(std::vector<std::vector<std::pair<size_t, size_t>>>(NULL_RANGE)),
::testing::Values(std::vector<std::vector<std::vector<size_t>>>({{{1, 3, 10, 10, 10}}})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);

View File

@@ -20,18 +20,18 @@ const std::vector<InferenceEngine::Layout> inputLayouts2D = {
InferenceEngine::Layout::NC,
};
const std::vector<std::vector<std::pair<size_t, size_t>>> inputStaticShape2D = {
const std::vector<std::vector<std::vector<std::pair<size_t, size_t>>>> inputStaticShape2D = {
{NULL_RANGE}
};
const std::vector<std::vector<std::pair<size_t, size_t>>> inputShape2D = {
{{1, 200}, {1, 200}}
const std::vector<std::vector<std::vector<std::pair<size_t, size_t>>>> inputShape2D = {
{{{1, 200}, {1, 200}}}
};
const std::vector<std::vector<InferenceEngine::SizeVector>> targetShapes2D = {
{InferenceEngine::SizeVector {1, 100}},
{InferenceEngine::SizeVector {100, 1}},
{InferenceEngine::SizeVector {10, 10}},
const std::vector<std::vector<std::vector<InferenceEngine::SizeVector>>> targetShapes2D = {
{{InferenceEngine::SizeVector{1, 100}}},
{{InferenceEngine::SizeVector{100, 1}}},
{{InferenceEngine::SizeVector{10, 10}}},
};
const std::vector<size_t> axis2D = {
@@ -78,18 +78,18 @@ INSTANTIATE_TEST_SUITE_P(
SoftMaxLayerTest::getTestCaseName
);
const std::vector<std::vector<std::pair<size_t, size_t>>> inputStaticShape4D = {
const std::vector<std::vector<std::vector<std::pair<size_t, size_t>>>> inputStaticShape4D = {
{NULL_RANGE}
};
const std::vector<std::vector<std::pair<size_t, size_t>>> inputShape4D = {
{{1, 200}, {1, 200}, {1, 200}, {1, 200}}
const std::vector<std::vector<std::vector<std::pair<size_t, size_t>>>> inputShape4D = {
{{{1, 200}, {1, 200}, {1, 200}, {1, 200}}}
};
const std::vector<std::vector<InferenceEngine::SizeVector>> targetShapes4D = {
{InferenceEngine::SizeVector {1, 100, 1, 1}},
{InferenceEngine::SizeVector {1, 3, 4, 3}},
{InferenceEngine::SizeVector {2, 3, 4, 5}},
const std::vector<std::vector<std::vector<InferenceEngine::SizeVector>>> targetShapes4D = {
{{InferenceEngine::SizeVector{1, 100, 1, 1}}},
{{InferenceEngine::SizeVector{1, 3, 4, 3}}},
{{InferenceEngine::SizeVector{2, 3, 4, 5}}},
};
const std::vector<size_t> axis4D = {0, 1, 2, 3};

View File

@@ -107,13 +107,14 @@ public:
function = make_ngraph(false);
reference_function = make_ngraph(true); //use extra ops to mimic the preprocessing
functionRefs = ngraph::clone_function(*function);
}
void Validate() override {
// w/a: copy of original function is required to provide correct op coverage report (overflow of convert counter issue)
auto copyOriginalFunction = function;
//force the reference implementation to use graph with extra Convert operation
functionRefs = reference_function;
functionRefs = ngraph::clone_function(*reference_function);
LayerTestsUtils::LayerTestsCommon::Validate();
function = copyOriginalFunction;
}

View File

@@ -125,6 +125,8 @@ protected:
virtual void ConfigureNetwork();
virtual void ConfigureNetwork_Secondary() {}
virtual void LoadNetwork();
virtual void GenerateInputs();
@@ -145,11 +147,11 @@ protected:
float threshold;
InferenceEngine::CNNNetwork cnnNetwork;
std::shared_ptr<InferenceEngine::Core> core;
ngraph::PartialShape inputDynamicShape;
ngraph::Shape targetStaticShape;
std::vector<ngraph::Shape> targetStaticShapes;
std::vector<ngraph::PartialShape> inputDynamicShape;
std::vector<ngraph::Shape> targetStaticShape;
std::vector<std::vector<ngraph::Shape>> targetStaticShapes;
virtual void setTargetStaticShape(ngraph::Shape& targetStaticShape);
virtual void setTargetStaticShape(std::vector<ngraph::Shape>& desiredTargetStaticShape) {}
virtual void Validate();

View File

@@ -17,24 +17,24 @@ namespace LayerTestsDefinitions {
// ! [test_convolution:definition]
typedef std::tuple<
InferenceEngine::SizeVector, // Kernel size
InferenceEngine::SizeVector, // Strides
std::vector<ptrdiff_t>, // Pad begin
std::vector<ptrdiff_t>, // Pad end
InferenceEngine::SizeVector, // Dilation
size_t, // Num out channels
ngraph::op::PadType // Padding type
InferenceEngine::SizeVector, // Kernel size
InferenceEngine::SizeVector, // Strides
std::vector<ptrdiff_t>, // Pad begin
std::vector<ptrdiff_t>, // Pad end
InferenceEngine::SizeVector, // Dilation
size_t, // Num out channels
ngraph::op::PadType // Padding type
> convSpecificParams;
typedef std::tuple<
convSpecificParams,
InferenceEngine::Precision, // Net precision
InferenceEngine::Precision, // Input precision
InferenceEngine::Precision, // Output precision
InferenceEngine::Layout, // Input layout
InferenceEngine::Layout, // Output layout
std::vector<std::pair<size_t, size_t>>, // Input shape
std::vector<std::vector<size_t>>, // target shapes
LayerTestsUtils::TargetDevice // Device name
InferenceEngine::Precision, // Net precision
InferenceEngine::Precision, // Input precision
InferenceEngine::Precision, // Output precision
InferenceEngine::Layout, // Input layout
InferenceEngine::Layout, // Output layout
std::vector<std::vector<std::pair<size_t, size_t>>>, // Input shape
std::vector<std::vector<std::vector<size_t>>>, // target shapes
LayerTestsUtils::TargetDevice // Device name
> convLayerTestParamsSet;
class ConvolutionLayerTest : public testing::WithParamInterface<convLayerTestParamsSet>,
@@ -45,6 +45,7 @@ public:
protected:
void SetUp() override;
std::shared_ptr<ngraph::Function> makeConvolution(const std::string& name = "");
void setTargetStaticShape(std::vector<ngraph::Shape>& desiredTargetStaticShape) override;
private:
InferenceEngine::Precision::ePrecision netPrecision = InferenceEngine::Precision::UNSPECIFIED;

View File

@@ -17,16 +17,16 @@
namespace LayerTestsDefinitions {
using softMaxLayerTestParams = std::tuple<
InferenceEngine::Precision, // netPrecision
InferenceEngine::Precision, // Input precision
InferenceEngine::Precision, // Output precision
InferenceEngine::Layout, // Input layout
InferenceEngine::Layout, // Output layout
std::vector<std::pair<size_t, size_t>>, // Input shape
std::vector<std::vector<size_t>>, // Target shapes
size_t, // axis
std::string, // targetDevice
std::map<std::string, std::string> // config
InferenceEngine::Precision, // netPrecision
InferenceEngine::Precision, // Input precision
InferenceEngine::Precision, // Output precision
InferenceEngine::Layout, // Input layout
InferenceEngine::Layout, // Output layout
std::vector<std::vector<std::pair<size_t, size_t>>>, // Input shape
std::vector<std::vector<std::vector<size_t>>>, // Target shapes
size_t, // axis
std::string, // targetDevice
std::map<std::string, std::string> // config
>;
class SoftMaxLayerTest : public testing::WithParamInterface<softMaxLayerTestParams>,
@@ -37,6 +37,7 @@ public:
protected:
void SetUp() override;
std::shared_ptr<ngraph::Function> makeSoftMax(const std::string& name = "");
void setTargetStaticShape(std::vector<ngraph::Shape>& desiredTargetStaticShape) override;
private:
InferenceEngine::Precision netPrecision;

View File

@@ -94,11 +94,12 @@ void LayerTestsCommon::Serialize() {
}
InferenceEngine::Blob::Ptr LayerTestsCommon::GenerateInput(const InferenceEngine::InputInfo& info) const {
return FuncTestUtils::createAndFillBlob(
targetStaticShape.empty() ? info.getTensorDesc()
: InferenceEngine::TensorDesc(info.getPrecision(),
targetStaticShape,
const_cast<InferenceEngine::InputInfo&>(info).getLayout()));
return FuncTestUtils::createAndFillBlob(targetStaticShape.empty() || targetStaticShape[0].empty() ?
info.getTensorDesc() :
InferenceEngine::TensorDesc(
info.getPrecision(),
targetStaticShape[0],
const_cast<InferenceEngine::InputInfo&>(info).getLayout()));
}
void LayerTestsCommon::Compare(const std::vector<std::pair<ngraph::element::Type, std::vector<std::uint8_t>>> &expectedOutputs,
@@ -319,13 +320,21 @@ void LayerTestsCommon::ConfigureNetwork() {
}
}
if (inputDynamicShape.is_dynamic()) {
std::map<std::string, ngraph::PartialShape> inputShapes;
auto inputsDataMap = cnnNetwork.getInputsInfo();
for (auto&& inputDataMap : inputsDataMap) {
inputShapes[inputDataMap.first] = std::vector<ngraph::Dimension>(inputDynamicShape);
if (!inputDynamicShape.empty()) {
if (inputDynamicShape.size() == 1) {
if (inputDynamicShape.front().is_dynamic()) {
std::map<std::string, ngraph::PartialShape> inputShapes;
auto inputsDataMap = cnnNetwork.getInputsInfo();
for (auto&& inputDataMap : inputsDataMap) {
inputShapes[inputDataMap.first] = std::vector<ngraph::Dimension>(inputDynamicShape.front());
}
cnnNetwork.reshape(inputShapes);
}
} else if (inputDynamicShape.size() == 2) {
ConfigureNetwork_Secondary();
} else {
IE_THROW() << "Incorrect number of input shapes";
}
cnnNetwork.reshape(inputShapes);
}
}
@@ -339,7 +348,7 @@ void LayerTestsCommon::LoadNetwork() {
void LayerTestsCommon::GenerateInputs() {
inputs.clear();
const auto& inputsInfo = executableNetwork.GetInputsInfo();
const auto& functionParams = function->get_parameters();
const auto& functionParams = functionRefs->get_parameters();
for (int i = 0; i < functionParams.size(); ++i) {
const auto& param = functionParams[i];
const auto infoIt = inputsInfo.find(param->get_friendly_name());
@@ -529,8 +538,4 @@ std::map<std::string, std::string> &LayerTestsCommon::GetConfiguration() {
return configuration;
}
void LayerTestsCommon::setTargetStaticShape(ngraph::Shape& desiredTargetStaticShape) {
targetStaticShape = desiredTargetStaticShape;
}
} // namespace LayerTestsUtils

View File

@@ -14,8 +14,8 @@ std::string ConvolutionLayerTest::getTestCaseName(const testing::TestParamInfo<c
InferenceEngine::Precision netPrecision;
InferenceEngine::Precision inPrc, outPrc;
InferenceEngine::Layout inLayout, outLayout;
std::vector<std::pair<size_t, size_t>> inputShape;
std::vector<InferenceEngine::SizeVector> targetShapes;
std::vector<std::vector<std::pair<size_t, size_t>>> inputShape;
std::vector<std::vector<InferenceEngine::SizeVector>> targetShapes;
std::string targetDevice;
std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetShapes, targetDevice) =
obj.param;
@@ -46,14 +46,19 @@ std::string ConvolutionLayerTest::getTestCaseName(const testing::TestParamInfo<c
void ConvolutionLayerTest::SetUp() {
convSpecificParams convParams;
std::vector<std::pair<size_t, size_t>> inputShape;
std::vector<InferenceEngine::SizeVector> targetShapes;
std::vector<std::vector<std::pair<size_t, size_t>>> inputShape;
std::vector<std::vector<InferenceEngine::SizeVector>> targetShapes;
std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetShapes, targetDevice) =
this->GetParam();
for (auto&& targetShape : targetShapes) {
targetStaticShapes.emplace_back(targetShape);
targetStaticShapes.emplace_back(
std::vector<ngraph::Shape>{ngraph::Shape{targetShape.front()}, ngraph::Shape{targetShape.front()}});
}
inputDynamicShape = FuncTestUtils::PartialShapeUtils::vec2partialshape(inputShape, targetStaticShapes[0]);
inputDynamicShape.emplace_back(
FuncTestUtils::PartialShapeUtils::vec2partialshape(
inputShape.empty() ?
std::vector<std::pair<size_t, size_t>>{} :
inputShape.front(), targetStaticShapes[0].front()));
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType) = convParams;
setTargetStaticShape(targetStaticShapes[0]);
@@ -63,13 +68,13 @@ void ConvolutionLayerTest::SetUp() {
std::shared_ptr<ngraph::Function> ConvolutionLayerTest::makeConvolution(const std::string& name) {
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {targetStaticShape});
auto params = ngraph::builder::makeParams(ngPrc, {targetStaticShape.front()});
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::vector<float> filter_weights;
if (targetDevice == CommonTestUtils::DEVICE_GNA) {
auto filter_size = std::accumulate(std::begin(kernel), std::end(kernel), 1, std::multiplies<size_t>());
filter_weights = CommonTestUtils::generate_float_numbers(convOutChannels * targetStaticShape[1] * filter_size,
filter_weights = CommonTestUtils::generate_float_numbers(convOutChannels * targetStaticShape.front()[1] * filter_size,
-0.5f, 0.5f);
}
auto conv = std::dynamic_pointer_cast<ngraph::opset1::Convolution>(
@@ -79,4 +84,8 @@ std::shared_ptr<ngraph::Function> ConvolutionLayerTest::makeConvolution(const st
return std::make_shared<ngraph::Function>(results, params, name);
}
void ConvolutionLayerTest::setTargetStaticShape(std::vector<ngraph::Shape>& desiredTargetStaticShape) {
targetStaticShape = desiredTargetStaticShape;
}
} // namespace LayerTestsDefinitions

View File

@@ -11,8 +11,8 @@ std::string SoftMaxLayerTest::getTestCaseName(const testing::TestParamInfo<softM
InferenceEngine::Precision netPrecision;
InferenceEngine::Precision inPrc, outPrc;
InferenceEngine::Layout inLayout, outLayout;
std::vector<std::pair<size_t, size_t>> inputShape;
std::vector<InferenceEngine::SizeVector> targetShapes;
std::vector<std::vector<std::pair<size_t, size_t>>> inputShape;
std::vector<std::vector<InferenceEngine::SizeVector>> targetShapes;
size_t axis;
std::string targetDevice;
std::map<std::string, std::string> config;
@@ -24,8 +24,8 @@ std::string SoftMaxLayerTest::getTestCaseName(const testing::TestParamInfo<softM
result << "outPRC=" << outPrc.name() << "_";
result << "inL=" << inLayout << "_";
result << "outL=" << outLayout << "_";
result << "IS=" << CommonTestUtils::vec2str(inputShape) << "_";
result << "TS=" << CommonTestUtils::vec2str(targetShapes) << "_";
result << "IS=" << CommonTestUtils::vec2str(inputShape.front()) << "_";
result << "TS=" << CommonTestUtils::vec2str(targetShapes.front()) << "_";
result << "axis=" << axis << "_";
result << "trgDev=" << targetDevice;
@@ -33,17 +33,22 @@ std::string SoftMaxLayerTest::getTestCaseName(const testing::TestParamInfo<softM
}
void SoftMaxLayerTest::SetUp() {
std::vector<std::pair<size_t, size_t>> inputShape;
std::vector<InferenceEngine::SizeVector> targetShapes;
std::vector<std::vector<std::pair<size_t, size_t>>> inputShape;
std::vector<std::vector<InferenceEngine::SizeVector>> targetShapes;
std::tie(netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetShapes, axis, targetDevice, configuration) = GetParam();
outLayout = inLayout;
for (auto&& targetShape : targetShapes) {
targetStaticShapes.emplace_back(targetShape);
targetStaticShapes.emplace_back(
std::vector<ngraph::Shape>{ngraph::Shape{targetShape.front()}, ngraph::Shape{targetShape.front()}});
}
inputDynamicShape = FuncTestUtils::PartialShapeUtils::vec2partialshape(inputShape, targetStaticShapes[0]);
inputDynamicShape.emplace_back(
FuncTestUtils::PartialShapeUtils::vec2partialshape(
inputShape.empty() ?
std::vector<std::pair<size_t, size_t>>{} :
inputShape.front(), targetStaticShapes[0].front()));
setTargetStaticShape(targetStaticShapes[0]);
function = makeSoftMax("softMax");
@@ -52,7 +57,7 @@ void SoftMaxLayerTest::SetUp() {
std::shared_ptr<ngraph::Function> SoftMaxLayerTest::makeSoftMax(const std::string& name) {
const auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
const auto params = ngraph::builder::makeParams(ngPrc, {targetStaticShape});
const auto params = ngraph::builder::makeParams(ngPrc, {targetStaticShape.front()});
const auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
@@ -61,4 +66,8 @@ std::shared_ptr<ngraph::Function> SoftMaxLayerTest::makeSoftMax(const std::strin
return std::make_shared<ngraph::Function>(results, params, name);
}
void SoftMaxLayerTest::setTargetStaticShape(std::vector<ngraph::Shape>& desiredTargetStaticShape) {
targetStaticShape = desiredTargetStaticShape;
}
} // namespace LayerTestsDefinitions

View File

@@ -61,6 +61,14 @@ inline std::string vec2str(const std::vector<std::pair<size_t, size_t>> &vec) {
return result.str();
}
inline std::string vec2str(const std::vector<std::vector<std::pair<size_t, size_t>>> &vec) {
std::ostringstream result;
for (const auto &v : vec) {
result << vec2str(v);
}
return result.str();
}
template<typename vecElementType>
inline std::string vec2str(const std::vector<std::vector<vecElementType>> &vec) {
std::ostringstream result;
@@ -70,6 +78,15 @@ inline std::string vec2str(const std::vector<std::vector<vecElementType>> &vec)
return result.str();
}
template<typename vecElementType>
inline std::string vec2str(const std::vector<std::vector<std::vector<vecElementType>>> &vec) {
std::ostringstream result;
for (const auto &v : vec) {
result << vec2str<vecElementType>(v);
}
return result.str();
}
template<typename vecElementType>
inline std::string set2str(const std::set<vecElementType> &set) {
if (!set.empty()) {