Rebased: enable dynamic-shape support for ConvolutionLayerTest and SoftMaxLayerTest on the template plugin

This commit is contained in:
Steve Yoo 2021-09-03 13:31:22 +09:00
parent 904384fee3
commit 0854f07291
11 changed files with 274 additions and 108 deletions

View File

@ -3,6 +3,7 @@
//
#include <vector>
#include <utility>
#include "single_layer_tests/convolution.hpp"
#include "common_test_utils/test_constants.hpp"
@ -57,7 +58,7 @@ const auto conv2DParams_AutoPadValid = ::testing::Combine(
);
// ! [test_convolution:instantiate]
INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPaddingStaticShape, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
@ -65,22 +66,38 @@ INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30, 30}})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<std::pair<size_t, size_t>>({{1, 10}, {3, 30}, {30, 300}, {30, 300}})),
::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30, 30}, {2, 4, 31, 31}})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
// ! [test_convolution:instantiate]
INSTANTIATE_TEST_SUITE_P(Convolution2D_AutoPadValid, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
//INSTANTIATE_TEST_SUITE_P(Convolution2D_AutoPadValid, ConvolutionLayerTest,
// ::testing::Combine(
// conv2DParams_AutoPadValid,
// ::testing::ValuesIn(netPrecisions),
// ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
// ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
// ::testing::Values(InferenceEngine::Layout::ANY),
// ::testing::Values(InferenceEngine::Layout::ANY),
// ::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE),
// std::vector<std::pair<size_t, size_t>>({{1, 10}, {3, 30}, {30, 300}, {30, 300}})),
// ::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
// ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
// ConvolutionLayerTest::getTestCaseName);
/* ============= 3D Convolution ============= */
@ -113,28 +130,32 @@ const auto conv3DParams_AutoPadValid = ::testing::Combine(
::testing::Values(ngraph::op::PadType::VALID)
);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution3D_ExplicitPadding, ConvolutionLayerTest,
::testing::Combine(
conv3DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 10, 10, 10})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
//INSTANTIATE_TEST_SUITE_P(smoke_Convolution3D_ExplicitPadding, ConvolutionLayerTest,
// ::testing::Combine(
// conv3DParams_ExplicitPadding,
// ::testing::ValuesIn(netPrecisions),
// ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
// ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
// ::testing::Values(InferenceEngine::Layout::ANY),
// ::testing::Values(InferenceEngine::Layout::ANY),
// ::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE),
// std::vector<std::pair<size_t, size_t>>({{1, 10}, {3, 30}, {10, 100}, {10, 100}, {10, 100}})),
// ::testing::Values(std::vector<size_t >({1, 3, 10, 10, 10})),
// ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
// ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(nightly_Convolution3D_AutoPadValid, ConvolutionLayerTest,
::testing::Combine(
conv3DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 10, 10, 10})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
//INSTANTIATE_TEST_SUITE_P(nightly_Convolution3D_AutoPadValid, ConvolutionLayerTest,
// ::testing::Combine(
// conv3DParams_AutoPadValid,
// ::testing::ValuesIn(netPrecisions),
// ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
// ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
// ::testing::Values(InferenceEngine::Layout::ANY),
// ::testing::Values(InferenceEngine::Layout::ANY),
// ::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE),
// std::vector<std::pair<size_t, size_t>>({{1, 10}, {3, 30}, {10, 100}, {10, 100}, {10, 100}})),
// ::testing::Values(std::vector<size_t >({1, 3, 10, 10, 10})),
// ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
// ConvolutionLayerTest::getTestCaseName);
} // namespace

View File

@ -3,6 +3,7 @@
//
#include <vector>
#include <utility>
#include "single_layer_tests/softmax.hpp"
#include "common_test_utils/test_constants.hpp"
@ -11,7 +12,7 @@ using namespace LayerTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {
const std::vector<InferenceEngine::Precision> netPrecision = {
InferenceEngine::Precision::FP32,
};
@ -19,26 +20,55 @@ const std::vector<InferenceEngine::Layout> inputLayouts2D = {
InferenceEngine::Layout::NC,
};
const std::vector<InferenceEngine::SizeVector> inputShapes2D = {
InferenceEngine::SizeVector {1, 100},
InferenceEngine::SizeVector {100, 1},
InferenceEngine::SizeVector {10, 10},
const std::vector<std::vector<std::pair<size_t, size_t>>> inputStaticShape2D = {
{NULL_RANGE}
};
const std::vector<std::vector<std::pair<size_t, size_t>>> inputShape2D = {
{{1, 200}, {1, 200}}
};
const std::vector<std::vector<InferenceEngine::SizeVector>> targetShapes2D = {
{InferenceEngine::SizeVector {1, 100}},
{InferenceEngine::SizeVector {100, 1}},
{InferenceEngine::SizeVector {10, 10}},
};
const std::vector<size_t> axis2D = {
0, 1
};
const auto params2DStaticShape = testing::Combine(
testing::ValuesIn(netPrecision),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::ValuesIn(inputLayouts2D),
testing::Values(InferenceEngine::Layout::ANY),
testing::ValuesIn(inputStaticShape2D),
testing::ValuesIn(targetShapes2D),
testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
testing::Values(std::map<std::string, std::string>())
);
const auto params2D = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::ValuesIn(inputLayouts2D),
testing::Values(InferenceEngine::Layout::ANY),
testing::ValuesIn(inputShapes2D),
testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
testing::Values(std::map<std::string, std::string>())
testing::ValuesIn(netPrecision),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::ValuesIn(inputLayouts2D),
testing::Values(InferenceEngine::Layout::ANY),
testing::ValuesIn(inputShape2D),
testing::ValuesIn(targetShapes2D),
testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
testing::Values(std::map<std::string, std::string>())
);
INSTANTIATE_TEST_SUITE_P(
smoke_SoftMax2DStaticShape,
SoftMaxLayerTest,
params2DStaticShape,
SoftMaxLayerTest::getTestCaseName
);
INSTANTIATE_TEST_SUITE_P(
@ -57,7 +87,7 @@ const std::vector<InferenceEngine::SizeVector> inputShapes4D = {
const std::vector<size_t> axis4D = {0, 1, 2, 3};
const auto params4D = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::ValuesIn(netPrecision),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Layout::NCHW),
@ -68,11 +98,11 @@ const auto params4D = testing::Combine(
testing::Values(std::map<std::string, std::string>())
);
INSTANTIATE_TEST_SUITE_P(
smoke_SoftMax4D,
SoftMaxLayerTest,
params4D,
SoftMaxLayerTest::getTestCaseName
);
//INSTANTIATE_TEST_SUITE_P(
// smoke_SoftMax4D,
// SoftMaxLayerTest,
// params4D,
// SoftMaxLayerTest::getTestCaseName
//);
} // namespace

View File

@ -144,6 +144,11 @@ protected:
float threshold;
InferenceEngine::CNNNetwork cnnNetwork;
std::shared_ptr<InferenceEngine::Core> core;
ngraph::PartialShape inputDynamicShape;
ngraph::Shape targetStaticShape;
std::vector<ngraph::Shape> targetStaticShapes;
virtual void setTargetStaticShape(ngraph::Shape& targetStaticShape);
virtual void Validate();

View File

@ -17,23 +17,24 @@ namespace LayerTestsDefinitions {
// ! [test_convolution:definition]
typedef std::tuple<
InferenceEngine::SizeVector, // Kernel size
InferenceEngine::SizeVector, // Strides
std::vector<ptrdiff_t>, // Pad begin
std::vector<ptrdiff_t>, // Pad end
InferenceEngine::SizeVector, // Dilation
size_t, // Num out channels
ngraph::op::PadType // Padding type
InferenceEngine::SizeVector, // Kernel size
InferenceEngine::SizeVector, // Strides
std::vector<ptrdiff_t>, // Pad begin
std::vector<ptrdiff_t>, // Pad end
InferenceEngine::SizeVector, // Dilation
size_t, // Num out channels
ngraph::op::PadType // Padding type
> convSpecificParams;
typedef std::tuple<
convSpecificParams,
InferenceEngine::Precision, // Net precision
InferenceEngine::Precision, // Input precision
InferenceEngine::Precision, // Output precision
InferenceEngine::Layout, // Input layout
InferenceEngine::Layout, // Output layout
InferenceEngine::SizeVector, // Input shapes
LayerTestsUtils::TargetDevice // Device name
InferenceEngine::Precision, // Net precision
InferenceEngine::Precision, // Input precision
InferenceEngine::Precision, // Output precision
InferenceEngine::Layout, // Input layout
InferenceEngine::Layout, // Output layout
std::vector<std::pair<size_t, size_t>>, // Input shape
std::vector<std::vector<size_t>>, // target shapes
LayerTestsUtils::TargetDevice // Device name
> convLayerTestParamsSet;
class ConvolutionLayerTest : public testing::WithParamInterface<convLayerTestParamsSet>,
@ -43,6 +44,14 @@ public:
protected:
void SetUp() override;
void makeConvolutionFunction();
private:
InferenceEngine::Precision::ePrecision netPrecision = InferenceEngine::Precision::UNSPECIFIED;
ngraph::op::PadType padType;
InferenceEngine::SizeVector kernel, stride, dilation;
std::vector<ptrdiff_t> padBegin, padEnd;
size_t convOutChannels;
};
// ! [test_convolution:definition]

View File

@ -17,15 +17,16 @@
namespace LayerTestsDefinitions {
using softMaxLayerTestParams = std::tuple<
InferenceEngine::Precision, // netPrecision
InferenceEngine::Precision, // Input precision
InferenceEngine::Precision, // Output precision
InferenceEngine::Layout, // Input layout
InferenceEngine::Layout, // Output layout
InferenceEngine::SizeVector, // inputShape
size_t, // axis
std::string, // targetDevice
std::map<std::string, std::string> // config
InferenceEngine::Precision, // netPrecision
InferenceEngine::Precision, // Input precision
InferenceEngine::Precision, // Output precision
InferenceEngine::Layout, // Input layout
InferenceEngine::Layout, // Output layout
std::vector<std::pair<size_t, size_t>>, // Input shape
std::vector<std::vector<size_t>>, // Target shapes
size_t, // axis
std::string, // targetDevice
std::map<std::string, std::string> // config
>;
class SoftMaxLayerTest : public testing::WithParamInterface<softMaxLayerTestParams>,
@ -35,6 +36,11 @@ public:
protected:
void SetUp() override;
void makeSoftMaxFunction();
private:
InferenceEngine::Precision netPrecision;
size_t axis;
};
} // namespace LayerTestsDefinitions

View File

@ -47,10 +47,13 @@ void LayerTestsCommon::Run() {
try {
LoadNetwork();
GenerateInputs();
Infer();
Validate();
s.updateOPsStats(function, PassRate::Statuses::PASSED);
for (auto&& tss : targetStaticShapes) {
setTargetStaticShape(tss);
GenerateInputs();
Infer();
Validate();
s.updateOPsStats(function, PassRate::Statuses::PASSED);
}
}
catch (const std::runtime_error &re) {
s.updateOPsStats(function, PassRate::Statuses::FAILED);
@ -90,8 +93,10 @@ void LayerTestsCommon::Serialize() {
CommonTestUtils::removeIRFiles(out_xml_path, out_bin_path);
}
InferenceEngine::Blob::Ptr LayerTestsCommon::GenerateInput(const InferenceEngine::InputInfo &info) const {
return FuncTestUtils::createAndFillBlob(info.getTensorDesc());
// Creates and fills an input blob shaped to the current targetStaticShape rather
// than the network's own tensor desc, so dynamic-shape runs always infer on a
// concrete shape (targetStaticShape is updated per iteration by setTargetStaticShape).
InferenceEngine::Blob::Ptr LayerTestsCommon::GenerateInput(const InferenceEngine::InputInfo& info) const {
    // NOTE(review): const_cast works around getLayout() apparently being non-const
    // in this InferenceEngine API version — presumably a pure read; confirm and
    // drop the cast once a const accessor is available.
    return FuncTestUtils::createAndFillBlob(
        InferenceEngine::TensorDesc(info.getPrecision(), targetStaticShape,
            const_cast<InferenceEngine::InputInfo&>(info).getLayout()));
}
void LayerTestsCommon::Compare(const std::vector<std::pair<ngraph::element::Type, std::vector<std::uint8_t>>> &expectedOutputs,
@ -311,6 +316,14 @@ void LayerTestsCommon::ConfigureNetwork() {
out.second->setPrecision(outPrc);
}
}
std::map<std::string, ngraph::PartialShape> shapes;
auto isdm = cnnNetwork.getInputsInfo();
for (auto&& idm : isdm) {
shapes[idm.first] = isdm[idm.first]->getInputData()->getPartialShape();
shapes[idm.first] = std::vector<ngraph::Dimension>(inputDynamicShape);
}
cnnNetwork.reshape(shapes);
}
void LayerTestsCommon::LoadNetwork() {
@ -513,4 +526,8 @@ std::map<std::string, std::string> &LayerTestsCommon::GetConfiguration() {
return configuration;
}
// Records the concrete shape the next inference should use. Run() invokes this
// once per entry of targetStaticShapes before regenerating inputs, so each
// static shape of a dynamic test case is exercised in turn.
void LayerTestsCommon::setTargetStaticShape(ngraph::Shape& desiredTargetStaticShape) {
    targetStaticShape = desiredTargetStaticShape;
}
} // namespace LayerTestsUtils

View File

@ -2,7 +2,10 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <utility>
#include "shared_test_classes/single_layer/convolution.hpp"
#include "functional_test_utils/partial_shape_utils.hpp"
namespace LayerTestsDefinitions {
@ -11,9 +14,10 @@ std::string ConvolutionLayerTest::getTestCaseName(testing::TestParamInfo<convLay
InferenceEngine::Precision netPrecision;
InferenceEngine::Precision inPrc, outPrc;
InferenceEngine::Layout inLayout, outLayout;
InferenceEngine::SizeVector inputShapes;
std::vector<std::pair<size_t, size_t>> inputShape;
std::vector<InferenceEngine::SizeVector> targetShapes;
std::string targetDevice;
std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShapes, targetDevice) =
std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetShapes, targetDevice) =
obj.param;
ngraph::op::PadType padType;
InferenceEngine::SizeVector kernel, stride, dilation;
@ -22,7 +26,8 @@ std::string ConvolutionLayerTest::getTestCaseName(testing::TestParamInfo<convLay
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType) = convParams;
std::ostringstream result;
result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_";
result << "IS=" << CommonTestUtils::vec2str(inputShape) << "_";
result << "TS=" << CommonTestUtils::vec2str(targetShapes) << "_";
result << "K" << CommonTestUtils::vec2str(kernel) << "_";
result << "S" << CommonTestUtils::vec2str(stride) << "_";
result << "PB" << CommonTestUtils::vec2str(padBegin) << "_";
@ -41,23 +46,29 @@ std::string ConvolutionLayerTest::getTestCaseName(testing::TestParamInfo<convLay
void ConvolutionLayerTest::SetUp() {
convSpecificParams convParams;
std::vector<size_t> inputShape;
auto netPrecision = InferenceEngine::Precision::UNSPECIFIED;
std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetDevice) =
std::vector<std::pair<size_t, size_t>> inputShape;
std::vector<InferenceEngine::SizeVector> targetShapes;
std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetShapes, targetDevice) =
this->GetParam();
ngraph::op::PadType padType;
InferenceEngine::SizeVector kernel, stride, dilation;
std::vector<ptrdiff_t> padBegin, padEnd;
size_t convOutChannels;
for (auto&& targetShape : targetShapes) {
targetStaticShapes.emplace_back(targetShape);
}
inputDynamicShape = FuncTestUtils::PartialShapeUtils::vec2partialshape(inputShape, targetStaticShapes[0]);
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType) = convParams;
setTargetStaticShape(targetStaticShapes[0]);
makeConvolutionFunction();
}
void ConvolutionLayerTest::makeConvolutionFunction() {
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {inputShape});
auto params = ngraph::builder::makeParams(ngPrc, {targetStaticShape});
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::vector<float> filter_weights;
if (targetDevice == CommonTestUtils::DEVICE_GNA) {
auto filter_size = std::accumulate(std::begin(kernel), std::end(kernel), 1, std::multiplies<size_t>());
filter_weights = CommonTestUtils::generate_float_numbers(convOutChannels * inputShape[1] * filter_size,
filter_weights = CommonTestUtils::generate_float_numbers(convOutChannels * targetStaticShape[1] * filter_size,
-0.5f, 0.5f);
}
auto conv = std::dynamic_pointer_cast<ngraph::opset1::Convolution>(
@ -66,4 +77,5 @@ void ConvolutionLayerTest::SetUp() {
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(conv)};
function = std::make_shared<ngraph::Function>(results, params, "convolution");
}
} // namespace LayerTestsDefinitions

View File

@ -3,6 +3,7 @@
//
#include "shared_test_classes/single_layer/softmax.hpp"
#include "functional_test_utils/partial_shape_utils.hpp"
namespace LayerTestsDefinitions {
@ -10,11 +11,12 @@ std::string SoftMaxLayerTest::getTestCaseName(testing::TestParamInfo<softMaxLaye
InferenceEngine::Precision netPrecision;
InferenceEngine::Precision inPrc, outPrc;
InferenceEngine::Layout inLayout, outLayout;
InferenceEngine::SizeVector inputShape;
std::vector<std::pair<size_t, size_t>> inputShape;
std::vector<InferenceEngine::SizeVector> targetShapes;
size_t axis;
std::string targetDevice;
std::map<std::string, std::string> config;
std::tie(netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, axis, targetDevice, config) = obj.param;
std::tie(netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetShapes, axis, targetDevice, config) = obj.param;
std::ostringstream result;
result << "netPRC=" << netPrecision.name() << "_";
@ -23,6 +25,7 @@ std::string SoftMaxLayerTest::getTestCaseName(testing::TestParamInfo<softMaxLaye
result << "inL=" << inLayout << "_";
result << "outL=" << outLayout << "_";
result << "IS=" << CommonTestUtils::vec2str(inputShape) << "_";
result << "TS=" << CommonTestUtils::vec2str(targetShapes) << "_";
result << "axis=" << axis << "_";
result << "trgDev=" << targetDevice;
@ -30,24 +33,30 @@ std::string SoftMaxLayerTest::getTestCaseName(testing::TestParamInfo<softMaxLaye
}
void SoftMaxLayerTest::SetUp() {
InferenceEngine::SizeVector inputShape;
InferenceEngine::Precision netPrecision;
size_t axis;
std::vector<std::pair<size_t, size_t>> inputShape;
std::vector<InferenceEngine::SizeVector> targetShapes;
std::tie(netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, axis, targetDevice, configuration) = GetParam();
std::tie(netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetShapes, axis, targetDevice, configuration) = GetParam();
outLayout = inLayout;
for (auto&& targetShape : targetShapes) {
targetStaticShapes.emplace_back(targetShape);
}
inputDynamicShape = FuncTestUtils::PartialShapeUtils::vec2partialshape(inputShape, targetStaticShapes[0]);
makeSoftMaxFunction();
}
// Builds the ngraph function under test: a single Softmax (over `axis`) applied
// to one Parameter shaped to the current targetStaticShape.
void SoftMaxLayerTest::makeSoftMaxFunction() {
    const auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
    // The Parameter uses the concrete target shape, not inputDynamicShape; the
    // network is reshaped to the dynamic bounds later (see ConfigureNetwork's
    // cnnNetwork.reshape call).
    const auto params = ngraph::builder::makeParams(ngPrc, {targetStaticShape});
    const auto paramOuts =
        ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
    const auto softMax = std::make_shared<ngraph::opset1::Softmax>(paramOuts.at(0), axis);
    const ngraph::ResultVector results {std::make_shared<ngraph::opset1::Result>(softMax)};
    function = std::make_shared<ngraph::Function>(results, params, "softMax");
}
} // namespace LayerTestsDefinitions

View File

@ -47,6 +47,21 @@ inline std::string vec2str(const std::vector<vecElementType> &vec) {
return std::string("()");
}
// Renders a (min, max) range pair as "(<first>.<second>)" for use in test-case names.
inline std::string pair2str(const std::pair<size_t, size_t>& p) {
    return "(" + std::to_string(p.first) + "." + std::to_string(p.second) + ")";
}
// Specialization for vectors of (min, max) range pairs: concatenates the
// pair2str() rendering of every element, e.g. "(1.10)(3.30)".
template<>
inline std::string vec2str(const std::vector<std::pair<size_t, size_t>> &vec) {
    std::string joined;
    for (size_t i = 0; i < vec.size(); ++i) {
        joined += pair2str(vec[i]);
    }
    return joined;
}
template<typename vecElementType>
inline std::string vec2str(const std::vector<std::vector<vecElementType>> &vec) {
std::ostringstream result;

View File

@ -22,6 +22,9 @@ const char REPORT_EXTENSION[] = ".xml";
const unsigned int maxFileNameLength = 140;
#define UNDEFINED_RANGE {-1, -1}
#define NULL_RANGE {}
#ifdef _WIN32
#ifdef __MINGW32__
const char pre[] = "lib";

View File

@ -0,0 +1,39 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <vector>
#include "ngraph_functions/utils/ngraph_helpers.hpp"
namespace FuncTestUtils {
namespace PartialShapeUtils {
namespace {
inline bool isInputShapeFakeEmpty(std::vector<std::pair<size_t, size_t>>& inputShape) {
return (inputShape.size() == 1 && inputShape[0] == std::pair<size_t, size_t>(0, 0));
}
}
inline ngraph::PartialShape vec2partialshape(std::vector<std::pair<size_t, size_t>> inputShape, const ngraph::Shape& targetShape) {
if (isInputShapeFakeEmpty(inputShape)) {
inputShape.clear();
}
if (inputShape.empty()) {
for (auto&& item : targetShape) {
inputShape.emplace_back(item, item);
}
}
std::vector<ngraph::Dimension> dimensions;
dimensions.reserve(inputShape.size());
for (auto&& item : inputShape) {
dimensions.emplace_back(item.first, item.second);
}
return ngraph::PartialShape(dimensions);
}
} // namespace PartialShapeUtils
} // namespace FuncTestUtils