templateFuncTest runs except for multiple input shape cases

Steve Yoo 2021-09-16 19:24:00 +09:00
parent 8adbaba71d
commit ceb802d978
11 changed files with 49 additions and 70 deletions

View File

@@ -80,8 +80,9 @@ INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
 ::testing::Values(InferenceEngine::Layout::ANY),
 ::testing::Values(InferenceEngine::Layout::ANY),
 ::testing::Values(std::vector<std::pair<size_t, size_t>>({{1, 10}, {3, 30}, {30, 300}, {30, 300}})),
-// ::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30, 30}, {2, 4, 31, 31}})),
-::testing::Values(std::vector<std::vector<size_t>>({{2, 4, 31, 31}})),
+::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30, 30}}),
+                  std::vector<std::vector<size_t>>({{2, 4, 31, 31}}),
+                  std::vector<std::vector<size_t>>({{1, 3, 30, 30}, {2, 4, 31, 31}})),
 ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
 ConvolutionLayerTest::getTestCaseName);
 // ! [test_convolution:instantiate]
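
For reference, the shape-set change above leans on plain gtest parameterization: every argument handed to ::testing::Values becomes its own test instance, so listing the two single-shape sets and the combined set produces three instantiations of the convolution test. A minimal, self-contained sketch of that mechanism (the ShapeSetDemoTest fixture below is hypothetical, not the OpenVINO fixture):

    #include <gtest/gtest.h>

    #include <cstddef>
    #include <vector>

    // One parameter = one set of static input shapes for a test instance.
    using ShapeSet = std::vector<std::vector<std::size_t>>;

    class ShapeSetDemoTest : public ::testing::TestWithParam<ShapeSet> {};

    TEST_P(ShapeSetDemoTest, HasAtLeastOneShape) {
        EXPECT_GE(GetParam().size(), 1u);  // every set carries at least one static shape
    }

    // Three Values() arguments -> three instantiated cases, mirroring the diff above.
    INSTANTIATE_TEST_SUITE_P(ShapeSets, ShapeSetDemoTest,
                             ::testing::Values(ShapeSet({{1, 3, 30, 30}}),
                                               ShapeSet({{2, 4, 31, 31}}),
                                               ShapeSet({{1, 3, 30, 30}, {2, 4, 31, 31}})));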

View File

@@ -43,7 +43,8 @@ INSTANTIATE_TEST_SUITE_P(
 ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 ::testing::Values(InferenceEngine::Layout::ANY),
 ::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(std::vector<size_t>({1, 3, 30})),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30}})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ConvolutionLayerTest::getTestCaseName);
@@ -55,7 +56,8 @@ INSTANTIATE_TEST_SUITE_P(
 ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 ::testing::Values(InferenceEngine::Layout::ANY),
 ::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(std::vector<size_t>({1, 3, 30})),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30}})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ConvolutionLayerTest::getTestCaseName);
@@ -87,7 +89,8 @@ INSTANTIATE_TEST_SUITE_P(
 ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 ::testing::Values(InferenceEngine::Layout::ANY),
 ::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(std::vector<size_t>({1, 3, 30, 30})),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30, 30}})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ConvolutionLayerTest::getTestCaseName);
@@ -99,7 +102,8 @@ INSTANTIATE_TEST_SUITE_P(
 ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 ::testing::Values(InferenceEngine::Layout::ANY),
 ::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(std::vector<size_t>({1, 3, 30, 30})),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 30, 30}})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ConvolutionLayerTest::getTestCaseName);
@@ -127,7 +131,8 @@ namespace specificWeightLayout {
 ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 ::testing::Values(InferenceEngine::Layout::ANY),
 ::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(std::vector<size_t>({1, 1, 50, 75})),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{1, 1, 50, 75}})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ConvolutionLayerTest::getTestCaseName);
 } // namespace specificWeightLayout
@@ -159,7 +164,8 @@ INSTANTIATE_TEST_SUITE_P(
 ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 ::testing::Values(InferenceEngine::Layout::ANY),
 ::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(std::vector<size_t>({1, 3, 10, 10, 10})),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 10, 10, 10}})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ConvolutionLayerTest::getTestCaseName);
@@ -171,7 +177,8 @@ INSTANTIATE_TEST_SUITE_P(
 ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 ::testing::Values(InferenceEngine::Layout::ANY),
 ::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(std::vector<size_t>({1, 3, 10, 10, 10})),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{1, 3, 10, 10, 10}})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ConvolutionLayerTest::getTestCaseName);

View File

@@ -113,7 +113,7 @@ public:
 // w/a: copy of original function is required to provide correct op coverage report (overflow of convert counter issue)
 auto copyOriginalFunction = function;
 //force the reference implementation to use graph with extra Convert operation
-function = reference_function;
+functionRefs = reference_function;
 LayerTestsUtils::LayerTestsCommon::Validate();
 function = copyOriginalFunction;
 }

View File

@@ -106,6 +106,7 @@ void HeteroSyntheticTest::SetUp() {
 --num;
 }
 function = std::get<Function>(param)._function;
+functionRefs = function;
 }
 void HeteroSyntheticTest::TearDown() {

View File

@@ -45,7 +45,6 @@ public:
 protected:
 void SetUp() override;
 std::shared_ptr<ngraph::Function> makeConvolution(const std::string& name = "");
-void Run() override;
 private:
 InferenceEngine::Precision::ePrecision netPrecision = InferenceEngine::Precision::UNSPECIFIED;

View File

@@ -36,7 +36,7 @@ public:
 protected:
 void SetUp() override;
-void makeSoftMax();
+std::shared_ptr<ngraph::Function> makeSoftMax(const std::string& name = "");
 private:
 InferenceEngine::Precision netPrecision;

View File

@@ -39,10 +39,10 @@ void LayerTestsCommon::Run() {
 s.setDeviceName(targetDevice);
 if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
-s.updateOPsStats(function, PassRate::Statuses::SKIPPED);
+s.updateOPsStats(functionRefs, PassRate::Statuses::SKIPPED);
 GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
 } else {
-s.updateOPsStats(function, PassRate::Statuses::CRASHED);
+s.updateOPsStats(functionRefs, PassRate::Statuses::CRASHED);
 }
 try {
@@ -52,17 +52,17 @@
 GenerateInputs();
 Infer();
 Validate();
-s.updateOPsStats(function, PassRate::Statuses::PASSED);
+s.updateOPsStats(functionRefs, PassRate::Statuses::PASSED);
 }
 }
 catch (const std::runtime_error &re) {
-s.updateOPsStats(function, PassRate::Statuses::FAILED);
+s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
 GTEST_FATAL_FAILURE_(re.what());
 } catch (const std::exception &ex) {
-s.updateOPsStats(function, PassRate::Statuses::FAILED);
+s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
 GTEST_FATAL_FAILURE_(ex.what());
 } catch (...) {
-s.updateOPsStats(function, PassRate::Statuses::FAILED);
+s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
 GTEST_FATAL_FAILURE_("Unknown failure occurred.");
 }
 }
@@ -95,8 +95,10 @@ void LayerTestsCommon::Serialize() {
 InferenceEngine::Blob::Ptr LayerTestsCommon::GenerateInput(const InferenceEngine::InputInfo& info) const {
 return FuncTestUtils::createAndFillBlob(
-InferenceEngine::TensorDesc(info.getPrecision(), targetStaticShape,
-const_cast<InferenceEngine::InputInfo&>(info).getLayout()));
+targetStaticShape.empty() ? info.getTensorDesc()
+                          : InferenceEngine::TensorDesc(info.getPrecision(),
+                                                        targetStaticShape,
+                                                        const_cast<InferenceEngine::InputInfo&>(info).getLayout()));
 }
 void LayerTestsCommon::Compare(const std::vector<std::pair<ngraph::element::Type, std::vector<std::uint8_t>>> &expectedOutputs,
@@ -317,12 +319,14 @@ void LayerTestsCommon::ConfigureNetwork() {
 }
 }
-std::map<std::string, ngraph::PartialShape> inputShapes;
-auto inputsDataMap = cnnNetwork.getInputsInfo();
-for (auto&& inputDataMap : inputsDataMap) {
-inputShapes[inputDataMap.first] = std::vector<ngraph::Dimension>(inputDynamicShape);
+if (inputDynamicShape.is_dynamic()) {
+std::map<std::string, ngraph::PartialShape> inputShapes;
+auto inputsDataMap = cnnNetwork.getInputsInfo();
+for (auto&& inputDataMap : inputsDataMap) {
+inputShapes[inputDataMap.first] = std::vector<ngraph::Dimension>(inputDynamicShape);
+}
+cnnNetwork.reshape(inputShapes);
 }
-cnnNetwork.reshape(inputShapes);
 }
 void LayerTestsCommon::LoadNetwork() {
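
The ConfigureNetwork change above only asks the network to reshape when the tracked partial shape is actually dynamic; otherwise the original static shapes are kept. A rough standalone sketch of that guard, assuming the Inference Engine and ngraph headers these tests already use and the PartialShape-based reshape overload that the diff itself calls (the helper name reshapeIfDynamic is made up for illustration):

    #include <map>
    #include <string>

    #include <inference_engine.hpp>
    #include <ngraph/ngraph.hpp>

    // Request a reshape only for dynamic target shapes; static networks are left untouched.
    void reshapeIfDynamic(InferenceEngine::CNNNetwork& cnnNetwork,
                          const ngraph::PartialShape& inputDynamicShape) {
        if (!inputDynamicShape.is_dynamic())
            return;
        std::map<std::string, ngraph::PartialShape> inputShapes;
        for (auto&& inputDataMap : cnnNetwork.getInputsInfo())
            inputShapes[inputDataMap.first] = inputDynamicShape;
        cnnNetwork.reshape(inputShapes);
    }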

View File

@@ -58,7 +58,8 @@ void ConvolutionLayerTest::SetUp() {
 setTargetStaticShape(targetStaticShapes[0]);
 function = makeConvolution("convolution");
-functionRefs = makeConvolution("convolutionRefs");
+functionRefs = ngraph::clone_function(*function);
+functionRefs->set_friendly_name("convolutionRefs");
 }
 std::shared_ptr<ngraph::Function> ConvolutionLayerTest::makeConvolution(const std::string& name) {
@@ -79,45 +80,4 @@ std::shared_ptr<ngraph::Function> ConvolutionLayerTest::makeConvolution(const std::string& name) {
 return std::make_shared<ngraph::Function>(results, params, name);
 }
-void ConvolutionLayerTest::Run() {
-auto crashHandler = [](int errCode) {
-auto &s = LayerTestsUtils::Summary::getInstance();
-s.saveReport();
-std::cout << "Unexpected application crash!" << std::endl;
-std::abort();
-};
-signal(SIGSEGV, crashHandler);
-auto &s = LayerTestsUtils::Summary::getInstance();
-s.setDeviceName(targetDevice);
-if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
-s.updateOPsStats(function, LayerTestsUtils::PassRate::Statuses::SKIPPED);
-GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
-} else {
-s.updateOPsStats(function, LayerTestsUtils::PassRate::Statuses::CRASHED);
-}
-try {
-LoadNetwork();
-for (auto&& tss : targetStaticShapes) {
-setTargetStaticShape(tss);
-GenerateInputs();
-Infer();
-Validate();
-s.updateOPsStats(function, LayerTestsUtils::PassRate::Statuses::PASSED);
-}
-}
-catch (const std::runtime_error &re) {
-s.updateOPsStats(function, LayerTestsUtils::PassRate::Statuses::FAILED);
-GTEST_FATAL_FAILURE_(re.what());
-} catch (const std::exception &ex) {
-s.updateOPsStats(function, LayerTestsUtils::PassRate::Statuses::FAILED);
-GTEST_FATAL_FAILURE_(ex.what());
-} catch (...) {
-s.updateOPsStats(function, LayerTestsUtils::PassRate::Statuses::FAILED);
-GTEST_FATAL_FAILURE_("Unknown failure occurred.");
-}
-}
 } // namespace LayerTestsDefinitions
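
Several of the SetUp changes in this commit share one pattern: build the test graph once, then clone it for the reference path instead of constructing it a second time. A minimal sketch of that pattern using only the public ngraph API (the helper name cloneAsRefs is hypothetical):

    #include <memory>
    #include <string>

    #include <ngraph/ngraph.hpp>

    // Deep-copy the test function so the reference run owns an independent graph,
    // and give the copy its own friendly name for reporting (e.g. "convolutionRefs").
    std::shared_ptr<ngraph::Function> cloneAsRefs(const std::shared_ptr<ngraph::Function>& function,
                                                  const std::string& refsName) {
        auto functionRefs = ngraph::clone_function(*function);
        functionRefs->set_friendly_name(refsName);
        return functionRefs;
    }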

View File

@@ -45,6 +45,8 @@ void ReshapeLayerTest::SetUp() {
 std::make_shared<ngraph::opset1::Reshape>(paramIn[0], constNode, specialZero));
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(reshape)};
 function = std::make_shared<ngraph::Function>(results, paramsIn, "Reshape");
+functionRefs = ngraph::clone_function(*function);
+functionRefs->set_friendly_name("ReshapeRefs");
 }
 } // namespace LayerTestsDefinitions

View File

@@ -45,10 +45,13 @@ void SoftMaxLayerTest::SetUp() {
 inputDynamicShape = FuncTestUtils::PartialShapeUtils::vec2partialshape(inputShape, targetStaticShapes[0]);
-makeSoftMax();
+setTargetStaticShape(targetStaticShapes[0]);
+function = makeSoftMax("softMax");
+functionRefs = ngraph::clone_function(*function);
+functionRefs->set_friendly_name("softMaxRefs");
 }
-void SoftMaxLayerTest::makeSoftMax() {
+std::shared_ptr<ngraph::Function> SoftMaxLayerTest::makeSoftMax(const std::string& name) {
 const auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
 const auto params = ngraph::builder::makeParams(ngPrc, {targetStaticShape});
 const auto paramOuts =
@@ -56,7 +59,7 @@ void SoftMaxLayerTest::makeSoftMax() {
 const auto softMax = std::make_shared<ngraph::opset1::Softmax>(paramOuts.at(0), axis);
 const ngraph::ResultVector results {std::make_shared<ngraph::opset1::Result>(softMax)};
-function = std::make_shared<ngraph::Function>(results, params, "softMax");
+return std::make_shared<ngraph::Function>(results, params, name);
 }
 } // namespace LayerTestsDefinitions

View File

@@ -53,5 +53,7 @@ void SplitLayerTest::SetUp() {
 results.push_back(std::make_shared<ngraph::opset5::Result>(split->output(outIndices[i])));
 }
 function = std::make_shared<ngraph::Function>(results, params, "split");
+functionRefs = ngraph::clone_function(*function);
+functionRefs->set_friendly_name("splitRefs");
 }
 } // namespace LayerTestsDefinitions