Added BatchNormInference tests (#1927)

Liubov Batanina 2020-08-25 16:28:05 +03:00 committed by GitHub
parent 03cd918a30
commit c5b19aa8f9
5 changed files with 150 additions and 0 deletions

@@ -0,0 +1,45 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <vector>
#include "single_layer_tests/batch_norm.hpp"
using namespace LayerTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<double> epsilon = {
1e-6,
1e-5,
1e-4
};
const std::vector<std::vector<size_t>> inputShapes = {
{1, 3},
{2, 5},
{1, 3, 10},
{1, 3, 1, 1},
{2, 5, 4, 4},
};
const auto batchNormParams = testing::Combine(
testing::ValuesIn(epsilon),
testing::ValuesIn(netPrecisions),
testing::ValuesIn(inputShapes),
testing::Values(CommonTestUtils::DEVICE_CPU)
);
INSTANTIATE_TEST_CASE_P(
BatchNorm,
BatchNormLayerTest,
batchNormParams,
BatchNormLayerTest::getTestCaseName
);
} // namespace
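The testing::Combine call above expands to the full Cartesian product of the parameter lists (3 epsilon values x 2 precisions x 5 input shapes x 1 device = 30 generated test instances). For orientation, a minimal GoogleTest-only sketch of the same pattern on a toy parameter tuple, with no Inference Engine utilities assumed (ToyCombineTest is an illustrative name, not part of this commit):

#include <tuple>
#include <vector>
#include <gtest/gtest.h>

// Toy fixture: each generated test receives one (epsilon, rank) tuple.
class ToyCombineTest : public testing::TestWithParam<std::tuple<double, int>> {};

TEST_P(ToyCombineTest, ReceivesOneTupleFromTheProduct) {
    double eps;
    int rank;
    std::tie(eps, rank) = GetParam();
    EXPECT_GT(eps, 0.0);
    EXPECT_GE(rank, 2);
}

// 3 epsilons x 4 ranks -> 12 generated test instances.
INSTANTIATE_TEST_CASE_P(
        Toy,
        ToyCombineTest,
        testing::Combine(testing::ValuesIn(std::vector<double>{1e-6, 1e-5, 1e-4}),
                         testing::ValuesIn(std::vector<int>{2, 3, 4, 5})));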

@@ -0,0 +1,28 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "functional_test_utils/layer_test_utils.hpp"
#include "ngraph_functions/builders.hpp"
typedef std::tuple<
double, // epsilon
InferenceEngine::Precision, // Net precision
InferenceEngine::SizeVector, // Input shapes
LayerTestsUtils::TargetDevice // Target device name
> BatchNormLayerTestParams;
namespace LayerTestsDefinitions {
class BatchNormLayerTest : public testing::WithParamInterface<BatchNormLayerTestParams>,
public LayerTestsUtils::LayerTestsCommon {
public:
static std::string getTestCaseName(const testing::TestParamInfo<BatchNormLayerTestParams>& obj);
protected:
void SetUp() override;
};
} // namespace LayerTestsDefinitions

@@ -0,0 +1,44 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "single_layer_tests/batch_norm.hpp"
namespace LayerTestsDefinitions {
std::string BatchNormLayerTest::getTestCaseName(const testing::TestParamInfo<BatchNormLayerTestParams>& obj) {
InferenceEngine::Precision netPrecision;
InferenceEngine::SizeVector inputShapes;
double epsilon;
std::string targetDevice;
std::tie(epsilon, netPrecision, inputShapes, targetDevice) = obj.param;
std::ostringstream result;
result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_";
result << "epsilon=" << epsilon << "_";
result << "netPRC=" << netPrecision.name() << "_";
result << "targetDevice=" << targetDevice;
return result.str();
}
void BatchNormLayerTest::SetUp() {
InferenceEngine::Precision netPrecision;
InferenceEngine::SizeVector inputShapes;
double epsilon;
std::tie(epsilon, netPrecision, inputShapes, targetDevice) = this->GetParam();
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {inputShapes});
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::opset4::Parameter>(params));
auto batchNorm = ngraph::builder::makeBatchNormInference(paramOuts[0], epsilon);
ngraph::ResultVector results{std::make_shared<ngraph::opset4::Result>(batchNorm)};
function = std::make_shared<ngraph::Function>(results, params, "BatchNormInference");
}
TEST_P(BatchNormLayerTest, CompareWithRefs) {
Run();
}
} // namespace LayerTestsDefinitions
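CompareWithRefs runs the generated subgraph on the target device and checks the output against the nGraph reference. For orientation, BatchNormInference normalizes each channel c as y = gamma[c] * (x - mean[c]) / sqrt(variance[c] + epsilon) + beta[c]. A minimal scalar sketch of that computation for a flattened NCHW tensor, independent of the test utilities (batchNormReference is an illustrative name):

#include <cmath>
#include <cstddef>
#include <vector>

// Illustrative reference: per-channel BatchNormInference on a flattened NCHW tensor.
std::vector<float> batchNormReference(const std::vector<float>& x,
                                      const std::vector<float>& gamma,
                                      const std::vector<float>& beta,
                                      const std::vector<float>& mean,
                                      const std::vector<float>& variance,
                                      double epsilon,
                                      std::size_t N, std::size_t C, std::size_t spatial) {
    std::vector<float> y(x.size());
    for (std::size_t n = 0; n < N; ++n) {
        for (std::size_t c = 0; c < C; ++c) {
            // Scale and shift are constant within a channel at inference time.
            const float scale = gamma[c] / std::sqrt(variance[c] + static_cast<float>(epsilon));
            for (std::size_t s = 0; s < spatial; ++s) {
                const std::size_t i = (n * C + c) * spatial + s;
                y[i] = scale * (x[i] - mean[c]) + beta[c];
            }
        }
    }
    return y;
}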

@@ -334,5 +334,8 @@ std::shared_ptr<ngraph::Node> makePad(const ngraph::Output<Node>& data,
float argPadValue,
ngraph::helpers::PadMode padMode);
std::shared_ptr<ngraph::Node> makeBatchNormInference(const ngraph::Output<Node>& data,
double epsilon);
} // namespace builder
} // namespace ngraph

@@ -0,0 +1,30 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <vector>
#include <memory>
#include <random>     // std::default_random_engine, std::uniform_real_distribution
#include <algorithm>  // std::generate
#include "ngraph_functions/builders.hpp"
namespace ngraph {
namespace builder {
std::shared_ptr<ngraph::Node> makeBatchNormInference(const ngraph::Output<Node>& data,
double epsilon) {
auto ngPrc = data.get_element_type();
size_t C = data.get_shape().at(1);
bool random = true;
std::vector<float> values(C);
auto gamma = ngraph::builder::makeConstant(ngPrc, ngraph::Shape{C}, values, random);
auto beta = ngraph::builder::makeConstant(ngPrc, ngraph::Shape{C}, values, random);
auto mean = ngraph::builder::makeConstant(ngPrc, ngraph::Shape{C}, values, random);
// Fill the vector for variance with positive values
std::default_random_engine gen;
std::uniform_real_distribution<float> dis(0.0, 10.0);
std::generate(values.begin(), values.end(), [&dis, &gen]() { return dis(gen); });
auto variance = ngraph::builder::makeConstant(ngPrc, ngraph::Shape{C}, values, !random);
return std::make_shared<ngraph::opset4::BatchNormInference>(data, gamma, beta, mean, variance, epsilon);
}
} // namespace builder
} // namespace ngraph
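A hypothetical usage sketch of the new helper, mirroring what BatchNormLayerTest::SetUp builds (buildTinyBatchNorm and the 1x3x4x4 shape are illustrative choices, not part of this commit):

#include <memory>
#include "ngraph_functions/builders.hpp"

// Build a small f32 NCHW BatchNormInference subgraph with the helper above.
std::shared_ptr<ngraph::Function> buildTinyBatchNorm() {
    auto param = std::make_shared<ngraph::opset4::Parameter>(ngraph::element::f32,
                                                             ngraph::Shape{1, 3, 4, 4});
    auto batchNorm = ngraph::builder::makeBatchNormInference(param, 1e-5);
    return std::make_shared<ngraph::Function>(ngraph::NodeVector{batchNorm},
                                              ngraph::ParameterVector{param},
                                              "TinyBatchNormInference");
}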