Clean-up files in tests helpers (#1173)

This commit is contained in:
Ilya Lavrenov 2020-07-01 22:34:43 +03:00 committed by GitHub
parent acaab888f2
commit c9749ce397
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 254 additions and 289 deletions

View File

@ -23,6 +23,21 @@
TEST_P(ModelTransformationsTest, LPT) {}
// Asserts that the input-data precision of the named layer matches
// `expectedPrecision`. With inputIndex == -1 (default) every input is checked;
// otherwise only the input at that index. Throws when the layer is not found.
static void checkLayerInputPrecision(const ICNNNetwork& network, const std::string& layerName, Precision expectedPrecision, int inputIndex = -1) {
    CNNLayerPtr layer = getLayer(network, layerName);
    if (layer == nullptr) {
        THROW_IE_EXCEPTION << "layer '" << layerName << "' was not found";
    }
    for (size_t index = 0ul; index < layer->insData.size(); ++index) {
        // inputIndex >= 0 on this path, so the cast is safe; it also avoids a
        // signed/unsigned comparison warning (size_t vs int).
        if ((inputIndex != -1) && (index != static_cast<size_t>(inputIndex))) {
            continue;
        }
        const DataWeakPtr weakData = layer->insData[index];
        ASSERT_EQ(expectedPrecision, weakData.lock()->getPrecision()) << " unexpected precision " << weakData.lock()->getPrecision() << " for layer " << layerName;
    }
}
ModelParams getModelParams(const std::string modelName) {
std::map<std::string, ModelParams> modelParams = {
{
@ -68,7 +83,7 @@ std::map<std::string, ModelParams> modelParams = {
for (const std::pair<std::string, std::string> item : fakeQuantizeAndConcolutionItems) {
TestsCommonFunc::checkLayerOuputPrecision(*usedNetwork, item.first, Precision::U8);
if (!item.second.empty()) {
TestsCommonFunc::checkLayerInputPrecision(*usedNetwork, item.second, Precision::U8, 0);
checkLayerInputPrecision(*usedNetwork, item.second, Precision::U8, 0);
}
}
}

View File

@ -90,6 +90,19 @@ class BinaryConvolutionOnlyTest : public TestsCommon,
protected:
// Fills `data` with a deterministic binary pattern: +1 where sinf(index) is
// strictly positive, -1 otherwise.
static void fill_data_bin(float *data, size_t size) {
    for (size_t idx = 0; idx < size; ++idx) {
        const bool positive = sinf(static_cast<float>(idx)) > 0.f;
        data[idx] = positive ? 1.f : -1.f;
    }
}
// Fills a bit-packed buffer (1 bit per logical value): writes
// div_up(size, 8) bytes, each byte set to its own index modulo 255.
static void fill_data_bin_packed(int8_t *data, size_t size) {
    const int nbits = 8;
    const size_t num_bytes = div_up(size, nbits);
    for (size_t byte_idx = 0; byte_idx < num_bytes; ++byte_idx) {
        data[byte_idx] = static_cast<int8_t>(byte_idx % 255);
    }
}
// Standard convolution output-size formula (symmetric padding assumed):
// floor((in + 2*pad - kernel) / stride) + 1.
size_t calculateOutDim(size_t in_dim, size_t kernel, size_t stride, size_t pad_begin) {
    const size_t padded_input = in_dim + 2lu * pad_begin;
    const size_t fitting_range = padded_input - kernel;
    return fitting_range / stride + 1lu;
}

View File

@ -181,6 +181,35 @@ void SingleLayerTransformationsTest::compareInDetails(
}
}
// Element-wise comparison of `res` against `ref`, both of length `size`.
// NaN in both positions counts as a match. When either value is exactly zero,
// the absolute difference must stay below `zero_diff` (relative error is
// meaningless against zero); otherwise the relative difference, normalised by
// the larger of the two values, must stay below `max_diff`.
// `assertDetails` is appended to any failure message.
static void relative_compare(
    const float* res,
    const float* ref,
    size_t size,
    float max_diff = 0.01f,
    const std::string assertDetails = "",
    float zero_diff = 1e-7f) {
    for (size_t i = 0lu; i < size; i++) {
        // Both NaN: treated as equal by convention.
        if (std::isnan(res[i]) && std::isnan(ref[i])) {
            continue;
        }

        if ((ref[i] == 0.f) || (res[i] == 0.f)) {
            // Absolute fallback for zero values.
            const float diff = fabs(res[i] - ref[i]);
            ASSERT_TRUE(diff < zero_diff) <<
                "\nAbsolute comparison of values ref: " << ref[i] << " and res: " << res[i] <<
                ", diff: " << diff <<
                ", index: " << i << "\n" << assertDetails;
        } else {
            // (std::max) parenthesised to dodge a max() macro (e.g. windows.h).
            const float diff = fabs((res[i] - ref[i]) / (std::max)(ref[i], res[i]));
            ASSERT_LT(diff, max_diff) <<
                "\nRelative comparison of values ref: " << ref[i] << " and res: " << res[i] <<
                ", diff: " << diff <<
                ", max_diff: " << max_diff <<
                ", index: " << i << "\n" << assertDetails;
        }
    }
}
void SingleLayerTransformationsTest::SetUp() {
try {
const SingleLayerTransformationsTestParams p = ::testing::WithParamInterface<SingleLayerTransformationsTestParams>::GetParam();

View File

@ -31,6 +31,7 @@
#endif
#define REPLACE_WITH_NUM(SRC, PATTERN, NUM) REPLACE_WITH_STR(SRC, PATTERN, to_string_c_locale(NUM))
#define REPLACE_WITH_NUM_VECTOR(SRC, PATTERN, NUMS) \
{ std::string result; \
if (NUMS.size() > 0u) { \
@ -40,6 +41,7 @@
} \
} \
REPLACE_WITH_STR(SRC, PATTERN, result); }
#define REPLACE_WITH_NUM_VECTOR_REVERSE(SRC, PATTERN, NUMS) \
{ std::string result; \
auto nums_size = NUMS.size(); \
@ -50,6 +52,7 @@
} \
} \
REPLACE_WITH_STR(SRC, PATTERN, result); }
#define REMOVE_LINE(SRC, PATTERN) REPLACE_WITH_STR(SRC, PATTERN, "")
#define PRETTY_PARAM(name, type) \
@ -67,14 +70,6 @@
*os << #name ": " << ::testing::PrintToString((name::param_type)(param)); \
}
// Thin named wrapper around a string-to-string map — presumably used as a
// gtest parameter type (see the PRETTY_PARAM machinery in this header).
struct MapStrStr {
    std::map<std::string, std::string> data{};

    MapStrStr() = default;
    explicit MapStrStr(std::map<std::string, std::string> _data) : data(std::move(_data)) {}
};
template<int Version = 3>
inline InferenceEngine::CNNNetwork
buildSingleLayerNetworkCommon(const std::string &layerType,

View File

@ -72,121 +72,73 @@ void TestsCommon::TearDown() {
InferenceEngine::ExecutorManager::getInstance()->clear();
}
IE_SUPPRESS_DEPRECATED_START
/**
* @brief Copies a 8-bit RGB image to the blob.
*
* Throws an exception in case of dimensions or input size mismatch
*
* @tparam data_t Type of the target blob
* @param RGB8 8-bit RGB image
* @param RGB8_size Size of the image
* @param blob Target blob to write image to
*/
template <typename data_t>
void copyFromRGB8(uint8_t* RGB8, size_t RGB8_size, InferenceEngine::TBlob<data_t>* blob) {
InferenceEngine::SizeVector dims = blob->getTensorDesc().getDims();
if (4 != dims.size())
THROW_IE_EXCEPTION << "Cannot write data to input blob! Blob has incorrect dimensions size " << dims.size();
size_t num_channels = dims[1]; // because RGB
size_t num_images = dims[0];
size_t w = dims[3];
size_t h = dims[2];
size_t nPixels = w * h;
class BaseTestCreator {
protected:
std::string _type;
public:
explicit BaseTestCreator(const std::string& type) : _type(type) {}
virtual ~BaseTestCreator() = default;
if (RGB8_size != w * h * num_channels * num_images)
THROW_IE_EXCEPTION << "input pixels mismatch, expecting " << w * h * num_channels * num_images
<< " bytes, got: " << RGB8_size;
virtual InferenceEngine::CNNLayerPtr create(const std::string& type) = 0;
virtual bool shouldCreate(const std::string& type) = 0;
};
template<class LT>
class LayerTestCreator : public BaseTestCreator {
public:
explicit LayerTestCreator(const std::string& type) : BaseTestCreator(type) {}
InferenceEngine::CNNLayerPtr create(const std::string& type) override {
InferenceEngine::LayerParams params;
params.type = type;
return std::make_shared<LT>(params);
std::vector<data_t*> dataArray;
for (unsigned int n = 0; n < num_images; n++) {
for (unsigned int i = 0; i < num_channels; i++) {
if (!n && !i && dataArray.empty()) {
dataArray.push_back(blob->data());
} else {
dataArray.push_back(dataArray.at(n * num_channels + i - 1) + nPixels);
}
}
}
bool shouldCreate(const std::string& type) override {
return type == _type;
for (size_t n = 0; n < num_images; n++) {
size_t n_num_channels = n * num_channels;
size_t n_num_channels_nPixels = n_num_channels * nPixels;
for (size_t i = 0; i < nPixels; i++) {
size_t i_num_channels = i * num_channels + n_num_channels_nPixels;
for (size_t j = 0; j < num_channels; j++) {
dataArray.at(n_num_channels + j)[i] = RGB8[i_num_channels + j];
}
}
}
};
// Registry mapping a layer type name to the creator that instantiates the
// matching InferenceEngine CNNLayer subclass. Built once (function-local
// static) and shared by all callers; several type names map to the same
// layer class (e.g. "LRN"/"Norm", all Math and Reduce variants).
static std::vector<std::shared_ptr<BaseTestCreator>>& getCreators() {
    // there should be unique_ptr but it cant be used with initializer lists
    static std::vector<std::shared_ptr<BaseTestCreator> > creators = {
        std::make_shared<LayerTestCreator<InferenceEngine::PowerLayer>>("Power"),
        std::make_shared<LayerTestCreator<InferenceEngine::ConvolutionLayer>>("Convolution"),
        std::make_shared<LayerTestCreator<InferenceEngine::DeconvolutionLayer>>("Deconvolution"),
        std::make_shared<LayerTestCreator<InferenceEngine::PoolingLayer>>("Pooling"),
        std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("InnerProduct"),
        std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("FullyConnected"),
        std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("LRN"),
        std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("Norm"),
        std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("Softmax"),
        std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("LogSoftMax"),
        std::make_shared<LayerTestCreator<InferenceEngine::GRNLayer>>("GRN"),
        std::make_shared<LayerTestCreator<InferenceEngine::MVNLayer>>("MVN"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReLULayer>>("ReLU"),
        std::make_shared<LayerTestCreator<InferenceEngine::ClampLayer>>("Clamp"),
        std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Split"),
        std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Slice"),
        std::make_shared<LayerTestCreator<InferenceEngine::ConcatLayer>>("Concat"),
        std::make_shared<LayerTestCreator<InferenceEngine::EltwiseLayer>>("Eltwise"),
        std::make_shared<LayerTestCreator<InferenceEngine::ScaleShiftLayer>>("ScaleShift"),
        std::make_shared<LayerTestCreator<InferenceEngine::PReLULayer>>("PReLU"),
        std::make_shared<LayerTestCreator<InferenceEngine::CropLayer>>("Crop"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReshapeLayer>>("Reshape"),
        std::make_shared<LayerTestCreator<InferenceEngine::TileLayer>>("Tile"),
        std::make_shared<LayerTestCreator<InferenceEngine::BatchNormalizationLayer>>("BatchNormalization"),
        std::make_shared<LayerTestCreator<InferenceEngine::GemmLayer>>("Gemm"),
        std::make_shared<LayerTestCreator<InferenceEngine::PadLayer>>("Pad"),
        std::make_shared<LayerTestCreator<InferenceEngine::GatherLayer>>("Gather"),
        std::make_shared<LayerTestCreator<InferenceEngine::StridedSliceLayer>>("StridedSlice"),
        std::make_shared<LayerTestCreator<InferenceEngine::ShuffleChannelsLayer>>("ShuffleChannels"),
        std::make_shared<LayerTestCreator<InferenceEngine::DepthToSpaceLayer>>("DepthToSpace"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReverseSequenceLayer>>("ReverseSequence"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Abs"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Acos"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Acosh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Asin"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Asinh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Atan"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Atanh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Ceil"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Cos"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Cosh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Erf"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Floor"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("HardSigmoid"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Log"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Exp"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Reciprocal"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Selu"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Sign"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Sin"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Sinh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Softplus"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Softsign"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Tan"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceAnd"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceL1"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceL2"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceLogSum"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceLogSumExp"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceMax"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceMean"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceMin"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceOr"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceProd"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceSum"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceSumSquare"),
        std::make_shared<LayerTestCreator<InferenceEngine::TopKLayer>>("TopK"),
        std::make_shared<LayerTestCreator<InferenceEngine::NonMaxSuppressionLayer>>("NonMaxSuppression"),
        std::make_shared<LayerTestCreator<InferenceEngine::ScatterUpdateLayer>>("ScatterUpdate"),
        std::make_shared<LayerTestCreator<InferenceEngine::ScatterElementsUpdateLayer>>("ScatterElementsUpdate")
    };
    return creators;
}
InferenceEngine::CNNLayer::Ptr TestsCommon::createLayer(const std::string& type) {
for (auto& creator : getCreators()) {
if (!creator->shouldCreate(type))
continue;
return creator->create(type);
}
static LayerTestCreator<InferenceEngine::GenericLayer> genericCreator("");
return genericCreator.create(type);
/**
 * @brief Splits the RGB channels to either I16 Blob or float blob.
 *
 * The image buffer is assumed to be packed with no support for strides.
 *
 * @param imgBufRGB8 Packed 24bit RGB image (3 bytes per pixel: R-G-B)
 * @param lengthbytesSize Size in bytes of the RGB image. It is equal to amount of pixels times 3 (number of channels)
 * @param input Blob to contain the split image (to 3 channels)
 */
void ConvertImageToInput(unsigned char* imgBufRGB8, size_t lengthbytesSize, InferenceEngine::Blob& input) {
    // Probe supported element types in turn; at most one dynamic_cast can
    // succeed for a given blob, so at most one copy is performed.
    // NOTE(review): a blob of an unsupported element type is silently left
    // untouched — confirm that is intended.
    InferenceEngine::TBlob<float>* float_input = dynamic_cast<InferenceEngine::TBlob<float>*>(&input);
    if (float_input != nullptr)
        copyFromRGB8(imgBufRGB8, lengthbytesSize, float_input);

    InferenceEngine::TBlob<short>* short_input = dynamic_cast<InferenceEngine::TBlob<short>*>(&input);
    if (short_input != nullptr)
        copyFromRGB8(imgBufRGB8, lengthbytesSize, short_input);

    InferenceEngine::TBlob<uint8_t>* byte_input = dynamic_cast<InferenceEngine::TBlob<uint8_t>*>(&input);
    if (byte_input != nullptr)
        copyFromRGB8(imgBufRGB8, lengthbytesSize, byte_input);
}
IE_SUPPRESS_DEPRECATED_END

View File

@ -40,16 +40,13 @@ inline std::string to_string_c_locale(T value) {
}
class TestsCommon : public ::testing::Test {
public:
IE_SUPPRESS_DEPRECATED_START
static InferenceEngine::CNNLayer::Ptr createLayer(const std::string &type);
IE_SUPPRESS_DEPRECATED_END
protected:
void SetUp() override;
// Composes a platform-specific shared-library file name from a base name:
// prefix + input + build postfix + extension (values supplied by
// CommonTestUtils / IE_BUILD_POSTFIX — presumably e.g. "lib<name>.so" on Linux).
static std::string make_so_name(const std::string & input) {
    return CommonTestUtils::pre + input + IE_BUILD_POSTFIX + CommonTestUtils::ext;
}
void TearDown() override;
public:
@ -57,10 +54,6 @@ public:
return make_plugin_name("mock_engine");
}
// Builds the platform-specific shared-library file name
// (prefix + base name + build postfix + extension).
static std::string make_so_name(const std::string & input) {
    return CommonTestUtils::pre + input + IE_BUILD_POSTFIX + CommonTestUtils::ext;
}
// Plugin binaries follow the regular shared-library naming scheme.
static std::string make_plugin_name(const std::string & input) {
    return make_so_name(input);
}
@ -79,25 +72,6 @@ public:
}
}
// Fills `data` with a non-zero pattern: value at index i is (n*i) % 254 + 1,
// i.e. always within [1, 254].
static void fill_data_non_zero(int32_t *data, size_t size, int n) {
    for (size_t pos = 0; pos < size; ++pos) {
        const size_t modulated = n * pos % 254;
        data[pos] = static_cast<int32_t>(modulated + 1);
    }
}
// Writes a +/-1 pattern into `data`: +1 when sinf(index) is strictly
// positive, -1 otherwise (deterministic across runs).
static void fill_data_bin(float *data, size_t size) {
    size_t i = 0;
    while (i < size) {
        data[i] = (sinf(static_cast<float>(i)) > 0.f) ? 1.f : -1.f;
        ++i;
    }
}
// Fills a bit-packed buffer (8 logical values per byte): writes
// div_up(size, 8) bytes, each byte set to its index modulo 255.
static void fill_data_bin_packed(int8_t *data, size_t size) {
    int nbits = 8;
    for (size_t i = 0; i < div_up(size, nbits); i++) {
        data[i] = static_cast<int8_t>(i % 255);
    }
}
static void fill_data_dbgval(float *data, size_t size, float alpha = 1.0f) {
for (size_t i = 0; i < size; i++) {
data[i] = i * alpha;
@ -141,35 +115,6 @@ public:
}
}
// Compares `res` against `ref` element by element (both of length `size`).
// A NaN in both arrays at the same index is accepted. When either value is
// exactly zero the absolute difference must be below `zero_diff`; otherwise
// the difference relative to the larger of the two values must be below
// `max_diff`. `assertDetails` is appended to failure output.
static void relative_compare(
    const float* res,
    const float* ref,
    size_t size,
    float max_diff = 0.01f,
    const std::string assertDetails = "",
    float zero_diff = 1e-7f) {
    for (size_t i = 0lu; i < size; i++) {
        // Both NaN counts as equal.
        if (std::isnan(res[i]) && std::isnan(ref[i])) {
            continue;
        }

        if ((ref[i] == 0.f) || (res[i] == 0.f)) {
            // Relative error is undefined against zero — compare absolutely.
            const float diff = fabs(res[i] - ref[i]);
            ASSERT_TRUE(diff < zero_diff) <<
                "\nAbsolute comparison of values ref: " << ref[i] << " and res: " << res[i] <<
                ", diff: " << diff <<
                ", index: " << i << "\n" << assertDetails;
        } else {
            // (std::max) parenthesised to avoid clashing with a max() macro.
            const float diff = fabs((res[i] - ref[i]) / (std::max)(ref[i], res[i]));
            ASSERT_LT(diff, max_diff) <<
                "\nRelative comparison of values ref: " << ref[i] << " and res: " << res[i] <<
                ", diff: " << diff <<
                ", max_diff: " << max_diff <<
                ", index: " << i << "\n" << assertDetails;
        }
    }
}
void replace(std::string& str, const std::string& from, const std::string& to) {
std::string::size_type pos = 0;
@ -221,13 +166,6 @@ public:
# error Unsupported architecture
#endif
// Downcasts a shared pointer to TBlob<T>; yields an empty shared_ptr when the
// dynamic type of *obj is not TBlob<T>.
template <typename T,typename S>
std::shared_ptr<InferenceEngine::TBlob<T>> to_tblob(const std::shared_ptr<S> &obj)
{
    return std::dynamic_pointer_cast<InferenceEngine::TBlob<T>>(obj);
}
inline InferenceEngine::InputInfo::Ptr getFirstInput(InferenceEngine::ICNNNetwork *pNet)
{
InferenceEngine::InputsDataMap inputs;
@ -236,53 +174,6 @@ inline InferenceEngine::InputInfo::Ptr getFirstInput(InferenceEngine::ICNNNetwor
return inputs.begin()->second;
}
/**
 * @brief Copies a 8-bit RGB image to the blob.
 *
 * Throws an exception in case of dimensions or input size mismatch
 *
 * @tparam data_t Type of the target blob
 * @param RGB8 8-bit RGB image
 * @param RGB8_size Size of the image
 * @param blob Target blob to write image to
 */
template <typename data_t>
void copyFromRGB8(uint8_t* RGB8, size_t RGB8_size, InferenceEngine::TBlob<data_t>* blob) {
    InferenceEngine::SizeVector dims = blob->getTensorDesc().getDims();
    // Expecting a 4-D blob: dims are read as [images, channels, height, width].
    if (4 != dims.size())
        THROW_IE_EXCEPTION << "Cannot write data to input blob! Blob has incorrect dimensions size " << dims.size();
    size_t num_channels = dims[1];  // because RGB
    size_t num_images = dims[0];
    size_t w = dims[3];
    size_t h = dims[2];
    size_t nPixels = w * h;

    if (RGB8_size != w * h * num_channels * num_images)
        THROW_IE_EXCEPTION << "input pixels mismatch, expecting " << w * h * num_channels * num_images
                           << " bytes, got: " << RGB8_size;

    // Precompute one base pointer per (image, channel) plane: each plane
    // starts nPixels elements after the previous one (planar destination).
    std::vector<data_t*> dataArray;
    for (unsigned int n = 0; n < num_images; n++) {
        for (unsigned int i = 0; i < num_channels; i++) {
            if (!n && !i && dataArray.empty()) {
                dataArray.push_back(blob->data());
            } else {
                dataArray.push_back(dataArray.at(n * num_channels + i - 1) + nPixels);
            }
        }
    }

    // De-interleave: source pixels are packed (R,G,B,R,G,B,...), destination
    // planes are one channel each.
    for (size_t n = 0; n < num_images; n++) {
        size_t n_num_channels = n * num_channels;
        size_t n_num_channels_nPixels = n_num_channels * nPixels;
        for (size_t i = 0; i < nPixels; i++) {
            size_t i_num_channels = i * num_channels + n_num_channels_nPixels;
            for (size_t j = 0; j < num_channels; j++) {
                dataArray.at(n_num_channels + j)[i] = RGB8[i_num_channels + j];
            }
        }
    }
}
/**
* @brief Splits the RGB channels to either I16 Blob or float blob.
*
@ -292,16 +183,4 @@ void copyFromRGB8(uint8_t* RGB8, size_t RGB8_size, InferenceEngine::TBlob<data_t
* @param lengthbytesSize Size in bytes of the RGB image. It is equal to amount of pixels times 3 (number of channels)
* @param input Blob to contain the split image (to 3 channels)
*/
// Splits a packed 24-bit RGB buffer into the per-channel planes of `input`.
// Supported blob element types: float, short (I16) and uint8_t; at most one of
// the dynamic_casts below succeeds. NOTE(review): blobs of any other element
// type are silently ignored — confirm that is intended.
inline void ConvertImageToInput(unsigned char* imgBufRGB8, size_t lengthbytesSize, InferenceEngine::Blob& input) {
    InferenceEngine::TBlob<float>* float_input = dynamic_cast<InferenceEngine::TBlob<float>*>(&input);
    if (float_input != nullptr)
        copyFromRGB8(imgBufRGB8, lengthbytesSize, float_input);

    InferenceEngine::TBlob<short>* short_input = dynamic_cast<InferenceEngine::TBlob<short>*>(&input);
    if (short_input != nullptr)
        copyFromRGB8(imgBufRGB8, lengthbytesSize, short_input);

    InferenceEngine::TBlob<uint8_t>* byte_input = dynamic_cast<InferenceEngine::TBlob<uint8_t>*>(&input);
    if (byte_input != nullptr)
        copyFromRGB8(imgBufRGB8, lengthbytesSize, byte_input);
}
void ConvertImageToInput(unsigned char* imgBufRGB8, size_t lengthbytesSize, InferenceEngine::Blob& input);

View File

@ -20,10 +20,6 @@ using namespace InferenceEngine;
IE_SUPPRESS_DEPRECATED_START
class TestsCommonFunc {
public:
InferenceEngine::Blob::Ptr readInput(std::string path, int batch = 1);
static CNNLayerPtr getLayer(const ICNNNetwork& network, const std::string& layerName) {
std::vector<CNNLayerPtr> layers = InferenceEngine::details::CNNNetSortTopologically(network);
for (CNNLayerPtr layer : layers) {
@ -34,6 +30,9 @@ public:
return nullptr;
}
public:
InferenceEngine::Blob::Ptr readInput(std::string path, int batch = 1);
static void checkLayerOuputPrecision(
const ICNNNetwork& network,
@ -65,21 +64,6 @@ public:
}
}
// Verifies that the precision of the named layer's input data equals
// `expectedPrecision`. With inputIndex == -1 (default) every input is checked;
// otherwise only the input at `inputIndex`. Throws when the layer is absent.
static void checkLayerInputPrecision(const ICNNNetwork& network, const std::string& layerName, Precision expectedPrecision, int inputIndex = -1) {
    CNNLayerPtr layer = getLayer(network, layerName);
    if (layer == nullptr) {
        THROW_IE_EXCEPTION << "layer '" << layerName << "' was not found";
    }
    for (size_t index = 0ul; index < layer->insData.size(); ++index) {
        // Skip inputs other than the requested one (the signed/unsigned
        // comparison is benign here: inputIndex >= 0 on this path).
        if ((inputIndex != -1) && (index != inputIndex)) {
            continue;
        }
        const DataWeakPtr weakData = layer->insData[index];
        ASSERT_EQ(expectedPrecision, weakData.lock()->getPrecision()) << " unexpected precision " << weakData.lock()->getPrecision() << " for layer " << layerName;
    }
}
static void checkLayerOuputPrecision(const ICNNNetwork& network, const std::string& layerName, std::vector<Precision> expectedPrecisions) {
CNNLayerPtr layer = getLayer(network, layerName);
if (layer == nullptr) {
@ -94,30 +78,6 @@ public:
}
}
// Returns true when every (float) element of the blob holds the same value.
// Blobs with zero or one element are trivially uniform.
static bool hasBlobEqualsValues(Blob& blob) {
    const float* buffer = blob.buffer().as<float*>();
    const size_t size = blob.size();
    // Guard: size() is unsigned, so the former `size - 1` loop bound wrapped
    // around for an empty blob and read out of bounds; `int i` also triggered
    // a signed/unsigned comparison.
    if (size < 2) {
        return true;
    }
    for (size_t i = 0; i < size - 1; ++i) {
        if (buffer[i] != buffer[i + 1]) {
            return false;
        }
    }
    return true;
}
// Checks that the uniformity of the scale-shift layer's "weights" and "biases"
// blobs matches `equals`: with equals == true both blobs must consist of one
// repeated value, with equals == false both must contain differing values.
// Returns false as soon as either blob violates the expectation.
static bool checkScalesAndShifts(const CNNLayer& scaleShift, const bool equals) {
    const Blob::Ptr scalesBlob = InferenceEngine::details::CNNNetworkHelper::getBlob(std::make_shared<CNNLayer>(scaleShift), "weights");
    if (equals != hasBlobEqualsValues(*scalesBlob)) {
        return false;
    }

    const Blob::Ptr shiftsBlob = InferenceEngine::details::CNNNetworkHelper::getBlob(std::make_shared<CNNLayer>(scaleShift), "biases");
    if (equals != hasBlobEqualsValues(*shiftsBlob)) {
        return false;
    }

    return true;
}
bool compareTop(
InferenceEngine::Blob& blob,
std::vector<std::pair<int, float>> &ref_top,

View File

@ -0,0 +1,120 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "layer_builder.h"
// Abstract factory interface for building CNNLayer instances of a single
// registered layer type.
class BaseTestCreator {
protected:
    std::string _type;  // the layer type name this creator is responsible for

public:
    explicit BaseTestCreator(const std::string& type) : _type(type) {}
    virtual ~BaseTestCreator() = default;

    // Constructs a layer of the given type (the argument, not _type, is used).
    virtual InferenceEngine::CNNLayerPtr create(const std::string& type) = 0;
    // Returns true when this creator handles `type`.
    virtual bool shouldCreate(const std::string& type) = 0;
};
// Concrete creator: builds a layer of class LT (e.g. ConvolutionLayer) whose
// LayerParams::type is set to the requested type string.
template<class LT>
class LayerTestCreator : public BaseTestCreator {
public:
    explicit LayerTestCreator(const std::string& type) : BaseTestCreator(type) {}

    InferenceEngine::CNNLayerPtr create(const std::string& type) override {
        InferenceEngine::LayerParams params;
        params.type = type;
        return std::make_shared<LT>(params);
    }

    // Matches on exact type-name equality with the registered type.
    bool shouldCreate(const std::string& type) override {
        return type == _type;
    }
};
// Registry mapping each layer type name to the creator that instantiates the
// matching InferenceEngine CNNLayer subclass. Built once (function-local
// static) and reused; several names map to the same layer class
// (e.g. "LRN"/"Norm", the Math and Reduce families).
static std::vector<std::shared_ptr<BaseTestCreator>>& getCreators() {
    // there should be unique_ptr but it cant be used with initializer lists
    static std::vector<std::shared_ptr<BaseTestCreator> > creators = {
        std::make_shared<LayerTestCreator<InferenceEngine::PowerLayer>>("Power"),
        std::make_shared<LayerTestCreator<InferenceEngine::ConvolutionLayer>>("Convolution"),
        std::make_shared<LayerTestCreator<InferenceEngine::DeconvolutionLayer>>("Deconvolution"),
        std::make_shared<LayerTestCreator<InferenceEngine::PoolingLayer>>("Pooling"),
        std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("InnerProduct"),
        std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("FullyConnected"),
        std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("LRN"),
        std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("Norm"),
        std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("Softmax"),
        std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("LogSoftMax"),
        std::make_shared<LayerTestCreator<InferenceEngine::GRNLayer>>("GRN"),
        std::make_shared<LayerTestCreator<InferenceEngine::MVNLayer>>("MVN"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReLULayer>>("ReLU"),
        std::make_shared<LayerTestCreator<InferenceEngine::ClampLayer>>("Clamp"),
        std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Split"),
        std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Slice"),
        std::make_shared<LayerTestCreator<InferenceEngine::ConcatLayer>>("Concat"),
        std::make_shared<LayerTestCreator<InferenceEngine::EltwiseLayer>>("Eltwise"),
        std::make_shared<LayerTestCreator<InferenceEngine::ScaleShiftLayer>>("ScaleShift"),
        std::make_shared<LayerTestCreator<InferenceEngine::PReLULayer>>("PReLU"),
        std::make_shared<LayerTestCreator<InferenceEngine::CropLayer>>("Crop"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReshapeLayer>>("Reshape"),
        std::make_shared<LayerTestCreator<InferenceEngine::TileLayer>>("Tile"),
        std::make_shared<LayerTestCreator<InferenceEngine::BatchNormalizationLayer>>("BatchNormalization"),
        std::make_shared<LayerTestCreator<InferenceEngine::GemmLayer>>("Gemm"),
        std::make_shared<LayerTestCreator<InferenceEngine::PadLayer>>("Pad"),
        std::make_shared<LayerTestCreator<InferenceEngine::GatherLayer>>("Gather"),
        std::make_shared<LayerTestCreator<InferenceEngine::StridedSliceLayer>>("StridedSlice"),
        std::make_shared<LayerTestCreator<InferenceEngine::ShuffleChannelsLayer>>("ShuffleChannels"),
        std::make_shared<LayerTestCreator<InferenceEngine::DepthToSpaceLayer>>("DepthToSpace"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReverseSequenceLayer>>("ReverseSequence"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Abs"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Acos"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Acosh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Asin"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Asinh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Atan"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Atanh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Ceil"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Cos"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Cosh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Erf"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Floor"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("HardSigmoid"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Log"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Exp"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Reciprocal"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Selu"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Sign"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Sin"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Sinh"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Softplus"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Softsign"),
        std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Tan"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceAnd"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceL1"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceL2"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceLogSum"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceLogSumExp"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceMax"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceMean"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceMin"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceOr"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceProd"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceSum"),
        std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceSumSquare"),
        std::make_shared<LayerTestCreator<InferenceEngine::TopKLayer>>("TopK"),
        std::make_shared<LayerTestCreator<InferenceEngine::NonMaxSuppressionLayer>>("NonMaxSuppression"),
        std::make_shared<LayerTestCreator<InferenceEngine::ScatterUpdateLayer>>("ScatterUpdate"),
        std::make_shared<LayerTestCreator<InferenceEngine::ScatterElementsUpdateLayer>>("ScatterElementsUpdate")
    };
    return creators;
}
// Creates a CNNLayer of the requested type via the registered creators;
// unknown types fall back to a GenericLayer.
InferenceEngine::CNNLayer::Ptr CNNLayerValidationTests::createLayer(const std::string& type) {
    for (auto& creator : getCreators()) {
        if (!creator->shouldCreate(type))
            continue;
        return creator->create(type);
    }
    // Fallback for unregistered types: the creator is registered under the
    // empty name, but create() still stamps the requested type onto the layer.
    static LayerTestCreator<InferenceEngine::GenericLayer> genericCreator("");
    return genericCreator.create(type);
}

View File

@ -92,13 +92,15 @@ public:
class CNNLayerValidationTests : public testing::TestWithParam<std::string>{
public:
static InferenceEngine::CNNLayer::Ptr createLayer(const std::string &type);
// Captures the layer type under test from the gtest value parameter.
void SetUp() override {
    auto params = GetParam();
    type = params;  // `type` member is declared elsewhere in the class
}
std::shared_ptr<LayerBuilder>& createConcreteLayer(const std::string& type) {
layer = std::make_shared<LayerBuilder>(TestsCommon::createLayer(type));
layer = std::make_shared<LayerBuilder>(createLayer(type));
return layer;
}