Removed tests for old IR reader (#1368)

This commit is contained in:
Ilya Lavrenov
2020-07-18 12:42:40 +03:00
committed by GitHub
parent cc19e57a06
commit 2ddf08d14b
17 changed files with 0 additions and 4549 deletions

View File

@@ -7,7 +7,6 @@
#include <map>
#include <functional_test_utils/layer_test_utils.hpp>
#include "common_test_utils/common_layers_params.hpp"
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/test_constants.hpp"

View File

@@ -9,7 +9,6 @@
#include <map>
#include <functional_test_utils/layer_test_utils.hpp>
#include "common_test_utils/common_layers_params.hpp"
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/test_constants.hpp"

View File

@@ -7,7 +7,6 @@
#include <map>
#include <functional_test_utils/layer_test_utils.hpp>
#include "common_test_utils/common_layers_params.hpp"
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/test_constants.hpp"

View File

@@ -17,7 +17,6 @@ file(GLOB
graph_tools/*.cpp
inference_engine_tests/*.cpp
stress_tests/*.cpp
topology_verification_tests/*.cpp
)
if (ENABLE_GNA)

View File

@@ -1,120 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "layer_builder.h"
// Abstract factory interface for the layer-validation tests: each concrete
// creator is registered for exactly one IR layer type (matched by its
// string name) and knows how to instantiate a CNNLayer for it.
class BaseTestCreator {
protected:
std::string _type;  // layer type name this creator is responsible for
public:
explicit BaseTestCreator(const std::string& type) : _type(type) {}
virtual ~BaseTestCreator() = default;
// Instantiate a layer object carrying the given type string.
virtual InferenceEngine::CNNLayerPtr create(const std::string& type) = 0;
// Whether this creator handles the given layer type.
virtual bool shouldCreate(const std::string& type) = 0;
};
// Concrete creator: produces a layer of class LT whenever the requested
// type string equals the one this creator was registered with.
template<class LT>
class LayerTestCreator : public BaseTestCreator {
public:
    explicit LayerTestCreator(const std::string& type) : BaseTestCreator(type) {}

    bool shouldCreate(const std::string& type) override {
        return _type == type;
    }

    InferenceEngine::CNNLayerPtr create(const std::string& type) override {
        InferenceEngine::LayerParams layerParams;
        layerParams.type = type;
        return std::make_shared<LT>(layerParams);
    }
};
// Registry of per-type layer creators, built once (function-local static)
// and returned by reference so all lookups share the same list. Several
// IR type aliases intentionally map to the same layer class (e.g.
// "InnerProduct"/"FullyConnected", "LRN"/"Norm", "Split"/"Slice").
static std::vector<std::shared_ptr<BaseTestCreator>>& getCreators() {
// NOTE: unique_ptr would be preferable, but it cannot be used with initializer lists.
static std::vector<std::shared_ptr<BaseTestCreator> > creators = {
std::make_shared<LayerTestCreator<InferenceEngine::PowerLayer>>("Power"),
std::make_shared<LayerTestCreator<InferenceEngine::ConvolutionLayer>>("Convolution"),
std::make_shared<LayerTestCreator<InferenceEngine::DeconvolutionLayer>>("Deconvolution"),
std::make_shared<LayerTestCreator<InferenceEngine::PoolingLayer>>("Pooling"),
std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("InnerProduct"),
std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("FullyConnected"),
std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("LRN"),
std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("Norm"),
std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("Softmax"),
std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("LogSoftMax"),
std::make_shared<LayerTestCreator<InferenceEngine::GRNLayer>>("GRN"),
std::make_shared<LayerTestCreator<InferenceEngine::MVNLayer>>("MVN"),
std::make_shared<LayerTestCreator<InferenceEngine::ReLULayer>>("ReLU"),
std::make_shared<LayerTestCreator<InferenceEngine::ClampLayer>>("Clamp"),
std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Split"),
std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Slice"),
std::make_shared<LayerTestCreator<InferenceEngine::ConcatLayer>>("Concat"),
std::make_shared<LayerTestCreator<InferenceEngine::EltwiseLayer>>("Eltwise"),
std::make_shared<LayerTestCreator<InferenceEngine::ScaleShiftLayer>>("ScaleShift"),
std::make_shared<LayerTestCreator<InferenceEngine::PReLULayer>>("PReLU"),
std::make_shared<LayerTestCreator<InferenceEngine::CropLayer>>("Crop"),
std::make_shared<LayerTestCreator<InferenceEngine::ReshapeLayer>>("Reshape"),
std::make_shared<LayerTestCreator<InferenceEngine::TileLayer>>("Tile"),
std::make_shared<LayerTestCreator<InferenceEngine::BatchNormalizationLayer>>("BatchNormalization"),
std::make_shared<LayerTestCreator<InferenceEngine::GemmLayer>>("Gemm"),
std::make_shared<LayerTestCreator<InferenceEngine::PadLayer>>("Pad"),
std::make_shared<LayerTestCreator<InferenceEngine::GatherLayer>>("Gather"),
std::make_shared<LayerTestCreator<InferenceEngine::StridedSliceLayer>>("StridedSlice"),
std::make_shared<LayerTestCreator<InferenceEngine::ShuffleChannelsLayer>>("ShuffleChannels"),
std::make_shared<LayerTestCreator<InferenceEngine::DepthToSpaceLayer>>("DepthToSpace"),
std::make_shared<LayerTestCreator<InferenceEngine::ReverseSequenceLayer>>("ReverseSequence"),
// All element-wise math ops share the generic MathLayer.
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Abs"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Acos"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Acosh"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Asin"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Asinh"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Atan"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Atanh"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Ceil"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Cos"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Cosh"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Erf"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Floor"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("HardSigmoid"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Log"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Exp"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Reciprocal"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Selu"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Sign"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Sin"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Sinh"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Softplus"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Softsign"),
std::make_shared<LayerTestCreator<InferenceEngine::MathLayer>>("Tan"),
// All reductions share the generic ReduceLayer.
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceAnd"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceL1"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceL2"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceLogSum"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceLogSumExp"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceMax"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceMean"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceMin"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceOr"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceProd"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceSum"),
std::make_shared<LayerTestCreator<InferenceEngine::ReduceLayer>>("ReduceSumSquare"),
std::make_shared<LayerTestCreator<InferenceEngine::TopKLayer>>("TopK"),
std::make_shared<LayerTestCreator<InferenceEngine::NonMaxSuppressionLayer>>("NonMaxSuppression"),
std::make_shared<LayerTestCreator<InferenceEngine::ScatterUpdateLayer>>("ScatterUpdate"),
std::make_shared<LayerTestCreator<InferenceEngine::ScatterElementsUpdateLayer>>("ScatterElementsUpdate")
};
return creators;
}
// Build a layer of the requested type via the first matching registered
// creator; unknown types fall back to a GenericLayer.
InferenceEngine::CNNLayer::Ptr CNNLayerValidationTests::createLayer(const std::string& type) {
    for (const auto& candidate : getCreators()) {
        if (candidate->shouldCreate(type)) {
            return candidate->create(type);
        }
    }
    static LayerTestCreator<InferenceEngine::GenericLayer> genericCreator("");
    return genericCreator.create(type);
}

View File

@@ -1,129 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <memory>
#include <stdexcept>
#include <utility>

#include <gtest/gtest.h>
#include <tests_common.hpp>

#include "parameters.h"
#include "shapes.h"
using namespace InferenceEngine;
using namespace InferenceEngine::details;
class LayerBuilder {
private:
CNNLayerPtr layer;
std::string dataName = "data";
std::unique_ptr<Parameters> parameters;
public:
explicit LayerBuilder (InferenceEngine::CNNLayer::Ptr createdLayer) : layer(std::move(createdLayer)) {
parameters = std::unique_ptr<Parameters>(new Parameters(layer->type));
}
LayerBuilder& setParams(bool valid) {
if (valid) {
layer->params = parameters->getValidParameters();
} else {
layer->params = parameters->getInvalidParameters();
}
return *this;
}
LayerBuilder& setShapes(std::vector<DataPtr>& spData, bool valid_input) {
CommonTestUtils::InOutShapes shapes;
LayersWithNotEqualIO layersWithNotEqualIO;
LayersWithEqualIO layersWithEqualIO;
LayersWithNIO layersWithNIO;
std::vector<Layers*> layers{&layersWithNotEqualIO, &layersWithEqualIO, &layersWithNIO};
ShapesHelper* shapesHelper = nullptr;
for(const auto& layer : layers) {
if (layer->containLayer(this->layer->type)) {
shapesHelper = layer->factoryShape();
break;
}
}
if (valid_input) {
shapes = shapesHelper->getValidShapes();
} else {
shapes = shapesHelper->getInvalidInputShapes();
}
for (const auto& inData : shapes.inDims) {
DataPtr data = std::make_shared<Data>(dataName,
InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32,
SizeVector(inData.rbegin(), inData.rend()),
TensorDesc::getLayoutByDims(inData)));
spData.push_back(data);
layer->insData.push_back(data);
}
for (const auto& outData : shapes.outDims) {
layer->outData.push_back(std::make_shared<Data>(dataName,
InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32,
SizeVector(outData.rbegin(), outData.rend()),
TensorDesc::getLayoutByDims(outData))));
}
delete shapesHelper;
return *this;
}
CNNLayerPtr get() {
return layer;
}
int getNumOfParams() {
return parameters->getNumOfParameters();
}
int getNumOfLayerVariant() {
LayersWithNotEqualIO layersWithNotEqualIO;
LayersWithEqualIO layersWithEqualIO;
LayersWithNIO layersWithNIO;
Layers* layers[] = {&layersWithNotEqualIO, &layersWithEqualIO, &layersWithNIO};
int cnt = 0;
for(const auto& layer : layers) {
if (layer->containLayer(this->layer->type)) {
cnt++;
}
}
return cnt;
}
};
// gtest fixture parameterized by an IR layer type name; thin facade over
// LayerBuilder for the validation tests.
class CNNLayerValidationTests : public testing::TestWithParam<std::string>{
public:
    // Defined in layer_builder.cpp: builds a layer via the creator registry.
    static InferenceEngine::CNNLayer::Ptr createLayer(const std::string &type);

    void SetUp() override {
        type = GetParam();
    }

    // Create a builder for the given type and remember it in `layer`.
    std::shared_ptr<LayerBuilder>& createConcreteLayer(const std::string& type) {
        layer = std::make_shared<LayerBuilder>(createLayer(type));
        return layer;
    }

    std::shared_ptr<LayerBuilder>& getBuilder() {
        return layer;
    }

    CNNLayerPtr getLayer() {
        return layer->get();
    }

    int getNumOfParams() {
        return layer->getNumOfParams();
    }

    int getNumOfLayerVariant() {
        return layer->getNumOfLayerVariant();
    }

protected:
    std::string type;                    // layer type under test (from GetParam)
    bool valid_params = true;
    bool valid_input = true;
    std::shared_ptr<LayerBuilder> layer; // builder for the current layer
};

#define assertThat(type) SCOPED_TRACE("");createConcreteLayer(type)

View File

@@ -1,13 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <cstdint>
#include "mean_image.h"
// Reference mean-image data for the IR-reader tests: one explicit
// specialization per precision exercised (I16, FP32, U8). Values are
// arbitrary but fixed (6 elements) so tests can assert exact equality.
template <>
const std::vector<short> MeanImage<short>::getValue() { return { 1000, 1001, 1002, 1003, 1004, 1005 }; }
template <>
const std::vector<float> MeanImage<float>::getValue() { return { 10.10f, 11.11f, 12.12f, 13.13f, 14.14f, 15.15f }; }
template <>
const std::vector<uint8_t> MeanImage<uint8_t>::getValue() { return { 10, 11, 12, 13, 14, 15 }; }

View File

@@ -1,12 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <vector>
// Per-precision reference mean-image values for the parser tests.
// Explicit specializations (short, float, uint8_t) live in the matching
// .cpp file; each returns a fixed 6-element vector.
template <typename T>
struct MeanImage {
// Returns the fixed reference values. (The const-qualified by-value
// return is redundant but kept to match the out-of-line definitions.)
const static std::vector<T> getValue();
};

View File

@@ -1,291 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <iostream>
#include <map>
#include <string>
#include <vector>
#include <queue>
#include <random>
// Categories of values a parameter may take, used to describe both the
// valid and the deliberately invalid value pools.
enum class ParametersValues {
    ZERO,
    INT_POSITIVE,
    INT_NEGATIVE,
    FLOAT_POSITIVE,
    FLOAT_NEGATIVE,
    STRING
};

// Whether a parameter holds a single value or a comma-separated set.
enum class ParameterRange {
    SET,
    SINGLE
};

// (good value categories, bad value categories)
using GoodBadParams = std::pair<std::vector<ParametersValues>, std::vector<ParametersValues>>;
// parameter name -> (range kind, good/bad categories)
using Params = std::map<std::string, std::pair<ParameterRange, GoodBadParams>>;

// Merge two parameter tables. On duplicate keys the LEFT operand wins,
// because std::map::insert never overwrites an existing key.
Params operator + (const Params& val1, const Params& val2) {
    Params merged(val1);
    merged.insert(val2.begin(), val2.end());
    return merged;
}
// Generates parameter maps for a given layer type: getValidParameters()
// returns a fully valid set; getInvalidParameters() returns a valid set
// with exactly one parameter (rotating through a queue) corrupted.
// Concrete values are drawn at random on every call.
class Parameters {
private:
// Common for Convolution, Deconvolution, Pooling layers
Params common {
// Parameter name, range, type of good values, type of bad
{"stride-x", {ParameterRange::SINGLE,
{{ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"stride-y", {ParameterRange::SINGLE,
{{ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"kernel-x", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"kernel-y", {ParameterRange::SINGLE, {{ParametersValues::ZERO, ParametersValues::INT_POSITIVE}, {ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"pad-x", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"pad-y", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}}
};
// Extra parameters specific to (De)Convolution.
Params conv {
// Parameter name, range, type of good values, type of bad
{"dilation-x", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"dilation-y", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"output", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"group", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
};
// Extra parameters specific to Pooling.
Params pooling {
// Parameter name, range, type of good values, type of bad
{"pool-method", {ParameterRange::SINGLE,
{{ParametersValues::STRING},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"exclude-pad", {ParameterRange::SINGLE,
{{ParametersValues::STRING},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}}
};
Params detectionOutput {
// Parameter name, range, type of good values, type of bad
{"num_classes", {ParameterRange::SINGLE,
{{ParametersValues::INT_POSITIVE},
{ParametersValues::ZERO, ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"background_label_id", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"top_k", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"variance_encoded_in_target", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"keep_top_k", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"num_orient_classes", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"code_type", {ParameterRange::SINGLE,
{{ParametersValues::STRING},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"share_location", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"interpolate_orientation", {ParameterRange::SINGLE,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::STRING}}}},
{"nms_threshold", {ParameterRange::SINGLE,
{{ParametersValues::FLOAT_POSITIVE},
{ParametersValues::FLOAT_NEGATIVE, ParametersValues::STRING}}}},
{"confidence_threshold", {ParameterRange::SINGLE,
{{ParametersValues::FLOAT_POSITIVE},
{ParametersValues::FLOAT_NEGATIVE, ParametersValues::STRING}}}}
};
Params crop {
{"axis", {ParameterRange::SET,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"offset", {ParameterRange::SET,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"dim", {ParameterRange::SET,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"crop_begin", {ParameterRange::SET,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"crop_end", {ParameterRange::SET,
{{ParametersValues::ZERO, ParametersValues::INT_POSITIVE},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
};
Params interp {
{"height", {ParameterRange::SINGLE,
{{ParametersValues::INT_POSITIVE, ParametersValues::ZERO},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
{"factor", {ParameterRange::SINGLE,
{{ParametersValues::FLOAT_POSITIVE},
{ParametersValues::ZERO, ParametersValues::FLOAT_NEGATIVE, ParametersValues::STRING}}}},
{"shrink_factor", {ParameterRange::SINGLE,
{{ParametersValues::FLOAT_POSITIVE},
{ParametersValues::ZERO, ParametersValues::FLOAT_NEGATIVE, ParametersValues::STRING}}}},
{"zoom_factor", {ParameterRange::SINGLE,
{{ParametersValues::FLOAT_POSITIVE},
{ParametersValues::ZERO, ParametersValues::FLOAT_NEGATIVE, ParametersValues::STRING}}}},
{"width", {ParameterRange::SINGLE,
{{ParametersValues::INT_POSITIVE, ParametersValues::ZERO},
{ParametersValues::INT_NEGATIVE, ParametersValues::STRING}}}},
};
// Allowed string literals per layer type and parameter; used when a value
// category is STRING.
std::map<std::string, std::map<std::string, std::vector<std::string>>> stringParams {
{"Eltwise", {{"operation", {"sum", "max", "mul"}}}},
{"LRN", {{"region", {"across", "same"}}}},
{"Activation", {{"type", {"sigmoid", "tanh", "elu", "relu6"}}}},
{"Pooling", {{"pool_method", {"max", "avg"}}, {"exlude_pad", {"true", "false"}}}},
{"Resample", {{"type", {"caffe.ResampleParameter.LINEAR", "caffe.ResampleParameter.CUBIC",
"caffe.ResampleParameter.NEAREST"}}}},
{"DetectionOutput", {{"code_type", {"caffe.PriorBoxParameter.CENTER_SIZE", "caffe.PriorBoxParameter.CORNER"}}}}
};
std::map<std::string, Params> layerParamsNames {
// Layer name, Corresponding params names
{"Convolution", common + conv},
{"Deconvolution", common + conv},
{"Pooling", common + pooling},
{"DetectionOutput", detectionOutput},
{"Crop", crop},
{"Interp", interp}
};
// NOTE(review): appears unused within this class — confirm before removing.
const int zero = 0;
std::string type;
// RNG state; seeded from the wall clock in the string constructor below.
std::mt19937 gen;
std::uniform_int_distribution<int> distIntPositive;
std::uniform_int_distribution<int> distIntNegative;
std::uniform_real_distribution<float> distFloatNegative;
std::uniform_real_distribution<float> distFloatPositive;
// Names of parameters not yet corrupted by getInvalidParameters().
std::queue<std::string> paramWasInvalid;
public:
// Default-constructed instances have an unseeded RNG and an empty queue;
// only usable after being assigned from a properly constructed object.
Parameters() {}
// NOTE(review): uses std::chrono but <chrono> is not included directly —
// relies on a transitive include; confirm and add the header.
Parameters(const std::string& type) : gen(static_cast<unsigned long>(std::chrono::system_clock::now().time_since_epoch().count())),
distIntPositive(1, 100),
distIntNegative(-100, -1),
distFloatNegative(-10.0, -0.1),
distFloatPositive(0.1, 10.0) {
this->type = type;
Params param = getParametersByLayerName();
for (auto iter : param) {
paramWasInvalid.push(iter.first);
}
}
// Parameter table for this->type (empty Params for unknown types, since
// operator[] default-inserts).
Params getParametersByLayerName() {
return layerParamsNames[type];
}
// Expand value categories into concrete candidate strings: STRING draws
// from stringValues; each numeric category contributes `magicNumber`
// random samples. to_string_c_locale is a project helper producing
// locale-independent float strings.
std::vector<std::string> getDifferentParamValues(const std::vector<ParametersValues>& valuesType,
const std::vector<std::string>& stringValues) {
int magicNumber = 10;
std::vector<std::string> paramsValues = {};
for (auto i : valuesType) {
switch(i) {
case ParametersValues::ZERO: {
paramsValues.push_back("0");
break;
}
case ParametersValues::INT_POSITIVE: {
for (int j = 0; j < magicNumber; ++j) {
paramsValues.push_back(std::to_string(distIntPositive(gen)));
}
break;
}
case ParametersValues::INT_NEGATIVE: {
for (int j = 0; j < magicNumber; ++j) {
paramsValues.push_back(std::to_string(distIntNegative(gen)));
}
break;
}
case ParametersValues::FLOAT_POSITIVE: {
for (int j = 0; j < magicNumber; ++j) {
paramsValues.push_back(to_string_c_locale(distFloatPositive(gen)));
}
break;
}
case ParametersValues::FLOAT_NEGATIVE: {
for (int j = 0; j < magicNumber; ++j) {
paramsValues.push_back(to_string_c_locale(distFloatNegative(gen)));
}
break;
}
case ParametersValues::STRING: {
paramsValues.insert(paramsValues.begin(), stringValues.begin(), stringValues.end());
break;
}
}
}
return paramsValues;
}
// One random valid value per parameter of this layer type.
std::map<std::string, std::string> getValidParameters() {
Params param = getParametersByLayerName();
std::map<std::string, std::string> params;
for (auto i : param) {
params[i.first] = getCorrectParamValue(i.second, i.first);
}
return params;
}
// Pick a valid value. For SET ranges a comma-separated list is built;
// NOTE(review): the list keeps a trailing ", " and may be empty when the
// random count is 0 — confirm the consumer tolerates both.
std::string getCorrectParamValue(const std::pair<ParameterRange, GoodBadParams>& values,
const std::string& paramName) {
std::string parameter = "";
ParameterRange howMany = values.first;
std::vector<ParametersValues> valuesType = values.second.first;
std::vector<std::string> paramsValues = getDifferentParamValues(valuesType, stringParams[type][paramName]);
std::uniform_int_distribution<int> indexesDist(0, static_cast<int>(paramsValues.size() - 1));
if (howMany == ParameterRange::SINGLE) {
int index = indexesDist(gen);
parameter = paramsValues[index];
} else {
int numOfDigits = indexesDist(gen);
for (int i = 0; i < numOfDigits; i++) {
parameter += paramsValues[i] + ", ";
}
}
return parameter;
}
// Pick a random bad value from the parameter's invalid categories;
// "foo"/"bar" serve as the generic bad strings.
std::string getIncorrectParamValue(const std::pair<ParameterRange, GoodBadParams>& values) {
std::string parameter = "";
std::vector<ParametersValues> valuesType = values.second.second;
std::vector<std::string> paramsValues = getDifferentParamValues(valuesType, {"foo", "bar"});
std::uniform_int_distribution<int> indexesDist(0, static_cast<int>(paramsValues.size() - 1));
int index = indexesDist(gen);
parameter = paramsValues[index];
return parameter;
}
// Valid set with the next queued parameter replaced by a bad value.
// NOTE(review): the queue is never refilled — calling this more times
// than getNumOfParameters() pops an empty queue (undefined behavior);
// confirm callers bound the call count.
std::map<std::string, std::string> getInvalidParameters() {
std::map<std::string, std::string> params = getValidParameters();
std::string paramName = paramWasInvalid.front();
paramWasInvalid.pop();
params[paramName] = getIncorrectParamValue(layerParamsNames[type][paramName]);
return params;
}
// Number of parameters known for this layer type.
int getNumOfParameters() {
return static_cast<int>(layerParamsNames[type].size());
}
};

View File

@@ -1,389 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <fstream>
#include <gtest/gtest.h>
#include "cnn_network_impl.hpp"
#include <tests_common.hpp>
#include "ie_format_parser.h"
#include "ie_blob_proxy.hpp"
#include <string>
#include "pugixml.hpp"
#include "xml_parse_utils.h"
#include "mean_image.h"
#include "common_test_utils/xml_net_builder/xml_father.hpp"
class FormatParserTest : public TestsCommon {
public:
FormatParserTest() {
}
protected:
const char kPathSeparator =
#if defined _WIN32 || defined __CYGWIN__
'\\';
#else
'/';
#endif
const std::string parentDir = std::string("..") + std::to_string(FormatParserTest::kPathSeparator);
std::string getParentDir(std::string currentFile) const {
return parentDir + currentFile;
}
protected:
InferenceEngine::details::CNNNetworkImplPtr net;
ModelsPath _path_to_models;
InferenceEngine::InputInfo::Ptr getFirstInput() const {
return ::getFirstInput(net.get());
}
template<class LayerType>
std::shared_ptr<LayerType> getLayer(const std::string &name) const {
InferenceEngine::CNNLayerPtr ptr;
net->getLayerByName(name.c_str(), ptr, nullptr);
return std::dynamic_pointer_cast<LayerType>(ptr);
}
virtual void SetUp() {
_path_to_models += kPathSeparator;
}
void assertParseFail(const std::string &fileContent) {
try {
parse(fileContent);
FAIL() << "Parser didn't throw";
} catch (const std::exception &ex) {
SUCCEED() << ex.what();
}
}
void assertParseSucceed(const std::string &fileContent) {
ASSERT_NO_THROW(parse(fileContent));
}
void assertSetWeightsFail(const InferenceEngine::TBlob<uint8_t>::Ptr &binBlob) {
try {
parser->SetWeights(binBlob);
FAIL() << "Parser didn't throw";
} catch (const std::exception &ex) {
SUCCEED() << ex.what();
}
}
void assertSetWeightsSucceed(const InferenceEngine::TBlob<uint8_t>::Ptr &binBlob) {
ASSERT_NO_THROW(parser->SetWeights(binBlob));
}
void parse(const std::string &fileContent) {
// check which version it is...
pugi::xml_document xmlDoc;
auto res = xmlDoc.load_string(fileContent.c_str());
EXPECT_EQ(pugi::status_ok, res.status) << res.description() << " at offset " << res.offset;
pugi::xml_node root = xmlDoc.document_element();
int version = XMLParseUtils::GetIntAttr(root, "version", 2);
if (version < 2) THROW_IE_EXCEPTION << "Deprecated IR's versions: " << version;
if (version > 3) THROW_IE_EXCEPTION << "cannot parse future versions: " << version;
parser.reset(new InferenceEngine::details::FormatParser(version));
net = parser->Parse(root);
}
#define initlayerIn(name, id, portid) \
node("layer").attr("type", "Power").attr("name", name).attr("id", id)\
.node("power_data").attr("power", 1).attr("scale", 1).attr("shift", 0).close()\
.node("input")\
.node("port").attr("id", portid)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.close()
#define initlayerInV2(name, id, portid) \
node("layer").attr("type", "Power").attr("name", name).attr("id", id)\
.node("power_data").attr("power", 1).attr("scale", 1).attr("shift", 0).close()\
.node("input")\
.node("port").attr("id", portid)\
.node("dim", MT_BATCH)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.close()
#define initInputlayer(name, id, portid) \
node("layer").attr("type", "Input").attr("name", name).attr("id", id)\
.node("output")\
.node("port").attr("id", portid)\
.node("dim", MT_BATCH)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.close()
#define initInputlayer5D(name, id, portid) \
node("layer").attr("type", "Input").attr("name", name).attr("id", id)\
.node("output")\
.node("port").attr("id", portid)\
.node("dim", MT_BATCH)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_DEPTH)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.close()
#define initPowerlayerInOutV2(name, id, portid, outputid) \
node("layer").attr("type", "Power").attr("name", name).attr("id", id)\
.node("power_data").attr("power", 1).attr("scale", 1).attr("shift", 0).close()\
.node("input")\
.node("port").attr("id", portid)\
.node("dim", MT_BATCH)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.node("output")\
.node("port").attr("id", outputid)\
.node("dim", MT_BATCH)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.close()
#define initPowerlayerInOut(name, id, portid, outputid) \
node("layer").attr("type", "Power").attr("name", name).attr("id", id)\
.node("power_data").attr("power", 1).attr("scale", 1).attr("shift", 0).close()\
.node("input")\
.node("port").attr("id", portid)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.node("output")\
.node("port").attr("id", outputid)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.close()
#define initlayerInOut(name, type, id, portid, outputid) \
node("layer").attr("type", type).attr("name", name).attr("id", id)\
.node("input")\
.node("port").attr("id", portid)\
.node("dim", MT_BATCH)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.node("output")\
.node("port").attr("id", outputid)\
.node("dim", MT_BATCH)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
#define initConv5DlayerInOut(name, id, group, output, kernel, pads_begin, pads_end, strides, dilations, inputid, outputid) \
node("layer").attr("type", "Convolution").attr("name", name).attr("id", id)\
.node("data").attr("group", group).attr("output", output).attr("kernel", kernel).attr("pads_begin", pads_begin).attr("pads_end", pads_end).attr("strides", strides).attr("dilations", dilations).close()\
.node("input")\
.node("port").attr("id", inputid)\
.node("dim", MT_BATCH)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_DEPTH)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
.node("output")\
.node("port").attr("id", outputid)\
.node("dim", MT_BATCH)\
.node("dim", MT_CHANNELS)\
.node("dim", MT_DEPTH)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.close()\
.close()\
#define initedge(fl, fp, tl, tp)\
node("edge").attr("from-layer", fl).attr("from-port", fp).attr("to-layer", tl).attr("to-port", tp).close()
/*
auto net(const string & name1) const -> decltype(XMLFather().node("net").attr("name", name1).initlayers().initedges()) {
return XMLFather().node("net").attr("name", name1).initlayers().initedges();
}*/
#define MAKE_ALEXNET_FOR_MEAN_TESTS()\
xml().node("net").attr("name", "AlexNet")\
.node("input").attr("name", "data")\
.node("dim", MT_CHANNELS)\
.node("dim", MT_HEIGHT)\
.node("dim", MT_WIDTH)\
.newnode("layers")\
.initPowerlayerInOut("power", 0, 0, 1)\
.initlayerIn("power", 1, 0)\
.newnode("edges")\
.initedge(0,1,1,0)\
.newnode("pre-process")
#define MAKE_ALEXNET_FOR_MEAN_TESTS_V2()\
xml().node("net").attr("name", "AlexNet").attr("version", 2)\
.node("layers")\
.initInputlayer("data", 0, 0)\
.initPowerlayerInOutV2("power1", 1, 1, 2)\
.initlayerInV2("power2", 2, 3)\
.newnode("edges")\
.initedge(0,0,1,1)\
.initedge(1,2,2,3)\
.newnode("pre-process")
#define BEGIN_NET()\
_BEGIN_NET(2)
#define BEGIN_NET_V3()\
_BEGIN_NET(3)
#define BEGIN_NET_V2()\
_BEGIN_NET(2)
#define _BEGIN_NET(x)\
xml().node("net").attr("name", "AlexNet").attr("version", x)\
.node("layers")\
.initInputlayer("data", 0, 0)\
#define END_NET()\
.newnode("edges")\
.initedge(0,0,1,1)\
.close()
template<class T>
InferenceEngine::TBlob<uint8_t>::Ptr makeBinBlobForMeanTest() {
typename InferenceEngine::TBlob<T>::Ptr binBlobFloat(
new InferenceEngine::TBlob<T>({InferenceEngine::Precision::FP32,
{MT_HEIGHT, MT_WIDTH, MT_CHANNELS}, InferenceEngine::CHW}));
binBlobFloat->allocate();
std::vector<T> meanValues = MeanImage<T>::getValue();
std::copy(meanValues.begin(), meanValues.end(), (T *) binBlobFloat->data());
InferenceEngine::SizeVector dims_dst = {MT_HEIGHT, MT_WIDTH * sizeof(T), MT_CHANNELS};
typename InferenceEngine::TBlobProxy<uint8_t>::Ptr binBlob(new InferenceEngine::TBlobProxy<uint8_t>(
InferenceEngine::Precision::FP32, InferenceEngine::CHW, binBlobFloat, 0, dims_dst));
return binBlob;
}
template<class T>
void assertMeanImagePerChannelCorrect() {
std::vector<T> meanImage = MeanImage<T>::getValue();
auto &pp = getFirstInput()->getPreProcess();
ASSERT_EQ(MT_CHANNELS, pp.getNumberOfChannels());
for (unsigned channel = 0, globalPixel = 0; channel < MT_CHANNELS; channel++) {
auto actualMeanChannel = std::dynamic_pointer_cast<InferenceEngine::TBlob<T> >(pp[channel]->meanData);
ASSERT_EQ(MT_HEIGHT * MT_WIDTH, actualMeanChannel->size());
for (unsigned pixel = 0; pixel < actualMeanChannel->size(); pixel++, globalPixel++) {
ASSERT_FLOAT_EQ(meanImage[globalPixel], actualMeanChannel->readOnly()[pixel]);
}
}
}
template<class T>
void assertMeanImageCorrect() {
std::vector<T> meanImage = MeanImage<T>::getValue();
auto &pp = getFirstInput()->getPreProcess();
ASSERT_EQ(MT_CHANNELS, pp.getNumberOfChannels());
for (size_t c = 0; c < pp.getNumberOfChannels(); c++) {
auto actualMeanTBlob = std::dynamic_pointer_cast<InferenceEngine::TBlob<T> >(pp[c]->meanData);
ASSERT_EQ(MT_WIDTH, actualMeanTBlob->getTensorDesc().getDims().back());
ASSERT_EQ(MT_HEIGHT,
actualMeanTBlob->getTensorDesc().getDims()[actualMeanTBlob->getTensorDesc().getDims().size() -
2]);
ASSERT_EQ(MT_WIDTH * MT_HEIGHT, actualMeanTBlob->size());
for (unsigned index = 0; index < actualMeanTBlob->size(); index++) {
ASSERT_FLOAT_EQ(meanImage[index + c * MT_WIDTH * MT_HEIGHT], actualMeanTBlob->readOnly()[index]);
}
}
}
// Fresh XML builder for composing a test IR document.
CommonTestUtils::XMLFather xml() {
return CommonTestUtils::XMLFather();
}
// Parser under test; initialized by fixture code outside this excerpt.
std::shared_ptr<InferenceEngine::details::FormatParser> parser;
public:
// IR version probe: a document with an <InputData> child is treated as v2,
// anything else as v1.
int getXmlVersion(pugi::xml_node &root) {
    const bool hasInputData = !root.child("InputData").empty();
    return hasInputData ? 2 : 1;
}
// Resolves filePath to an openable location. Relative paths differ between
// single- and multi-directory build layouts (the original author's TODO), so
// when filePath itself cannot be opened we retry via getParentDir() and
// record a test failure if neither location opens.
std::string getXmlPath(const std::string &filePath) {
    const auto openFlags = std::ios_base::ate | std::ios_base::binary;
    std::string resolved = filePath;
    std::ifstream probe(resolved, openFlags);
    if (!probe.is_open()) {
        const std::string fallback = getParentDir(resolved);
        probe.open(fallback, openFlags);
        EXPECT_TRUE(probe.is_open())
        << "cannot open file " << resolved << " or " << fallback;
        probe.close();
        resolved = fallback;
    }
    return resolved;
}
// Reads the whole file (located via getXmlPath) into a string. Registers a
// test failure and returns an empty string when the file cannot be opened,
// is empty, or its size cannot be determined.
std::string readFileContent(const std::string &filePath) {
    const auto openFlags = std::ios_base::ate | std::ios_base::binary;
    std::ifstream fp(getXmlPath(filePath), openFlags);
    EXPECT_TRUE(fp.is_open()) << "Cannot open file: " << filePath;
    if (!fp.is_open())
        return std::string();
    std::streamsize size = fp.tellg();
    EXPECT_GE(size, 1) << "file is empty: " << filePath;
    // Fix: tellg() returns -1 on failure; the original `size == 0` check let
    // a negative size fall through to reserve((size_t)size), requesting an
    // enormous allocation.
    if (size <= 0)
        return std::string();
    std::string str;
    str.reserve((size_t) size);
    fp.seekg(0, std::ios::beg);
    str.assign((std::istreambuf_iterator<char>(fp)),
               std::istreambuf_iterator<char>());
    return str;
}
};

View File

@@ -1,245 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#ifndef SHAPES_H
#define SHAPES_H
#include <iostream>
#include <map>
#include <string>
#include <vector>
#include <random>
#include <chrono>
#include "common_test_utils/xml_net_builder/xml_net_builder.hpp"
using namespace testing;
// Per-layer-type port-count requirements used by the shape helpers below.
struct Maps {
    // Layer type -> required number of inputs (output count equals it).
    std::map<std::string, int> mapOfEqualShapes{
            {"Convolution", 1},
            {"Deconvolution", 1},
            {"Crop", 1},
            {"Interp", 1}
    };
    // Layer type -> {required number of inputs, required number of outputs}.
    std::map<std::string, std::pair<int, int>> mapOfUnequalShapes{
            {"Convolution", {3, 1}},
            {"Deconvolution", {3, 1}},
            {"Crop", {2, 1}},
            {"DetectionOutput", {3, 1}},
            {"Interp", {2, 1}}
    };
    // Layer type -> {inputs, outputs} where "N" denotes "any number".
    std::map<std::string, std::pair<std::string, std::string>> mapOfContinuousShapes{
            {"Slice", {"1", "N"}},
            {"Eltwise", {"N", "1"}}
    };
};
// 'static' gives each translation unit its own copy: the original non-static
// definition of a global variable in this header violated the ODR whenever
// the header was included from more than one .cpp file.
static Maps maps;
// Base helper: remembers the layer type and generates random 4-D shapes for
// the valid/invalid port-count cases implemented by subclasses.
class ShapesHelper {
protected:
    std::string type;  // layer type this helper produces shapes for
public:
    ShapesHelper() = default;
    // Fix: take the type by const reference — the original non-const
    // reference rejected const strings and temporaries for no benefit
    // (the argument is only copied).
    explicit ShapesHelper(const std::string &type) : type(type) {}
    std::string getType() { return type; }
    // Port shapes satisfying the layer's input/output count requirements.
    virtual CommonTestUtils::InOutShapes getValidShapes() = 0;
    // Port shapes with a deliberately wrong number of inputs.
    virtual CommonTestUtils::InOutShapes getInvalidInputShapes() = 0;
    // numOfShapes random shapes; first three dims uniform in [1, 256],
    // the last dim fixed to 7. Seeded from the clock on every call.
    std::vector<std::vector<size_t>> generateShapes(const int &numOfShapes) {
        std::mt19937 gen(
                static_cast<unsigned long>(std::chrono::high_resolution_clock::now().time_since_epoch().count()));
        std::uniform_int_distribution<unsigned long> dist(1, 256);
        std::vector<std::vector<size_t>> shapes;
        shapes.reserve(static_cast<unsigned long>(numOfShapes));
        for (int i = 0; i < numOfShapes; ++i) {
            shapes.push_back({dist(gen), dist(gen), dist(gen), 7});
        }
        return shapes;
    }
    virtual ~ShapesHelper() = default;
};
// Helper for layer types whose input count must equal their output count
// (per Maps::mapOfEqualShapes).
class EqualIOShapesHelper : public ShapesHelper {
public:
    explicit EqualIOShapesHelper(std::string &type) : ShapesHelper(type) {};
    // Equal numbers of randomly-sized inputs and outputs.
    CommonTestUtils::InOutShapes getValidShapes() override {
        const int portCount = maps.mapOfEqualShapes[type];
        std::vector<std::vector<size_t>> ins = generateShapes(portCount);
        std::vector<std::vector<size_t>> outs = generateShapes(portCount);
        return {ins, outs};
    }
    // Output count stays correct while the input count is inflated past any
    // valid value ("magic number" 99 from the original).
    CommonTestUtils::InOutShapes getInvalidInputShapes() override {
        const int validCount = maps.mapOfEqualShapes[type];
        const int badInputCount = validCount + 99;
        std::vector<std::vector<size_t>> ins = generateShapes(badInputCount);
        std::vector<std::vector<size_t>> outs = generateShapes(validCount);
        return {ins, outs};
    }
    ~EqualIOShapesHelper() override = default;
};
// Helper for layer types with fixed but different input and output counts
// (per Maps::mapOfUnequalShapes).
class NotEqualConcreteIOShapesHelper : public ShapesHelper {
public:
    explicit NotEqualConcreteIOShapesHelper(std::string &type) : ShapesHelper(type) {};
    CommonTestUtils::InOutShapes getValidShapes() override {
        const std::pair<int, int> &counts = maps.mapOfUnequalShapes[type];
        std::vector<std::vector<size_t>> ins = generateShapes(counts.first);
        std::vector<std::vector<size_t>> outs = generateShapes(counts.second);
        return {ins, outs};
    }
    // Adds the output count on top of the required input count so the number
    // of inputs is invalid while the outputs remain correct.
    CommonTestUtils::InOutShapes getInvalidInputShapes() override {
        const std::pair<int, int> &counts = maps.mapOfUnequalShapes[type];
        const int badInputCount = counts.first + counts.second;
        std::vector<std::vector<size_t>> ins = generateShapes(badInputCount);
        std::vector<std::vector<size_t>> outs = generateShapes(counts.second);
        return {ins, outs};
    }
    ~NotEqualConcreteIOShapesHelper() override = default;
};
// Helper for layer types where one side has a fixed port count and the other
// is unbounded ("N" in Maps::mapOfContinuousShapes). The unbounded side is
// represented by 100 generated shapes.
class NotEqualIOShapesHelper : public ShapesHelper {
private:
// True iff s is a non-empty string of decimal digits.
bool is_number(const std::string &s) {
return !s.empty() && std::find_if(s.begin(),
s.end(), [](char c) { return !std::isdigit(c); }) == s.end();
}
public:
explicit NotEqualIOShapesHelper(std::string &type) : ShapesHelper(type) {};
// Fixed side gets its exact count; unbounded side gets 100 shapes.
// Only one of numOfInput/numOfOutput is used per branch.
CommonTestUtils::InOutShapes getValidShapes() override {
int numOfInput;
int numOfOutput;
std::vector<std::vector<size_t>> inputs;
std::vector<std::vector<size_t>> outputs;
if (is_number(maps.mapOfContinuousShapes[type].first)) {
numOfInput = std::stoi(maps.mapOfContinuousShapes[type].first);
inputs = generateShapes(numOfInput);
outputs = generateShapes(100);
} else {
numOfOutput = std::stoi(maps.mapOfContinuousShapes[type].second);
outputs = generateShapes(numOfOutput);
inputs = generateShapes(100);
}
return {inputs, outputs};
}
// Doubles the fixed input count to make it invalid.
// NOTE(review): the else branch is identical to getValidShapes(), so for
// types with an unbounded input side (e.g. Eltwise) this returns *valid*
// shapes — confirm whether that was intentional before reusing this class.
CommonTestUtils::InOutShapes getInvalidInputShapes() override {
int numOfInput;
int numOfOutput;
std::vector<std::vector<size_t>> inputs;
std::vector<std::vector<size_t>> outputs;
if (is_number(maps.mapOfContinuousShapes[type].first)) {
numOfInput = std::stoi(maps.mapOfContinuousShapes[type].first) * 2;
inputs = generateShapes(numOfInput);
outputs = generateShapes(100);
} else {
numOfOutput = std::stoi(maps.mapOfContinuousShapes[type].second);
outputs = generateShapes(numOfOutput);
inputs = generateShapes(100);
}
return {inputs, outputs};
}
~NotEqualIOShapesHelper() override = default;
};
// Category interface: a concrete subclass recognizes one family of layer
// types and builds the matching ShapesHelper.
class Layers {
public:
// Returns true when this category covers concrete_layer (and remembers it
// in the subclass for a later factoryShape() call).
virtual bool containLayer(std::string concrete_layer) = 0;
// Creates a helper for the last matched layer type.
// NOTE(review): returns an owning raw pointer; callers must delete it.
virtual ShapesHelper *factoryShape() = 0;
virtual ~Layers() = default;
};
// Category of layer types with equal input/output counts
// (Maps::mapOfEqualShapes).
class LayersWithEqualIO : public Layers {
private:
    std::string layer = "";  // last matched layer type
public:
    // Remembers and reports whether this category covers concrete_layer.
    // Uses the map's own lookup instead of the original O(n) linear scan
    // (which also shadowed the 'layer' member with its loop variable).
    bool containLayer(std::string concrete_layer) override {
        if (maps.mapOfEqualShapes.count(concrete_layer) != 0) {
            this->layer = concrete_layer;
            return true;
        }
        return false;
    }
    // NOTE: transfers ownership — the caller must delete the result.
    ShapesHelper *factoryShape() override {
        return new EqualIOShapesHelper(this->layer);
    }
    ~LayersWithEqualIO() override = default;
};
// Category of layer types with fixed but unequal input/output counts
// (Maps::mapOfUnequalShapes).
class LayersWithNotEqualIO : public Layers {
private:
    std::string layer = "";  // last matched layer type
public:
    // Remembers and reports whether this category covers concrete_layer.
    // Uses the map's own lookup instead of the original O(n) linear scan
    // (which also shadowed the 'layer' member with its loop variable).
    bool containLayer(std::string concrete_layer) override {
        if (maps.mapOfUnequalShapes.count(concrete_layer) != 0) {
            this->layer = concrete_layer;
            return true;
        }
        return false;
    }
    // NOTE: transfers ownership — the caller must delete the result.
    ShapesHelper *factoryShape() override {
        return new NotEqualConcreteIOShapesHelper(this->layer);
    }
    ~LayersWithNotEqualIO() override = default;
};
// Category of layer types with an unbounded ("N") port side
// (Maps::mapOfContinuousShapes).
class LayersWithNIO : public Layers {
private:
    std::string layer = "";  // last matched layer type
public:
    // Remembers and reports whether this category covers concrete_layer.
    // Uses the map's own lookup instead of the original O(n) linear scan
    // (which also shadowed the 'layer' member with its loop variable).
    bool containLayer(std::string concrete_layer) override {
        if (maps.mapOfContinuousShapes.count(concrete_layer) != 0) {
            this->layer = concrete_layer;
            return true;
        }
        return false;
    }
    // NOTE: transfers ownership — the caller must delete the result.
    ShapesHelper *factoryShape() override {
        return new NotEqualIOShapesHelper(this->layer);
    }
    ~LayersWithNIO() override = default;
};
#endif // SHAPES_H

View File

@@ -1,657 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <cpp/ie_cnn_network.h>
#include <gtest/gtest.h>
#include <string>
#include "mean_image.h"
#define MT_BATCH 1
#define MT_CHANNELS 3
#define MT_HEIGHT 1
#define MT_WIDTH 2
#define LAYER_COUNT 3
#include "parser_tests_base.hpp"
#include "common_test_utils/xml_net_builder/xml_father.hpp"
using namespace std;
using namespace InferenceEngine;
using namespace InferenceEngine::details;
// Fixture for IR v2 FormatParser tests; parse helpers and assertions come
// from FormatParserTest.
class V2FormatParserTest : public FormatParserTest {
};
// A <net> element carrying only name/version (no layers/edges) must throw.
TEST_F (V2FormatParserTest, invalidXml_ShouldThrow) {
string content = CommonTestUtils::XMLFather()
.node("net")
.attr("name", "AlexNet").attr("version", 2);
// TODO: fix RTTI issue and replace by InferenceEngine::details::InferenceEngineException
ASSERT_THROW(parse(content), std::exception);
}
// --- Port-dimension parsing: a well-formed 3-layer net parses; layers with
// missing or zero-valued <dim> elements are rejected. ---
TEST_F (V2FormatParserTest, canParseDims) {
// <input name="data"><dim>10</dim><dim>3</dim><dim>227</dim><dim>227</dim></input>
string content = xml().node("net").attr("name", "AlexNet").attr("version", 2)
.node("layers")
.initInputlayer("data", 0, 0)
.initPowerlayerInOutV2("power1", 1, 1, 2)
.initlayerInV2("power2", 2, 3)
.newnode("edges")
.initedge(0, 0, 1, 1)
.initedge(1, 2, 2, 3);
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
}
// Input layer without any <dim> information must fail to parse.
TEST_F(V2FormatParserTest, failWhenNoDims) {
string content = xml().node("net").attr("name", "AlexNet").attr("version", 2)
.node("layer").attr("type", "Input").attr("name", "data").attr("id", 0);
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// A zero dimension in an output port must fail to parse.
TEST_F(V2FormatParserTest, failOnZeroDim) {
string content = xml().node("net").attr("name", "AlexNet").attr("version", 2)
.node("layer").attr("type", "Input").attr("name", "data").attr("id", 0)
.node("output")
.node("port").attr("id", 0)
.node("dim", 0)
.close()
.close()
.close()
.close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// --- Pre-processing <channel> parsing: per-channel mean values/scales and
// the various malformed-attribute failure modes. ---
TEST_F(V2FormatParserTest, canParseMeanImageValues) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").node("mean").attr("value", "104.5").close()
.newnode("channel").attr("id", "1").node("mean").attr("value", "117.8").close()
.newnode("channel").attr("id", "2").node("mean").attr("value", "123");
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), MEAN_VALUE);
ASSERT_EQ(MT_CHANNELS, pp.getNumberOfChannels());
InferenceEngine::PreProcessChannel::Ptr preProcessChannel;
ASSERT_FLOAT_EQ(104.5f, pp[0]->meanValue);
ASSERT_FLOAT_EQ(117.8f, pp[1]->meanValue);
ASSERT_FLOAT_EQ(123.f, pp[2]->meanValue);
// Scalar mean values must not produce per-channel mean blobs.
ASSERT_EQ(nullptr, pp[0]->meanData);
ASSERT_EQ(nullptr, pp[1]->meanData);
ASSERT_EQ(nullptr, pp[2]->meanData);
}
// Scale-only channels leave the mean variant at NONE.
TEST_F(V2FormatParserTest, canParseScaleValuesOnly) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").node("scale").attr("value", "104.5").close()
.newnode("channel").attr("id", "1").node("scale").attr("value", "117.8").close()
.newnode("channel").attr("id", "2").node("scale").attr("value", "123");
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), NONE);
}
// Channel id 0 absent while 1 and 2 carry mean values: reject.
TEST_F(V2FormatParserTest, failIfOneOfMeanImageIdsMissed) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "1").node("mean").attr("value", "117.8").close()
.newnode("channel").attr("id", "2").node("mean").attr("value", "123");
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Misspelled attribute "value1" on channel 0: reject.
TEST_F(V2FormatParserTest, failIfValueAttributeIsNotValid) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").node("mean").attr("value1", "104.5").close()
.newnode("channel").attr("id", "1").node("mean").attr("value", "117.8").close()
.newnode("channel").attr("id", "2").node("mean").attr("value", "123");
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Empty mean value attribute: reject.
TEST_F(V2FormatParserTest, failIfMeanValueIsNotSpecified) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").node("mean").attr("value", "").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Misspelled element "mean1" on the last channel: reject.
TEST_F(V2FormatParserTest, failIfMeanValueNotSpecifiedInPreProcessing) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").node("mean").attr("value", "104.5").close()
.newnode("channel").attr("id", "1").node("mean").attr("value", "117.8").close()
.newnode("channel").attr("id", "2").node("mean1").attr("value", "123");
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Negative channel id: reject.
TEST_F(V2FormatParserTest, failIfIdLessThanZero) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "-1").node("mean").attr("value", "104.5").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Non-integer channel id ("2_2"): reject.
TEST_F(V2FormatParserTest, failIfIdNotInteger) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").node("mean").attr("value", "104.5").close()
.newnode("channel").attr("id", "1").node("mean").attr("value", "117.8").close()
.newnode("channel").attr("id", "2_2").node("mean").attr("value", "123").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Comma used as the decimal separator ("104,5") is not a valid float: reject.
TEST_F(V2FormatParserTest, failIfValueNotFloat) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").node("mean").attr("value", "104,5").close()
.newnode("channel").attr("id", "1").node("mean").attr("value", "117.8").close()
.newnode("channel").attr("id", "2").node("mean").attr("value", "123").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Channel id beyond the MT_CHANNELS range: reject.
TEST_F(V2FormatParserTest, failIfIdMoreThanNumChannels) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "4").node("mean").attr("value", "104.5").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// The same channel id appearing twice: reject.
TEST_F(V2FormatParserTest, failIfIdIsDuplicated) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").node("mean").attr("value", "104.5").close()
.newnode("channel").attr("id", "0").node("mean").attr("value", "117.8").close()
.newnode("channel").attr("id", "2").node("mean").attr("value", "123").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// offset/size mean data without a mean-precision attribute: reject.
TEST_F(V2FormatParserTest, failParseMeanImageWithoutSpecifyingPrecision) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").node("mean").attr("offset", "0").attr("size", "5").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// A channel with no <mean> at all parses fine; variant stays NONE.
// (Name says "fail" but the assertion expects success.)
TEST_F(V2FormatParserTest, failIfOneOfMeanImageIfMeanNotSpecified) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.node("channel").attr("id", "0").close();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), NONE);
}
// Empty offset attribute: reject.
TEST_F(V2FormatParserTest, failIfOffsetValueMissing) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "FP32")
.node("channel").attr("id", "0").node("mean").attr("offset", "").attr("size", "5").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Empty size attribute: reject.
TEST_F(V2FormatParserTest, failIfSizeValueMissing) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "FP32")
.node("channel").attr("id", "0").node("mean").attr("offset", "1").attr("size", "").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// FP32 channel of MT_HEIGHT*MT_WIDTH=2 elements needs 8 bytes; size=2
// disagrees with the expected per-channel byte size: reject.
TEST_F(V2FormatParserTest, throwsIfSizeOfMeanElementsMismatchWithExpected) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "FP32")
.node("channel").attr("id", "0").node("mean")
.attr("offset", "0").attr("size", "2").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "2").attr("size", "2").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "4").attr("size", "2").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// --- Binary mean-image payloads: per-precision parsing plus weight-blob
// attachment via makeBinBlobForMeanTest<T>(). Channel sizes are
// MT_HEIGHT*MT_WIDTH elements scaled by sizeof(T). ---
TEST_F(V2FormatParserTest, canHandleQ78MeanValues) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "Q78")
.node("channel").attr("id", "0").node("mean")
.attr("offset", "0").attr("size", "4").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "4").attr("size", "4").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "8").attr("size", "4").close();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), MEAN_IMAGE);
// Q78 is a 16-bit fixed-point format, hence the short-typed blob.
auto binBlob = makeBinBlobForMeanTest<short>();
assertSetWeightsSucceed(binBlob);
}
TEST_F(V2FormatParserTest, canParseBinFileWithMeanImageUINT8Values) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "U8")
.node("channel").attr("id", "0").node("mean")
.attr("offset", "0").attr("size", "2").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "2").attr("size", "2").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "4").attr("size", "2").close();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), MEAN_IMAGE);
auto binBlob = makeBinBlobForMeanTest<uint8_t>();
assertSetWeightsSucceed(binBlob);
assertMeanImagePerChannelCorrect<uint8_t>();
assertMeanImageCorrect<uint8_t>();
}
TEST_F(V2FormatParserTest, canParseBinFileWithMeanImageI16Values) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "I16")
.node("channel").attr("id", "0").node("mean")
.attr("offset", "0").attr("size", "4").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "4").attr("size", "4").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "8").attr("size", "4").close();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), MEAN_IMAGE);
auto binBlob = makeBinBlobForMeanTest<short>();
assertSetWeightsSucceed(binBlob);
assertMeanImagePerChannelCorrect<short>();
assertMeanImageCorrect<short>();
}
TEST_F(V2FormatParserTest, canParseBinFileWithMeanImageFloatValues) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "FP32")
.node("channel").attr("id", "0").node("mean")
.attr("offset", "0").attr("size", "8").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "8").attr("size", "8").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "16").attr("size", "8").close();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), MEAN_IMAGE);
auto binBlob = makeBinBlobForMeanTest<float>();
assertSetWeightsSucceed(binBlob);
assertMeanImagePerChannelCorrect<float>();
assertMeanImageCorrect<float>();
}
// --- Mean-image error handling: wrong sizes, offsets past the binary, and
// mixing value/offset attributes on one channel. ---
// size=9 on channel 0 disagrees with the expected FP32 channel size (8): reject.
TEST_F(V2FormatParserTest, throwIfSizeDoesNotMatchExpectedMeanSize) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "FP32")
.node("channel").attr("id", "0").node("mean")
.attr("offset", "0").attr("size", "9").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "8").attr("size", "8").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "16").attr("size", "8").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// XML parses, but attaching a binary blob smaller than offset 1600 fails.
TEST_F(V2FormatParserTest, failIfSizeExceedBinaryFileSize) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "FP32")
.node("channel").attr("id", "0").node("mean")
.attr("offset", "0").attr("size", "8").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "8").attr("size", "8").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "1600").attr("size", "8").close();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), MEAN_IMAGE);
auto binBlob = makeBinBlobForMeanTest<float>();
assertSetWeightsFail(binBlob);
}
// Channel 0 uses "value" while the others use offset/size: reject.
TEST_F(V2FormatParserTest, failIfMixedAttributesAreSet) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "FP32")
.node("channel").attr("id", "0").node("mean")
.attr("value", "0").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "8").attr("size", "8").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "16").attr("size", "8").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// When every channel carries a "value", extra offset/size attributes are
// tolerated and the MEAN_VALUE variant wins.
TEST_F(V2FormatParserTest, parseSucceedIfMixedButAllValuesSet) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2().attr("mean-precision", "FP32")
.node("channel").attr("id", "0").node("mean")
.attr("value", "0").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("value", "0").attr("offset", "8").attr("size", "8").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("value", "0").attr("offset", "16").attr("size", "8").close();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), MEAN_VALUE);
}
// Tile layer: axis/tiles attributes must land in the typed TileLayer fields.
TEST_F(V2FormatParserTest, parseTileLayer) {
string content = BEGIN_NET()
.initlayerInOut("tile", "Tile", 1, 1, 2)
.node("data").attr("axis",3).attr("tiles", 88).close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto lp = getLayer<TileLayer>("tile");
ASSERT_TRUE(lp);
ASSERT_EQ(lp->axis, 3);
ASSERT_EQ(lp->tiles, 88);
}
// --- Pre-processing "reference-layer-name": must resolve to an existing
// layer ("data" works, "foo" fails). ---
TEST_F(V2FormatParserTest, checkPreProcessWithRefName) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.attr("mean-precision", "FP32")
.attr("reference-layer-name", "data")
.node("channel").attr("id", "0").node("mean")
.attr("offset", "0").attr("size", "8").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "8").attr("size", "8").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "16").attr("size", "8").close();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
auto & pp = getFirstInput()->getPreProcess();
ASSERT_EQ(pp.getMeanVariant(), MEAN_IMAGE);
auto binBlob = makeBinBlobForMeanTest<float>();
assertSetWeightsSucceed(binBlob);
assertMeanImagePerChannelCorrect<float>();
assertMeanImageCorrect<float>();
}
// reference-layer-name pointing at a non-existent layer: reject.
TEST_F(V2FormatParserTest, failWhenPreProcessNameMissing) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2()
.attr("mean-precision", "FP32")
.attr("reference-layer-name", "foo")
.node("channel").attr("id", "0").node("mean")
.attr("offset", "0").attr("size", "8").close()
.newnode("channel").attr("id", "1").node("mean")
.attr("offset", "8").attr("size", "8").close()
.newnode("channel").attr("id", "2").node("mean")
.attr("offset", "16").attr("size", "8").close();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// --- Eltwise operation parsing: unknown operations are rejected; an empty
// or missing operation defaults to Sum; "prod"/"mul" both map to Prod. ---
TEST_F(V2FormatParserTest, cannotParseUnknownEltwiseOperation) {
string content = BEGIN_NET()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").attr("operation", "unknown").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// NOTE(review): byte-for-byte duplicate of the previous test with a typo in
// its name ("asser"); one of the two could be removed.
TEST_F(V2FormatParserTest, asserOnUnknownEltwiseOperation) {
string content = BEGIN_NET()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").attr("operation", "unknown").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
TEST_F(V2FormatParserTest, canParseEmptyElementwiseNodeAsSum) {
string content = BEGIN_NET()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").attr("operation", "").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr ewise;
ASSERT_EQ(OK, net->getLayerByName("e", ewise, nullptr));
auto *eltwise = dynamic_cast<EltwiseLayer *>(ewise.get());
ASSERT_NE(nullptr, eltwise);
ASSERT_EQ(eltwise->_operation, EltwiseLayer::Sum);
}
// Same default applies to the "elementwise_data" element name used by
// Amazon-produced IRs.
TEST_F(V2FormatParserTest, canParseEmptyElementwiseNodeAsSumAmazonIR) {
string content = BEGIN_NET()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("elementwise_data").attr("operation", "").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr ewise;
ASSERT_EQ(OK, net->getLayerByName("e", ewise, nullptr));
auto *eltwise = dynamic_cast<EltwiseLayer *>(ewise.get());
ASSERT_NE(nullptr, eltwise);
ASSERT_EQ(eltwise->_operation, EltwiseLayer::Sum);
}
TEST_F(V2FormatParserTest, canParseMissedElementwiseOperationNodeAsSum) {
string content = BEGIN_NET()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr ewise;
ASSERT_EQ(OK, net->getLayerByName("e", ewise, nullptr));
auto *eltwise = dynamic_cast<EltwiseLayer *>(ewise.get());
ASSERT_NE(nullptr, eltwise);
ASSERT_EQ(eltwise->_operation, EltwiseLayer::Sum);
}
TEST_F(V2FormatParserTest, canParseMissedElementwiseDataNodeAsSum) {
string content = BEGIN_NET()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr ewise;
ASSERT_EQ(OK, net->getLayerByName("e", ewise, nullptr));
auto *eltwise = dynamic_cast<EltwiseLayer *>(ewise.get());
ASSERT_NE(nullptr, eltwise);
ASSERT_EQ(eltwise->_operation, EltwiseLayer::Sum);
}
TEST_F(V2FormatParserTest, canParseProdInElementwiseNode) {
string content = BEGIN_NET()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").attr("operation", "prod").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr ewise;
ASSERT_EQ(OK, net->getLayerByName("e", ewise, nullptr));
auto *eltwise = dynamic_cast<EltwiseLayer *>(ewise.get());
ASSERT_NE(nullptr, eltwise);
ASSERT_EQ(eltwise->_operation, EltwiseLayer::Prod);
}
// "mul" is accepted as an alias for Prod.
TEST_F(V2FormatParserTest, canParseMulInElementwiseNode) {
string content = BEGIN_NET()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").attr("operation", "mul").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr ewise;
ASSERT_EQ(OK, net->getLayerByName("e", ewise, nullptr));
auto *eltwise = dynamic_cast<EltwiseLayer *>(ewise.get());
ASSERT_NE(nullptr, eltwise);
ASSERT_EQ(eltwise->_operation, EltwiseLayer::Prod);
}
TEST_F(V2FormatParserTest, canParseSumInElementwiseNode) {
string content = BEGIN_NET()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").attr("operation", "sum").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr ewise;
ASSERT_EQ(OK, net->getLayerByName("e", ewise, nullptr));
auto *eltwise = dynamic_cast<EltwiseLayer *>(ewise.get());
ASSERT_NE(nullptr, eltwise);
ASSERT_EQ(eltwise->_operation, EltwiseLayer::Sum);
}
// Parsed network must contain exactly LAYER_COUNT layers.
TEST_F(V2FormatParserTest, parsesNumberOfLayersCorrectly) {
string content = MAKE_ALEXNET_FOR_MEAN_TESTS_V2();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNNetwork network(net);
ASSERT_EQ(network.layerCount(), LAYER_COUNT);
}
// --- Activation-layer conversion: the generic "Activation" layer is
// rewritten into a concrete typed layer and the "type" param is removed. ---
// "type" attribute on the wrong element ("data1"): reject.
TEST_F(V2FormatParserTest, canThrowExceptionIfNoType) {
string content = BEGIN_NET()
.initlayerInOut("a", "Activation", 1, 1, 2)
.node("data1").attr("type", "tanH").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Duplicate "type" attribute: reject.
TEST_F(V2FormatParserTest, canThrowExceptionIfMultipleTypes) {
string content = BEGIN_NET()
.initlayerInOut("a", "Activation", 1, 1, 2)
.node("data").attr("type", "tanH").attr("type", "sigmoid").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
TEST_F(V2FormatParserTest, canConvertActivationLayerAsTanH) {
string content = BEGIN_NET()
.initlayerInOut("a", "Activation", 1, 1, 2)
.node("data").attr("type", "tanH").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr tanh;
ASSERT_EQ(OK, net->getLayerByName("a", tanh, nullptr));
ASSERT_STREQ(tanh->type.c_str(), "tanh");
ASSERT_EQ(tanh->params.find("type"), tanh->params.end());
}
TEST_F(V2FormatParserTest, canConvertActivationLayerAsELU) {
string content = BEGIN_NET()
.initlayerInOut("a", "Activation", 1, 1, 2)
.node("data").attr("type", "elu").attr("alpha", "0.1").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr elu;
ASSERT_EQ(OK, net->getLayerByName("a", elu, nullptr));
ASSERT_STREQ(elu->type.c_str(), "elu");
ASSERT_FLOAT_EQ(elu->GetParamAsFloat("alpha"), 0.1);
ASSERT_EQ(elu->params.find("type"), elu->params.end());
}
TEST_F(V2FormatParserTest, canConvertActivationLayerAsRelu) {
string content = BEGIN_NET()
.initlayerInOut("a", "Activation", 1, 1, 2)
.node("data").attr("type", "relu").attr("negative_slope", "0.1").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr relu;
ASSERT_EQ(OK, net->getLayerByName("a", relu, nullptr));
ASSERT_STREQ(relu->type.c_str(), "relu");
auto *reluLayer = dynamic_cast<ReLULayer *>(relu.get());
ASSERT_NE(nullptr, reluLayer);
ASSERT_FLOAT_EQ(reluLayer->negative_slope, 0.1);
ASSERT_EQ(reluLayer->params.find("type"), reluLayer->params.end());
}
TEST_F(V2FormatParserTest, canConvertActivationLayerAsPRelu) {
string content = BEGIN_NET()
.initlayerInOut("a", "Activation", 1, 1, 2)
.node("data").attr("type", "pRelu").attr("channel_shared", "1").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr layer;
ASSERT_EQ(OK, net->getLayerByName("a", layer, nullptr));
ASSERT_STREQ(layer->type.c_str(), "prelu");
auto *preluLayer = dynamic_cast<PReLULayer *>(layer.get());
ASSERT_NE(nullptr, preluLayer);
ASSERT_EQ(preluLayer->_channel_shared, 1);
ASSERT_EQ(preluLayer->params.find("type"), preluLayer->params.end());
}
TEST_F(V2FormatParserTest, canConvertActivationLayerAsSigmoid) {
string content = BEGIN_NET()
.initlayerInOut("a", "Activation", 1, 1, 2)
.node("data").attr("type", "sigmoid").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr sigmoid;
ASSERT_EQ(OK, net->getLayerByName("a", sigmoid, nullptr));
ASSERT_STREQ(sigmoid->type.c_str(), "sigmoid");
ASSERT_EQ(sigmoid->params.find("type"), sigmoid->params.end());
}
TEST_F(V2FormatParserTest, canConvertActivationLayerAsClamp) {
string content = BEGIN_NET()
.initlayerInOut("a", "Activation", 1, 1, 2)
.node("data").attr("type", "clamp").attr("max","5").attr("min","-5").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr layer;
ASSERT_EQ(OK, net->getLayerByName("a", layer, nullptr));
ASSERT_STREQ(layer->type.c_str(), "clamp");
auto clamp = dynamic_cast<ClampLayer*>(layer.get());
ASSERT_NE(clamp, nullptr);
ASSERT_EQ(clamp->min_value, -5);
ASSERT_EQ(clamp->max_value, 5);
ASSERT_EQ(clamp->params.find("type"), clamp->params.end());
}

View File

@@ -1,190 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#define MT_BATCH 1
#define MT_CHANNELS 3
#define MT_DEPTH 2
#define MT_HEIGHT 1
#define MT_WIDTH 2
#define LAYER_COUNT 1
#include "parser_tests_base.hpp"
#include "parser_tests_base.hpp"
using namespace std;
using namespace InferenceEngine;
using namespace InferenceEngine::details;
using namespace testing;
// Fixture for IR v3 FormatParser tests.
class V3FormatParserTest : public FormatParserTest {
};
// Disabled: v3 was expected to reject an empty eltwise operation (unlike v2,
// which defaults it to Sum).
TEST_F(V3FormatParserTest, DISABLED_canNotParseEmptyElementwiseNode) {
string content = BEGIN_NET_V3()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").attr("operation", "").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Disabled: v3 was expected to reject a missing eltwise data node.
TEST_F(V3FormatParserTest, DISABLED_canNotParseMissedElementwiseNodeType) {
string content = BEGIN_NET_V3()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
TEST_F(V3FormatParserTest, cannotParseUnknownEltwiseOperation) {
string content = BEGIN_NET_V3()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").attr("operation", "unknown").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// operation="prod" must parse and yield EltwiseLayer::Prod.
TEST_F(V3FormatParserTest, canParseProdInElementwiseNode) {
    const string model = BEGIN_NET_V3()
        .initlayerInOut("e", "Eltwise", 1, 1, 2)
        .node("data").attr("operation", "prod").close()
        .close()
    END_NET();
    ASSERT_NO_FATAL_FAILURE(assertParseSucceed(model));
    CNNLayerPtr parsed;
    ASSERT_EQ(OK, net->getLayerByName("e", parsed, nullptr));
    auto *asEltwise = dynamic_cast<EltwiseLayer *>(parsed.get());
    ASSERT_NE(nullptr, asEltwise);
    ASSERT_EQ(asEltwise->_operation, EltwiseLayer::Prod);
}
// Parses an Eltwise layer with operation="mul" and expects success.
// NOTE(review): the final assertion expects EltwiseLayer::Prod, i.e. the
// parser is expected to treat "mul" as an alias of "prod" — confirm this is
// intentional rather than a copy/paste of the previous test's assertion.
TEST_F(V3FormatParserTest, canParseMulInElementwiseNode) {
string content = BEGIN_NET_V3()
.initlayerInOut("e", "Eltwise", 1, 1, 2)
.node("data").attr("operation", "mul").close()
.close()
END_NET();
ASSERT_NO_FATAL_FAILURE(assertParseSucceed(content));
CNNLayerPtr ewise;
ASSERT_EQ(OK, net->getLayerByName("e", ewise, nullptr));
auto *eltwise = dynamic_cast<EltwiseLayer *>(ewise.get());
ASSERT_NE(nullptr, eltwise);
ASSERT_EQ(eltwise->_operation, EltwiseLayer::Prod);
}
// NOTE(review): despite the name "canParse5Dinput", this test asserts that
// parsing FAILS for a network containing only a 5D input layer. Consider
// renaming to cannotParse5Dinput so the name matches the expectation.
TEST_F(V3FormatParserTest, canParse5Dinput) {
string content = xml().node("net").attr("name", "Only_input_5D").attr("version", 3)
.initInputlayer5D("data", 0, 0);
ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// A 3D convolution with an empty kernel attribute must fail to parse.
// Fix: GoogleTest only skips tests whose name starts with the exact prefix
// "DISABLED_"; the original "DISABLE_conv3DInvalidKernel" was therefore
// still executed despite the author's evident intent to disable it.
TEST_F(V3FormatParserTest, DISABLED_conv3DInvalidKernel) {
    string content = xml().node("net").attr("name", "5d_net").attr("version", 3)
        .initConv5DlayerInOut("3D_conv", 0, 1, 64, "", "0,0,0", "0,0,0", "1,1,1", "1,1,1", 0, 0)
        .close();
    ASSERT_NO_FATAL_FAILURE(assertParseFail(content));
}
// Test-only shim that widens access to FormatParser's protected
// `layersParseInfo` so tests can inspect per-layer blob offsets/sizes.
// Tests never construct it; they cast an existing FormatParser* to this type.
class V2ParserPublicSegments: public InferenceEngine::details::FormatParser {
public:
const std::map<std::string, LayerParseParameters>& getLayerParseParameters() {
return layersParseInfo;
}
};
// Blob offsets/sizes larger than 2^31 must survive parsing unclipped
// (weights size 3276800000 does not fit in a 32-bit int).
TEST_F(V3FormatParserTest, LargeWeights) {
    std::string model = R"V0G0N(
<net name="PVANET" version="3" batch="1">
<layers>
<layer name="data" type="Input" precision="FP32" id="0">
<output>
<port id="0">
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="1" name="MatMul" precision="FP32" type="FullyConnected">
<data out-size="800000"/>
<input>
<port id="0">
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>800000</dim>
</port>
</output>
<blobs>
<weights offset="891492352" size="3276800000"/>
<biases offset="4168292352" size="3200000"/>
</blobs>
</layer>
</layers>
<edges>
<edge from-layer="0" from-port="0" to-layer="1" to-port="0"/>
</edges>
</net>)V0G0N";
    parse(model);
    // Bind the accessor's const-map& directly: the original copied the whole
    // map into a local. static_cast replaces the C-style downcast (the shim
    // class adds no state, it only widens access to layersParseInfo).
    const auto& params =
        static_cast<V2ParserPublicSegments*>(parser.get())->getLayerParseParameters();
    ASSERT_NE(params.end(), params.find("MatMul"));
    // .at() instead of operator[]: works on the const map and cannot
    // default-insert a missing entry.
    ASSERT_EQ(891492352, params.at("MatMul").blobs.at("weights").start);
    ASSERT_EQ(3276800000, params.at("MatMul").blobs.at("weights").size);
    ASSERT_EQ(4168292352, params.at("MatMul").blobs.at("biases").start);
    ASSERT_EQ(3200000, params.at("MatMul").blobs.at("biases").size);
}
// A negative blob size (size="-64" on the weights element) must make the
// parser fail instead of being accepted or silently wrapping around.
TEST_F(V3FormatParserTest, IncorrectWeights) {
std::string model = R"V0G0N(
<net name="PVANET" version="3" batch="1">
<layers>
<layer name="data" type="Input" precision="FP32" id="0">
<output>
<port id="0">
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="1" name="MatMul" precision="FP32" type="FullyConnected">
<data out-size="800000"/>
<input>
<port id="0">
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>800000</dim>
</port>
</output>
<blobs>
<weights offset="891492352" size="-64"/>
<biases offset="4168292352" size="3200000"/>
</blobs>
</layer>
</layers>
<edges>
<edge from-layer="0" from-port="0" to-layer="1" to-port="0"/>
</edges>
</net>)V0G0N";
assertParseFail(model);
}

View File

@@ -1,217 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <gtest/gtest.h>
#include "cpp/ie_cnn_network.h"
#include "common_test_utils/xml_net_builder/xml_net_builder.hpp"
#include "xml_helper.hpp"
#include "pugixml.hpp"
#include "ie_format_parser.h"
#include <stdio.h>
#include "details/ie_exception.hpp"
using namespace std;
using namespace InferenceEngine;
class V2TopologyVerificationTests : public ::testing::Test {
protected:
virtual void TearDown() {}
virtual void SetUp() {
xmlHelper.reset(new testing::XMLHelper(new details::FormatParser(2)));
}
public:
unique_ptr<CNNNetwork> cnnNetwork;
unique_ptr<testing::XMLHelper> xmlHelper;
string getNetworkWithConvLayer(string layerPrecision = "Q78", std::vector<size_t > layerInput = { 1, 3, 227, 227 }) {
std::vector<size_t > inputDims = { 1, 3, 227, 227 };
std::vector<size_t > outputDims = { 1, 96, 55, 55 };
return CommonTestUtils::V2NetBuilder::buildNetworkWithOneInput("",layerInput)
.havingLayers()
.convolutionLayer(layerPrecision, { {inputDims}, {outputDims} })
.finish();
}
string getNetworkWithConvLayerWithInputPrecision(string inputPrecision, string layerPrecision = "Q78",
std::vector<size_t > layerInput = {1, 3, 227, 227}) {
std::vector<size_t > inputDims = {1, 3, 227, 227};
std::vector<size_t > outputDims = {1, 96, 55, 55};
return CommonTestUtils::V2NetBuilder::buildNetworkWithOneInput("",layerInput, inputPrecision)
.havingLayers()
.convolutionLayer(layerPrecision, {{inputDims}, {outputDims}})
.finish();
}
string getNetworkWithPoolLayer(std::vector<size_t > layerInput = { 1, 3, 227, 227 }) {
std::vector<size_t > inputDims = { 1, 3, 227, 227 };
std::vector<size_t > outputDims = { 1, 96, 55, 55 };
return CommonTestUtils::V2NetBuilder::buildNetworkWithOneInput("",layerInput)
.havingLayers()
.poolingLayer("FP32", { { inputDims },{ outputDims } })
.finish();
}
string getNetworkWithCropLayer(CommonTestUtils::CropParams params, std::vector<size_t > layerInput = { 1, 3, 227, 227 }) {
std::vector<size_t > inputDims = { 1, 3, 227, 227 };
std::vector<size_t > outputDims = { 1, 3, 200, 227 };
return CommonTestUtils::V2NetBuilder::buildNetworkWithOneInput("",layerInput)
.havingLayers()
.cropLayer(params, { {inputDims}, {outputDims} })
.finish();
}
};
// A well-formed convolution network must parse without throwing.
TEST_F(V2TopologyVerificationTests, testNoThrow) {
    string testContent = getNetworkWithConvLayer();
    xmlHelper->loadContent(testContent);
    try {
        xmlHelper->parse();
    } catch (const InferenceEngine::details::InferenceEngineException& ex) {
        // Fix: catch by const reference — the original caught by value,
        // which copies the exception and slices any derived type.
        FAIL() << ex.what();
    }
}
// With FP16 declared on both network input and the layer, the exposed
// input and output precisions are expected to default to FP32.
TEST_F(V2TopologyVerificationTests, testDefaultPrecisionsForFP16InputAndOutputLayers) {
    string testContent = getNetworkWithConvLayerWithInputPrecision(Precision(Precision::FP16).name(),
                                                                   Precision(Precision::FP16).name());
    InferenceEngine::details::CNNNetworkImplPtr cnnNetworkImplPtr;
    xmlHelper->loadContent(testContent);
    try {
        cnnNetworkImplPtr = xmlHelper->parseWithReturningNetwork();
    } catch (const InferenceEngine::details::InferenceEngineException& ex) {
        // Fix: catch by const reference instead of by value (copy + slicing).
        FAIL() << ex.what();
    }
    OutputsDataMap outputsDataMap;
    cnnNetworkImplPtr->getOutputsInfo(outputsDataMap);
    for (const auto& outputData : outputsDataMap) {
        // Fix: const& — the by-value loop copied the {name, DataPtr} pair
        // (including a shared_ptr refcount bump) on every iteration.
        ASSERT_TRUE(outputData.second->getPrecision() == Precision::FP32);
    }
    InputsDataMap inputsDataMap;
    cnnNetworkImplPtr->getInputsInfo(inputsDataMap);
    for (const auto& inputData : inputsDataMap) {
        ASSERT_TRUE(inputData.second->getPrecision() == Precision::FP32);
    }
}
// With FP32 declared everywhere, exposed input and output precisions
// must stay FP32.
TEST_F(V2TopologyVerificationTests, testDefaultPrecisionsFP32InputAndOutputLayers) {
    string testContent = getNetworkWithConvLayerWithInputPrecision(Precision(Precision::FP32).name(),
                                                                   Precision(Precision::FP32).name());
    InferenceEngine::details::CNNNetworkImplPtr cnnNetworkImplPtr;
    xmlHelper->loadContent(testContent);
    try {
        cnnNetworkImplPtr = xmlHelper->parseWithReturningNetwork();
    } catch (const InferenceEngine::details::InferenceEngineException& ex) {
        // Fix: catch by const reference instead of by value (copy + slicing).
        FAIL() << ex.what();
    }
    OutputsDataMap outputsDataMap;
    cnnNetworkImplPtr->getOutputsInfo(outputsDataMap);
    for (const auto& outputData : outputsDataMap) {
        // Fix: const& avoids copying the map pair each iteration.
        ASSERT_TRUE(outputData.second->getPrecision() == Precision::FP32);
    }
    InputsDataMap inputsDataMap;
    cnnNetworkImplPtr->getInputsInfo(inputsDataMap);
    for (const auto& inputData : inputsDataMap) {
        ASSERT_TRUE(inputData.second->getPrecision() == Precision::FP32);
    }
}
// With Q78 declared on network input and layer, outputs are expected to be
// exposed as FP32 while inputs are exposed as I16 (per the assertions).
TEST_F(V2TopologyVerificationTests, testDefaultPrecisionsForQ78InputAndOutputLayers) {
    string testContent = getNetworkWithConvLayerWithInputPrecision(Precision(Precision::Q78).name(),
                                                                   Precision(Precision::Q78).name());
    InferenceEngine::details::CNNNetworkImplPtr cnnNetworkImplPtr;
    xmlHelper->loadContent(testContent);
    try {
        cnnNetworkImplPtr = xmlHelper->parseWithReturningNetwork();
    } catch (const InferenceEngine::details::InferenceEngineException& ex) {
        // Fix: catch by const reference instead of by value (copy + slicing).
        FAIL() << ex.what();
    }
    OutputsDataMap outputsDataMap;
    cnnNetworkImplPtr->getOutputsInfo(outputsDataMap);
    for (const auto& outputData : outputsDataMap) {
        // Fix: const& avoids copying the map pair each iteration.
        ASSERT_TRUE(outputData.second->getPrecision() == Precision::FP32);
    }
    InputsDataMap inputsDataMap;
    cnnNetworkImplPtr->getInputsInfo(inputsDataMap);
    for (const auto& inputData : inputsDataMap) {
        ASSERT_TRUE(inputData.second->getPrecision() == Precision::I16);
    }
}
// Convolution input must be 4D: a 5D input shape has to be rejected.
TEST_F(V2TopologyVerificationTests, testCheckConvolutionInputDim_More) {
    const string net5d = getNetworkWithConvLayer("Q78", { 1, 1, 3, 227, 227 });
    xmlHelper->loadContent(net5d);
    // TODO: fix RTTI issue and replace by InferenceEngine::details::InferenceEngineException
    EXPECT_THROW(xmlHelper->parse(), std::exception);
}
// Convolution input must be 4D: a 2D input shape has to be rejected.
TEST_F(V2TopologyVerificationTests, testCheckConvolutionInputDim_Less) {
    const string net2d = getNetworkWithConvLayer("Q78", { 227, 227 });
    xmlHelper->loadContent(net2d);
    // TODO: fix RTTI issue and replace by InferenceEngine::details::InferenceEngineException
    EXPECT_THROW(xmlHelper->parse(), std::exception);
}
// Pooling input must be 4D: a 2D input shape has to be rejected.
TEST_F(V2TopologyVerificationTests, testCheckPoolingInputDim_Less) {
    const string net2d = getNetworkWithPoolLayer({ 227, 227 });
    xmlHelper->loadContent(net2d);
    // TODO: fix RTTI issue and replace by InferenceEngine::details::InferenceEngineException
    EXPECT_THROW(xmlHelper->parse(), std::exception);
}
// Pooling input must be 4D: a 5D input shape has to be rejected.
TEST_F(V2TopologyVerificationTests, testCheckPoolingInputDim_More) {
    const string net5d = getNetworkWithPoolLayer({ 1, 1, 3, 227, 227 });
    xmlHelper->loadContent(net5d);
    // TODO: fix RTTI issue and replace by InferenceEngine::details::InferenceEngineException
    EXPECT_THROW(xmlHelper->parse(), std::exception);
}
// "MIXED" is not a valid per-layer precision; the parser must reject it.
// Fix: renamed from "testLeayerPrecisionIsNotMIXED" — "Leayer" typo.
TEST_F(V2TopologyVerificationTests, testLayerPrecisionIsNotMIXED) {
    string testContent = getNetworkWithConvLayer("MIXED");
    xmlHelper->loadContent(testContent);
    // TODO: fix RTTI issue and replace by InferenceEngine::details::InferenceEngineException
    EXPECT_THROW(xmlHelper->parse(), std::exception);
}
// A Q78 layer inside a network whose precision differs must still parse:
// per-layer precision diverging from the network's is not an error.
TEST_F(V2TopologyVerificationTests, testMixedPrecisionIfLayerAndNetworkPrecisionsDiffer) {
    string testContent = getNetworkWithConvLayer("Q78");
    xmlHelper->loadContent(testContent);
    try {
        xmlHelper->parse();
    } catch (const InferenceEngine::details::InferenceEngineException& ex) {
        // Fix: catch by const reference instead of by value (copy + slicing).
        FAIL() << ex.what();
    }
}
// Crop parameters that exceed the input extent must make parsing throw.
TEST_F(V2TopologyVerificationTests, throwsIfCropDimIsTooBig) {
    const CommonTestUtils::CropData oversizedCrop = { 1, 0, 200 };
    const string content = getNetworkWithCropLayer({ oversizedCrop });
    xmlHelper->loadContent(content);
    // TODO: fix RTTI issue and replace by InferenceEngine::details::InferenceEngineException
    ASSERT_THROW(xmlHelper->parse(), std::exception);
}
// The same crop size on a different axis is valid and must parse cleanly.
TEST_F(V2TopologyVerificationTests, testNoThrowWithProperCropParameters) {
    const CommonTestUtils::CropData validCrop = { 2, 0, 200 };
    const string content = getNetworkWithCropLayer({ validCrop });
    xmlHelper->loadContent(content);
    ASSERT_NO_THROW(xmlHelper->parse());
}

View File

@@ -1,94 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <string>
#include <list>
#include <sstream>
#include <memory>
#include <map>
#include <vector>
#include "parsers.h"
#include "pugixml.hpp"
#include <fstream>
#include <stdio.h>
#include "cpp/ie_cnn_network.h"
#include <gtest/gtest.h>
#include "xml_helper.hpp"
#include <pugixml.hpp>
namespace testing {
// Pimpl for XMLHelper: keeps pugixml types out of the public test header.
class XMLHelper::impl {
public:
std::unique_ptr<pugi::xml_node> _root;
std::unique_ptr<pugi::xml_document> _doc;
};
// Takes OWNERSHIP of the raw parser pointer `p` (stored in a unique_ptr)
// and prepares an empty pugixml document/root for the load* methods.
XMLHelper::XMLHelper(InferenceEngine::details::IFormatParser* p) {
parser.reset(p);
_impl = std::make_shared<impl>();
_impl->_doc.reset(new pugi::xml_document());
_impl->_root.reset(new pugi::xml_node());
}
// Parses XML held in `fileContent` and remembers its document element;
// a malformed document fails the surrounding test via EXPECT_EQ.
void XMLHelper::loadContent(const std::string &fileContent) {
    const auto parseResult = _impl->_doc->load_string(fileContent.c_str());
    EXPECT_EQ(pugi::status_ok, parseResult.status) << parseResult.description() << " at offset " << parseResult.offset;
    *_impl->_root = _impl->_doc->document_element();
}
// Loads and parses the XML file at `filename`, remembering its document
// element; a load failure fails the surrounding test via EXPECT_EQ.
void XMLHelper::loadFile(const std::string &filename) {
    const auto loadResult = _impl->_doc->load_file(filename.c_str());
    EXPECT_EQ(pugi::status_ok, loadResult.status) << loadResult.description() << " at offset " << loadResult.offset;
    *_impl->_root = _impl->_doc->document_element();
}
// Runs the wrapped IFormatParser over the previously loaded document root;
// the parser's return value is discarded (see parseWithReturningNetwork).
void XMLHelper::parse() {
parser->Parse(*_impl->_root);
}
// Like parse(), but returns the CNNNetworkImpl the parser produced.
InferenceEngine::details::CNNNetworkImplPtr XMLHelper::parseWithReturningNetwork() {
return parser->Parse(*_impl->_root);
}
// Forwards a weights blob to the wrapped parser.
void XMLHelper::setWeights(const InferenceEngine::TBlob<uint8_t>::Ptr &weights) {
parser->SetWeights(weights);
}
// Reads the whole file at `filePath` (resolved via getXmlPath) into a string.
// Uses EXPECT_* so a failure marks the test failed without aborting it.
std::string XMLHelper::readFileContent(const std::string & filePath) {
    const auto openFlags = std::ios_base::ate | std::ios_base::binary;
    std::ifstream fp (getXmlPath(filePath), openFlags);
    EXPECT_TRUE(fp.is_open());
    if (!fp.is_open()) {
        // Fix: the original fell through here — tellg() on a failed stream
        // returns -1, and reserve((size_t)-1) then throws std::length_error.
        // Report the failed expectation and return an empty string instead.
        return std::string();
    }
    const std::streamsize size = fp.tellg();  // opened with `ate`: position == file size
    EXPECT_GE( size , 1) << "file is empty: " << filePath;
    std::string str;
    str.reserve(static_cast<size_t>(size));   // named cast instead of C-style
    fp.seekg(0, std::ios::beg);
    str.assign((std::istreambuf_iterator<char>(fp)),
               std::istreambuf_iterator<char>());
    return str;
}
// Resolves `filePath` to an openable XML path: tries the path as given and,
// if it cannot be opened, retries one directory up ("../" + path). If neither
// opens, the EXPECT_TRUE fails the test, but the parent-dir path is still
// returned to the caller.
std::string XMLHelper::getXmlPath(const std::string & filePath){
std::string xmlPath = filePath;
const auto openFlags = std::ios_base::ate | std::ios_base::binary;
std::ifstream fp (xmlPath, openFlags);
// TODO: Due to the mix of multi-directory and single-directory build
// systems, relative paths are unreliable here; hence the parent-directory
// fallback below.
if (!fp.is_open()) {
fp.open(getParentDir(xmlPath), openFlags);
EXPECT_TRUE(fp.is_open())
<< "cannot open file " << xmlPath <<" or " << getParentDir(xmlPath);
fp.close();
xmlPath = getParentDir(xmlPath);
}
return xmlPath;
}
}

View File

@@ -1,56 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <string>
#include <list>
#include <sstream>
#include <memory>
#include <map>
#include <vector>
#include <parsers.h>
#include <fstream>
#include <stdio.h>
#include "cpp/ie_cnn_network.h"
#include <gtest/gtest.h>
namespace testing {
// Test utility that wraps an InferenceEngine IFormatParser together with a
// pugixml document (hidden behind the `impl` pimpl) so tests can load IR XML
// from a string or a file and run the parser over it.
class XMLHelper {
public:
// Takes ownership of `p`.
explicit XMLHelper(InferenceEngine::details::IFormatParser* p);
// Parse XML from an in-memory string; flags the test on malformed XML.
void loadContent(const std::string &fileContent);
// Parse XML from a file on disk; flags the test on load failure.
void loadFile(const std::string &filename);
// Run the wrapped parser over the loaded document (result discarded).
void parse();
// Run the wrapped parser and return the network it builds.
InferenceEngine::details::CNNNetworkImplPtr parseWithReturningNetwork();
// Forward a weights blob to the wrapped parser.
void setWeights(const InferenceEngine::TBlob<uint8_t>::Ptr &weights);
// Read a whole file (resolved via getXmlPath) into a string.
std::string readFileContent(const std::string & filePath);
private:
// Resolve a path, falling back to "../<path>" if it cannot be opened.
std::string getXmlPath(const std::string & filePath);
const char kPathSeparator =
#if defined _WIN32 || defined __CYGWIN__
'\\';
#else
'/';
#endif
const std::string parentDir = std::string("..") + kPathSeparator;
std::string getParentDir(std::string currentFile) const {
return parentDir + currentFile;
}
std::unique_ptr<InferenceEngine::details::IFormatParser> parser;
std::vector<std::string> _classes;
// hide pugixml from public dependencies
class impl;
std::shared_ptr<impl> _impl;
};
}