[IE TESTS] Move old IE Unit tests to the new infra (#605)
parent 31fe146539
commit 44cd77f54b
@@ -10,6 +10,9 @@ add_subdirectory(extension_lib)
 addIeTargetTest(
         NAME ${TARGET_NAME}
         ROOT ${CMAKE_CURRENT_SOURCE_DIR}
+        INCLUDES
+            # TODO: remove after removing `cnn_network_ngraph_imp.hpp`
+            ${IE_MAIN_SOURCE_DIR}/src/inference_engine
         EXCLUDED_SOURCE_DIRS
             ${CMAKE_CURRENT_SOURCE_DIR}/extension_lib
         LINK_LIBRARIES
@@ -4,8 +4,7 @@
 
 #include <gtest/gtest.h>
 
-#include <cnn_network_ngraph_impl.hpp>
-#include "tests_common.hpp"
+#include <cnn_network_impl.hpp>
 #include <string>
 #include <sstream>
 #include <fstream>
@@ -28,13 +27,14 @@
 
 #include "common_test_utils/file_utils.hpp"
 #include "transformations/rt_info/primitives_priority_attribute.hpp"
+#include "cnn_network_ngraph_impl.hpp"
 
 using namespace testing;
 using namespace InferenceEngine;
 
-class CNNNGraphImplTests : public TestsCommon {};
+IE_SUPPRESS_DEPRECATED_START
 
-TEST_F(CNNNGraphImplTests, TestConvertNetwork) {
+TEST(CNNNGraphImplTests, TestConvertNetwork) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 22, 22});
@@ -56,7 +56,7 @@ TEST_F(CNNNGraphImplTests, TestConvertNetwork) {
     ASSERT_EQ(cnnRefNet, cnnNet.getCNNNetwork());
 }
 
-TEST_F(CNNNGraphImplTests, TestResultWithNotEqualName) {
+TEST(CNNNGraphImplTests, TestResultWithNotEqualName) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 22, 22});
@@ -77,7 +77,7 @@ TEST_F(CNNNGraphImplTests, TestResultWithNotEqualName) {
     ASSERT_NO_THROW(cnnNet.getCNNNetwork());
 }
 
-TEST_F(CNNNGraphImplTests, TestGetOutputAfterConvertNetwork) {
+TEST(CNNNGraphImplTests, TestGetOutputAfterConvertNetwork) {
     const std::string testLayerName = "testReLU";
     std::shared_ptr<ngraph::Function> ngraph;
     {
@@ -104,7 +104,7 @@ TEST_F(CNNNGraphImplTests, TestGetOutputAfterConvertNetwork) {
     ASSERT_EQ(2, outs.size());
 }
 
-TEST_F(CNNNGraphImplTests, TestSetCurrentBatch) {
+TEST(CNNNGraphImplTests, TestSetCurrentBatch) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 22, 22});
@@ -126,7 +126,7 @@ TEST_F(CNNNGraphImplTests, TestSetCurrentBatch) {
     ASSERT_NE(nullptr, cnnNet.getFunction());
 }
 
-TEST_F(CNNNGraphImplTests, TestSetBatch) {
+TEST(CNNNGraphImplTests, TestSetBatch) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 22, 22});
@@ -154,7 +154,7 @@ TEST_F(CNNNGraphImplTests, TestSetBatch) {
     ASSERT_EQ(2, cnnNet.getCNNNetwork()->getBatchSize());
 }
 
-TEST_F(CNNNGraphImplTests, TestSaveAffinity) {
+TEST(CNNNGraphImplTests, TestSaveAffinity) {
     const std::string testAffinity = "testAffinity";
     std::shared_ptr<ngraph::Function> ngraph;
     {
@@ -179,7 +179,7 @@ TEST_F(CNNNGraphImplTests, TestSaveAffinity) {
     ASSERT_EQ(cnnLayer->affinity, testAffinity);
 }
 
-TEST_F(CNNNGraphImplTests, TestAddOutput) {
+TEST(CNNNGraphImplTests, TestAddOutput) {
     const std::string testLayerName = "testReLU";
     std::shared_ptr<ngraph::Function> ngraph;
     {
@@ -211,7 +211,7 @@ TEST_F(CNNNGraphImplTests, TestAddOutput) {
     ASSERT_TRUE(outputs.find(testLayerName) != outputs.end());
 }
 
-TEST_F(CNNNGraphImplTests, TestAddOutputFromConvertedNetwork) {
+TEST(CNNNGraphImplTests, TestAddOutputFromConvertedNetwork) {
     const std::string testLayerName = "testReLU";
     std::shared_ptr<ngraph::Function> ngraph;
     {
@@ -244,7 +244,7 @@ TEST_F(CNNNGraphImplTests, TestAddOutputFromConvertedNetwork) {
     ASSERT_TRUE(outputs.find(testLayerName) != outputs.end());
 }
 
-TEST_F(CNNNGraphImplTests, ConstantAsInternalAndExternalLayer) {
+TEST(CNNNGraphImplTests, ConstantAsInternalAndExternalLayer) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 22, 22});
@@ -266,7 +266,7 @@ TEST_F(CNNNGraphImplTests, ConstantAsInternalAndExternalLayer) {
     ASSERT_EQ(4, cnnNet.layerCount());
 }
 
-TEST_F(CNNNGraphImplTests, SaveInputInfoAfterConversion) {
+TEST(CNNNGraphImplTests, SaveInputInfoAfterConversion) {
     std::string name = "param";
     std::shared_ptr<ngraph::Function> ngraph;
     {
@@ -297,7 +297,7 @@ TEST_F(CNNNGraphImplTests, SaveInputInfoAfterConversion) {
     ASSERT_EQ(inputInfo->getPreProcess().getResizeAlgorithm(), ResizeAlgorithm::RESIZE_AREA);
 }
 
-TEST_F(CNNNGraphImplTests, SaveAttributesAfterConversion) {
+TEST(CNNNGraphImplTests, SaveAttributesAfterConversion) {
     std::string name = "prelu";
     std::shared_ptr<ngraph::Function> ngraph;
     {
@@ -330,7 +330,7 @@ TEST_F(CNNNGraphImplTests, SaveAttributesAfterConversion) {
     ASSERT_EQ(layer->params["test"], "2");
 }
 
-TEST_F(CNNNGraphImplTests, SavePrimitivesPriority) {
+TEST(CNNNGraphImplTests, SavePrimitivesPriority) {
     std::string model = R"V0G0N(
 <net name="Activation" version="10">
     <layers>
@@ -391,7 +391,7 @@ TEST_F(CNNNGraphImplTests, SavePrimitivesPriority) {
     ASSERT_EQ("cpu:avx2", cnnLayer->params["PrimitivesPriority"]);
 }
 
-TEST_F(CNNNGraphImplTests, ReadFromCNNNetReader) {
+TEST(CNNNGraphImplTests, ReadFromCNNNetReader) {
     std::string model = R"V0G0N(
 <net name="Activation" version="10">
     <layers>
@@ -447,7 +447,7 @@ TEST_F(CNNNGraphImplTests, ReadFromCNNNetReader) {
     ASSERT_EQ(2, network.layerCount());
 }
 
-TEST_F(CNNNGraphImplTests, CanChangeInputPrecision) {
+TEST(CNNNGraphImplTests, CanChangeInputPrecision) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 16, 16});
@@ -493,7 +493,7 @@ TEST_F(CNNNGraphImplTests, CanChangeInputPrecision) {
     }
 }
 
-TEST_F(CNNNGraphImplTests, CanChangeInputLayout) {
+TEST(CNNNGraphImplTests, CanChangeInputLayout) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 16, 16});
@@ -539,7 +539,7 @@ TEST_F(CNNNGraphImplTests, CanChangeInputLayout) {
     }
 }
 
-TEST_F(CNNNGraphImplTests, CanChangeOutputPrecision) {
+TEST(CNNNGraphImplTests, CanChangeOutputPrecision) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 16, 16});
@@ -585,7 +585,7 @@ TEST_F(CNNNGraphImplTests, CanChangeOutputPrecision) {
     }
 }
 
-TEST_F(CNNNGraphImplTests, CanChangeOutputLayout) {
+TEST(CNNNGraphImplTests, CanChangeOutputLayout) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 16, 16});
@@ -631,7 +631,7 @@ TEST_F(CNNNGraphImplTests, CanChangeOutputLayout) {
     }
 }
 
-TEST_F(CNNNGraphImplTests, TestCheckStats) {
+TEST(CNNNGraphImplTests, TestCheckStats) {
     std::shared_ptr<ngraph::Function> ngraph;
     {
         ngraph::PartialShape shape({1, 3, 22, 22});
@@ -651,3 +651,5 @@ TEST_F(CNNNGraphImplTests, TestCheckStats) {
     ASSERT_EQ(NOT_FOUND, cnnNet.getStats(&_stats, nullptr));
     ASSERT_EQ(nullptr, _stats);
 }
+
+IE_SUPPRESS_DEPRECATED_END
@@ -4,8 +4,6 @@
 
 #include <gtest/gtest.h>
 
-#include "tests_common.hpp"
-
 #include <convert_function_to_cnn_network.hpp>
 #include <cpp/ie_cnn_network.h>
 
@@ -17,9 +15,7 @@
 using namespace testing;
 using namespace InferenceEngine;
 
-using ConvertFunctionToCNNNetworkTests = TestsCommon;
-
-TEST_F(ConvertFunctionToCNNNetworkTests, ConvertPReLUNetwork) {
+TEST(ConvertFunctionToCNNNetworkTests, ConvertPReLUNetwork) {
     std::shared_ptr<ngraph::Function> f;
     {
         auto param1 = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::f32, ngraph::Shape{2, 2});
@@ -34,9 +30,9 @@ TEST_F(ConvertFunctionToCNNNetworkTests, ConvertPReLUNetwork) {
     }
 
     InferenceEngine::CNNNetwork nGraphImpl(f);
+    ASSERT_ANY_THROW(InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl));
     try {
         auto net = InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl);
-        FAIL();
     } catch (InferenceEngine::details::InferenceEngineException &err) {
         const std::string ref_msg = "Error of validate layer: prelu with type: PReLU. Number of inputs (2) is not equal to expected ones: 1";
         const std::string resp_msg = err.what();
@@ -44,7 +40,7 @@ TEST_F(ConvertFunctionToCNNNetworkTests, ConvertPReLUNetwork) {
     }
 }
 
-TEST_F(ConvertFunctionToCNNNetworkTests, ConvertConvolutionNetwork) {
+TEST(ConvertFunctionToCNNNetworkTests, ConvertConvolutionNetwork) {
     std::shared_ptr<ngraph::Function> f;
     {
         auto param1 = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::f32, ngraph::Shape{1, 3, 64, 64});
@@ -3,7 +3,6 @@
 //
 
 #include <gtest/gtest.h>
-#include <single_layer_common.hpp>
 
 #include <ie_core.hpp>
 #include <net_pass.h>
@@ -20,10 +19,10 @@ class LocaleTests : public ::testing::Test {
         <layer name="data" type="Input" precision="FP32" id="0">
             <output>
                 <port id="0">
-                    <dim>_IN_</dim>
-                    <dim>_IC_</dim>
-                    <dim>_IH_</dim>
-                    <dim>_IW_</dim>
+                    <dim>2</dim>
+                    <dim>3</dim>
+                    <dim>5</dim>
+                    <dim>5</dim>
                 </port>
             </output>
         </layer>
@@ -34,18 +33,18 @@ class LocaleTests : public ::testing::Test {
 
             <input>
                 <port id="1">
-                    <dim>_IN_</dim>
-                    <dim>_IC_</dim>
-                    <dim>_IH_</dim>
-                    <dim>_IW_</dim>
+                    <dim>2</dim>
+                    <dim>3</dim>
+                    <dim>5</dim>
+                    <dim>5</dim>
                 </port>
             </input>
             <output>
                 <port id="2">
-                    <dim>_IN_</dim>
-                    <dim>_IC_</dim>
-                    <dim>_IH_</dim>
-                    <dim>_IW_</dim>
+                    <dim>2</dim>
+                    <dim>3</dim>
+                    <dim>5</dim>
+                    <dim>5</dim>
                 </port>
             </output>
         </layer>
@@ -53,24 +52,24 @@ class LocaleTests : public ::testing::Test {
             <data coeff="0.77,0.33"/>
             <input>
                 <port id="1">
-                    <dim>_IN_</dim>
-                    <dim>_IC_</dim>
-                    <dim>_IH_</dim>
-                    <dim>_IW_</dim>
+                    <dim>2</dim>
+                    <dim>3</dim>
+                    <dim>5</dim>
+                    <dim>5</dim>
                 </port>
                 <port id="2">
-                    <dim>_IN_</dim>
-                    <dim>_IC_</dim>
-                    <dim>_IH_</dim>
-                    <dim>_IW_</dim>
+                    <dim>2</dim>
+                    <dim>3</dim>
+                    <dim>5</dim>
+                    <dim>5</dim>
                 </port>
             </input>
             <output>
                 <port id="3">
-                    <dim>_IN_</dim>
-                    <dim>_IC_</dim>
-                    <dim>_IH_</dim>
-                    <dim>_IW_</dim>
+                    <dim>2</dim>
+                    <dim>3</dim>
+                    <dim>5</dim>
+                    <dim>5</dim>
                 </port>
             </output>
         </layer>
@@ -98,7 +97,6 @@ class LocaleTests : public ::testing::Test {
         </net>
         )V0G0N";
 
-
     std::string _model_LSTM = R"V0G0N(
         <net batch="1" name="model" version="2">
             <layers>
@@ -196,7 +194,6 @@ class LocaleTests : public ::testing::Test {
         )V0G0N";
 
 protected:
-
     void SetUp() override {
         originalLocale = setlocale(LC_ALL, nullptr);
     }
@@ -204,25 +201,15 @@
         setlocale(LC_ALL, originalLocale.c_str());
     }
 
-    std::string getModel() const {
-        std::string model = _model;
-
-        REPLACE_WITH_NUM(model, "_IN_", 2);
-        REPLACE_WITH_NUM(model, "_IC_", 3);
-        REPLACE_WITH_NUM(model, "_IH_", 4);
-        REPLACE_WITH_NUM(model, "_IW_", 5);
-
-        return model;
-    }
-
     void testBody(bool isLSTM = false) const {
         InferenceEngine::Core core;
 
         // This model contains layers with float attributes.
         // Conversion from string may be affected by locale.
-        std::string model = isLSTM ? _model_LSTM : getModel();
+        std::string model = isLSTM ? _model_LSTM : _model;
         auto net = core.ReadNetwork(model, InferenceEngine::Blob::CPtr());
 
+        IE_SUPPRESS_DEPRECATED_START
         if (!isLSTM) {
             auto power_layer = dynamic_pointer_cast<PowerLayer>(net.getLayerByName("power"));
             ASSERT_EQ(power_layer->scale, 0.75f);
@@ -250,6 +237,7 @@
             ASSERT_EQ(lstmcell_layer->GetParamAsFloat("min"), -ref_coeff);
             ASSERT_EQ(lstmcell_layer->GetParamAsFloat("max"), ref_coeff);
         }
+        IE_SUPPRESS_DEPRECATED_END
     }
 };
 
@@ -0,0 +1,52 @@
+// Copyright (C) 2018-2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <ie_locked_memory.hpp>
+#include "unit_test_utils/mocks/mock_allocator.hpp"
+
+using namespace InferenceEngine;
+using namespace ::testing;
+
+TEST(LockedMemoryTest, canUnlockMemoryAfterUsage) {
+    std::unique_ptr<MockAllocator> allocator(new MockAllocator());
+    char array[] = {1, 2, 3};
+
+    EXPECT_CALL(*allocator.get(), lock(reinterpret_cast<void*>(1), _)).WillRepeatedly(Return(reinterpret_cast<void*>(array)));
+    EXPECT_CALL(*allocator.get(), unlock(_)).Times(1);
+    {
+        auto x = LockedMemory<char>(allocator.get(), reinterpret_cast<void*>(1), 1);
+        //force locking of memory
+        auto t = x[0];
+        (void)t;
+    }
+}
+
+TEST(LockedMemoryTest, canReadFromLockedMemory) {
+    std::unique_ptr<MockAllocator> allocator(new MockAllocator());
+    char array[] = {1, 2, 3, 4, 5};
+
+    EXPECT_CALL(*allocator.get(), lock(reinterpret_cast<void*>(1), _)).WillRepeatedly(Return(reinterpret_cast<void*>(array)));
+    EXPECT_CALL(*allocator.get(), unlock(_)).Times(1);
+    {
+        auto x = LockedMemory<char>(allocator.get(), reinterpret_cast<void*>(1), 0);
+        //we are getting first element
+        ASSERT_EQ(1, x[0]);
+    }
+}
+
+TEST(LockedMemoryTest, canWriteToLockedMemory) {
+    std::unique_ptr<MockAllocator> allocator(new MockAllocator());
+    char array[] = {1, 2, 3, 4, 5};
+
+    EXPECT_CALL(*allocator.get(), lock(reinterpret_cast<void*>(1), _)).WillRepeatedly(Return(reinterpret_cast<void*>(array)));
+    EXPECT_CALL(*allocator.get(), unlock(_)).Times(1);
+    {
+        auto x = LockedMemory<char>(allocator.get(), reinterpret_cast<void*>(1), 0);
+
+        //we are getting first element
+        ASSERT_EQ(std::distance(array, &x[0]), 0);
+        x[0] = 5;
+    }
+    EXPECT_EQ(array[0], 5);
+}
@@ -1,67 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include <gtest/gtest.h>
-#include <ie_layer_validators.hpp>
-#include <memory>
-#include <ie_data.h>
-
-#include "layer_builder.h"
-#include "shapes.h"
-using namespace InferenceEngine;
-using namespace InferenceEngine::details;
-
-TEST_P(CNNLayerValidationTests, checkValidParams) {
-
-    assertThat(type)->setParams(valid_params);
-    auto layer = getLayer();
-    LayerValidator::Ptr validator = LayerValidators::getInstance()->getValidator(type);
-
-    ASSERT_NO_THROW(validator->parseParams(layer.get()));
-    ASSERT_NO_THROW(validator->checkParams(layer.get()));
-}
-
-TEST_P(CNNLayerValidationTests, checkInvalidParams) {
-
-    assertThat(type);
-    int numberOfParams = getNumOfParams();
-    LayerValidator::Ptr validator = LayerValidators::getInstance()->getValidator(type);
-    auto layer_ = getLayer();
-    for (int i = 0; i < numberOfParams; ++i) {
-        layer->setParams(!valid_params);
-        ASSERT_THROW(validator->parseParams(layer_.get()), InferenceEngineException);
-        ASSERT_THROW(validator->checkParams(layer_.get()), InferenceEngineException);
-    }
-}
-
-TEST_P(CNNLayerValidationTests, checkInvalidInputShapes) {
-    LayerValidator::Ptr validator = LayerValidators::getInstance()->getValidator(type);
-    std::vector<DataPtr> spData;
-    assertThat(type)->setShapes(spData, !valid_input);
-
-    auto layer_ = getLayer();
-    InOutDims shapes;
-    InferenceEngine::details::getInOutShapes(layer_.get(), shapes);
-    ASSERT_THROW(validator->checkShapes(layer_.get(), shapes.inDims), InferenceEngineException);
-}
-
-TEST_P(CNNLayerValidationTests, checkValidShapes) {
-
-    std::vector<DataPtr> spData;
-    assertThat(type)->setShapes(spData, valid_input);
-    auto layer = getLayer();
-    LayerValidator::Ptr validator = LayerValidators::getInstance()->getValidator(type);
-    InOutDims shapes;
-    InferenceEngine::details::getInOutShapes(layer.get(), shapes);
-    ASSERT_NO_THROW(validator->checkShapes(layer.get(), shapes.inDims));
-}
-
-INSTANTIATE_TEST_CASE_P(
-        InstantiationName, CNNLayerValidationTests,
-        ::testing::Values(
-                "Convolution"
-                ,"Deconvolution"
-                ,"DetectionOutput"
-        )
-);
@@ -9,11 +9,11 @@
 #include "cnn_network_impl.hpp"
 #include <tests_common.hpp>
 #include "ie_format_parser.h"
+#include "ie_blob_proxy.hpp"
 #include <string>
 #include "pugixml.hpp"
 #include "xml_parse_utils.h"
 #include "mean_image.h"
-#include "ie_blob_proxy.hpp"
 
 #include "common_test_utils/xml_net_builder/xml_father.hpp"
 
@@ -296,8 +296,7 @@ xml().node("net").attr("name", "AlexNet").attr("version", x)\
         std::vector<T> meanValues = MeanImage<T>::getValue();
         std::copy(meanValues.begin(), meanValues.end(), (T *) binBlobFloat->data());
         InferenceEngine::SizeVector dims_dst = {MT_HEIGHT, MT_WIDTH * sizeof(T), MT_CHANNELS};
-        typename InferenceEngine::TBlobProxy<uint8_t>::Ptr binBlob(new
-                InferenceEngine::TBlobProxy<uint8_t>(
+        typename InferenceEngine::TBlobProxy<uint8_t>::Ptr binBlob(new InferenceEngine::TBlobProxy<uint8_t>(
                 InferenceEngine::Precision::FP32, InferenceEngine::CHW, binBlobFloat, 0, dims_dst));
         return binBlob;
     }
@@ -1,55 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "tests_common.hpp"
-#include "unit_test_utils/mocks/mock_allocator.hpp"
-
-using namespace InferenceEngine;
-using namespace ::testing;
-
-using LockedMemoryTest = testing::Test;
-
-TEST_F(LockedMemoryTest, canUnlockMemoryAfterUsage) {
-    std::unique_ptr<MockAllocator> allocator(new MockAllocator());
-    char array [] = {1,2,3};
-
-    EXPECT_CALL(*allocator.get(), lock((void*)1, _)).WillRepeatedly(Return((void*)array));
-    EXPECT_CALL(*allocator.get(), unlock(_)).Times(1);
-    {
-        auto x = LockedMemory<char>(allocator.get(), (void *) 1, 1);
-        //force locking of memory
-        auto t = x[0];
-        (void)t;
-    }
-}
-
-TEST_F(LockedMemoryTest, canReadFromLockedMemory) {
-    std::unique_ptr<MockAllocator> allocator(new MockAllocator());
-    char array [] = {1,2,3,4,5};
-
-    EXPECT_CALL(*allocator.get(), lock((void*)1, _)).WillRepeatedly(Return((void*)array));
-    EXPECT_CALL(*allocator.get(), unlock(_)).Times(1);
-    {
-        auto x = LockedMemory<char>(allocator.get(), (void *) 1, 0);
-        //we are getting first element
-        ASSERT_EQ(1, x[0]);
-    }
-}
-
-TEST_F(LockedMemoryTest, canWriteToLockedMemory) {
-    std::unique_ptr<MockAllocator> allocator(new MockAllocator());
-    char array [] = {1,2,3,4,5};
-
-    EXPECT_CALL(*allocator.get(), lock((void*)1, _)).WillRepeatedly(Return((void*)array));
-    EXPECT_CALL(*allocator.get(), unlock(_)).Times(1);
-    {
-        auto x = LockedMemory<char>(allocator.get(), (void *) 1, 0);
-
-        //we are getting first element
-        ASSERT_EQ(std::distance(array, &x[0]), 0);
-        x[0] = 5;
-    }
-    EXPECT_EQ(array[0], 5);
-
-}