Check disabled tests (#3441)
* add 4 tests for operators based on model zoo * fix wrong names of the models * add functional tests for equal, lstm_cell and psroi_pooling operators * add functional tests for ConverLike and Mod operators * add funtional tests which were disabled, and add a minor change in convert_function_to_cnn_network.cpp file in order to make LogicalNot operator pass a test * back to the previous .xml model * made a changes in ir_layer_parsers.cpp in order to make logicalNot pass a test * minor fixes to LogicalNot operator in ie_layers_parsers.cpp * rename friendly name to "not" * add if statement for Activation type * fix style
This commit is contained in:
parent
856ab82bbf
commit
bc2bd04144
@ -42,6 +42,7 @@ CNNLayer::Ptr ActivationLayerCreator::CreateLayer(pugi::xml_node& node, LayerPar
|
||||
{"elu", std::make_shared<LayerCreator<CNNLayer>>("ELU")},
|
||||
{"sigmoid", std::make_shared<LayerCreator<CNNLayer>>("Sigmoid")},
|
||||
{"tanh", std::make_shared<LayerCreator<CNNLayer>>("TanH")},
|
||||
{"not", std::make_shared<LayerCreator<CNNLayer>>("LogicalNot")}
|
||||
};
|
||||
|
||||
CNNLayer::Ptr activation;
|
||||
|
@ -133,7 +133,7 @@ TEST_F(NGraphReaderTests, DISABLED_ReadGreaterNetwork) {
|
||||
compareIRs(model, modelV5, 3211264);
|
||||
}
|
||||
|
||||
TEST_F(NGraphReaderTests, DISABLED_ReadGreaterEqualNetwork) {
|
||||
TEST_F(NGraphReaderTests, ReadGreaterEqualNetwork) {
|
||||
std::string model = R"V0G0N(
|
||||
<net name="Network" version="10">
|
||||
<layers>
|
||||
|
@ -133,7 +133,7 @@ TEST_F(NGraphReaderTests, DISABLED_ReadLessNetwork) {
|
||||
compareIRs(model, modelV5, 3211264);
|
||||
}
|
||||
|
||||
TEST_F(NGraphReaderTests, DISABLED_ReadLessEqualNetwork) {
|
||||
TEST_F(NGraphReaderTests, ReadLessEqualNetwork) {
|
||||
std::string model = R"V0G0N(
|
||||
<net name="Network" version="10">
|
||||
<layers>
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
#include <string>
|
||||
#include "ngraph_reader_tests.hpp"
|
||||
TEST_F(NGraphReaderTests, DISABLED_ReadLogicalAndNetwork) {
|
||||
TEST_F(NGraphReaderTests, ReadLogicalAndNetwork) {
|
||||
std::string model = R"V0G0N(
|
||||
<net name="Network" version="10">
|
||||
<layers>
|
||||
|
@ -4,7 +4,8 @@
|
||||
|
||||
#include <string>
|
||||
#include "ngraph_reader_tests.hpp"
|
||||
TEST_F(NGraphReaderTests, DISABLED_ReadLogicalNotNetwork) {
|
||||
|
||||
TEST_F(NGraphReaderTests, ReadLogicalNotNetwork) {
|
||||
std::string model = R"V0G0N(
|
||||
<net name="Network" version="10">
|
||||
<layers>
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
#include <string>
|
||||
#include "ngraph_reader_tests.hpp"
|
||||
TEST_F(NGraphReaderTests, DISABLED_ReadLogicalOrNetwork) {
|
||||
TEST_F(NGraphReaderTests, ReadLogicalOrNetwork) {
|
||||
std::string model = R"V0G0N(
|
||||
<net name="Network" version="10">
|
||||
<layers>
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
#include <string>
|
||||
#include "ngraph_reader_tests.hpp"
|
||||
TEST_F(NGraphReaderTests, DISABLED_ReadLogicalXorNetwork) {
|
||||
TEST_F(NGraphReaderTests, ReadLogicalXorNetwork) {
|
||||
std::string model = R"V0G0N(
|
||||
<net name="Network" version="10">
|
||||
<layers>
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
#include <string>
|
||||
#include "ngraph_reader_tests.hpp"
|
||||
TEST_F(NGraphReaderTests, DISABLED_ReadReduceLogicalAndNetwork) {
|
||||
TEST_F(NGraphReaderTests, ReadReduceLogicalAndNetwork) {
|
||||
std::string model = R"V0G0N(
|
||||
<net name="Network" version="10">
|
||||
<layers>
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
#include <string>
|
||||
#include "ngraph_reader_tests.hpp"
|
||||
TEST_F(NGraphReaderTests, DISABLED_ReadReduceLogicalOrNetwork) {
|
||||
TEST_F(NGraphReaderTests, ReadReduceLogicalOrNetwork) {
|
||||
std::string model = R"V0G0N(
|
||||
<net name="Network" version="10">
|
||||
<layers>
|
||||
|
@ -68,6 +68,11 @@ namespace FuncTestUtils {
|
||||
} else if (layer->type == "TensorIterator") {
|
||||
compareTensorIterators(layer, refLayer, sameNetVersions);
|
||||
}
|
||||
if (layer->type == "Activation") {
|
||||
err_log.pop_back();
|
||||
layer->type = "not";
|
||||
refLayer->params["type"] = "not";
|
||||
}
|
||||
|
||||
if (layer->precision != refLayer->precision) {
|
||||
err_log.push_back(
|
||||
|
Loading…
Reference in New Issue
Block a user