[IE TEST] Continue run after crash (#10037)
parent 3d223ebc2a
commit 3f15afb926
@@ -59,5 +59,24 @@ int main(int argc, char* argv[]) {
    ::testing::InitGoogleTest(&argc, argv);
    ::testing::AddGlobalTestEnvironment(new LayerTestsUtils::TestEnvironment);

    auto exernalSignalHandler = [](int errCode) {
        std::cerr << "Unexpected application crash with code: " << errCode << std::endl;

        // set default handler for crash
        signal(SIGINT, SIG_DFL);
        signal(SIGTERM, SIG_DFL);

        if (errCode == SIGINT || errCode == SIGTERM) {
            auto& s = LayerTestsUtils::Summary::getInstance();
            s.saveReport();
            exit(1);
        }
    };

    // killed by external signal
    signal(SIGINT, exernalSignalHandler);
    signal(SIGTERM, exernalSignalHandler);

    return RUN_ALL_TESTS();
}
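For context, the handler registered above only needs to do three things: restore the default signal disposition (so a repeated Ctrl+C still kills the process), persist whatever results have been collected, and exit. A minimal standalone sketch of that idea follows; ReportWriter::save() is a hypothetical stand-in for LayerTestsUtils::Summary::getInstance().saveReport() and is not part of this patch.

    #include <csignal>
    #include <cstdlib>
    #include <iostream>

    // Hypothetical stand-in for the test summary that the real handler saves.
    struct ReportWriter {
        static void save() { std::cerr << "partial report flushed" << std::endl; }
    };

    extern "C" void externalSignalHandler(int errCode) {
        // restore default handling first, so a second signal terminates normally
        std::signal(SIGINT, SIG_DFL);
        std::signal(SIGTERM, SIG_DFL);
        if (errCode == SIGINT || errCode == SIGTERM) {
            ReportWriter::save();   // keep results of the tests that already ran
            std::exit(1);
        }
    }

    int main() {
        std::signal(SIGINT, externalSignalHandler);
        std::signal(SIGTERM, externalSignalHandler);
        // ... RUN_ALL_TESTS() would go here; an external kill still leaves a report ...
        return 0;
    }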
@@ -13,11 +13,14 @@
#include "common_test_utils/file_utils.hpp"
#include "common_test_utils/data_utils.hpp"
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/crash_handler.hpp"
#include "functional_test_utils/layer_test_utils/op_info.hpp"
#include "functional_test_utils/skip_tests_config.hpp"

#include "read_ir_test/read_ir.hpp"

#include <setjmp.h>

namespace ov {
namespace test {
namespace subgraph {
@@ -48,118 +51,143 @@ std::string ReadIRTest::getTestCaseName(const testing::TestParamInfo<ReadIRParam
}

void ReadIRTest::query_model() {
if (functionRefs == nullptr) {
functionRefs = ngraph::clone_function(*function);
functionRefs->set_friendly_name("refFunction");
}
auto crashHandler = [](int errCode) {
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());

// place to jump in case of a crash
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
#endif
if (functionRefs == nullptr) {
functionRefs = ngraph::clone_function(*function);
functionRefs->set_friendly_name("refFunction");
}
auto &s = LayerTestsUtils::Summary::getInstance();
s.saveReport();
std::cout << "Unexpected application crash!" << std::endl;
std::abort();
};
signal(SIGSEGV, crashHandler);
s.setDeviceName(targetDevice);

auto &s = LayerTestsUtils::Summary::getInstance();
s.setDeviceName(targetDevice);

if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::SKIPPED);
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::SKIPPED);
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
} else {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::CRASHED);
}
try {
SubgraphBaseTest::query_model();
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::PASSED);
} catch (...) {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::FAILED);
}
} else {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::CRASHED);
}
try {
SubgraphBaseTest::query_model();
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::PASSED);
} catch (...) {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::FAILED);
IE_THROW() << "Crash happens";
}
}

void ReadIRTest::SetUp() {
std::tie(pathToModel, targetDevice, configuration) = this->GetParam();
function = core->read_model(pathToModel);
const auto metaFile = CommonTestUtils::replaceExt(pathToModel, "meta");
if (CommonTestUtils::fileExists(metaFile)) {
pugi::xml_document doc;
doc.load_file(metaFile.c_str());
auto models = doc.child("meta_info").child("models");
sourceModel = models.child("initial_model").attribute("name").as_string();
for (const auto &model : models.children("model")) {
ocuranceInModels.push_back({model.attribute("name").as_string(), model.attribute("count").as_uint()});
}
auto portsInfo = doc.child("meta_info").child("ports_info");
auto getPortInfo = [&](size_t id) {
LayerTestsUtils::PortInfo info;
for (const auto &p : portsInfo.children()) {
if (p.attribute("id").as_uint() == id) {
info.convert_to_const = p.attribute("convert_to_const").as_bool();
if (std::strcmp(p.attribute("min").as_string(), "undefined") != 0) {
info.min = p.attribute("min").as_double();
} else {
info.min = -10;
}
if (std::strcmp(p.attribute("max").as_string(), "undefined") != 0) {
info.max = p.attribute("max").as_double();
} else {
info.max = 10;
}
break;
}
}
return info;
};
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());

auto params = function->get_parameters();
for (const auto &param : params) {
auto idx = -1;
for (size_t i = 0; i < param->get_output_size(); i++) {
for (const auto &node : param->get_output_target_inputs(i)) {
const auto nodePtr = node.get_node()->shared_from_this();
for (size_t port = 0; port < nodePtr->get_input_size(); ++port) {
if (nodePtr->get_input_node_ptr(port)->shared_from_this() == param->shared_from_this()) {
idx = port;
break;
// place to jump in case of a crash
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
#endif
std::tie(pathToModel, targetDevice, configuration) = this->GetParam();
function = core->read_model(pathToModel);
const auto metaFile = CommonTestUtils::replaceExt(pathToModel, "meta");
if (CommonTestUtils::fileExists(metaFile)) {
pugi::xml_document doc;
doc.load_file(metaFile.c_str());
auto models = doc.child("meta_info").child("models");
sourceModel = models.child("initial_model").attribute("name").as_string();
for (const auto &model : models.children("model")) {
ocuranceInModels.push_back({model.attribute("name").as_string(), model.attribute("count").as_uint()});
}
auto portsInfo = doc.child("meta_info").child("ports_info");
auto getPortInfo = [&](size_t id) {
LayerTestsUtils::PortInfo info;
for (const auto &p : portsInfo.children()) {
if (p.attribute("id").as_uint() == id) {
info.convert_to_const = p.attribute("convert_to_const").as_bool();
if (std::strcmp(p.attribute("min").as_string(), "undefined") != 0) {
info.min = p.attribute("min").as_double();
} else {
info.min = -10;
}
if (std::strcmp(p.attribute("max").as_string(), "undefined") != 0) {
info.max = p.attribute("max").as_double();
} else {
info.max = 10;
}
break;
}
}
return info;
};

auto params = function->get_parameters();
for (const auto &param : params) {
auto idx = -1;
for (size_t i = 0; i < param->get_output_size(); i++) {
for (const auto &node : param->get_output_target_inputs(i)) {
const auto nodePtr = node.get_node()->shared_from_this();
for (size_t port = 0; port < nodePtr->get_input_size(); ++port) {
if (nodePtr->get_input_node_ptr(port)->shared_from_this() == param->shared_from_this()) {
idx = port;
break;
}
}
}
}
}
EXPECT_GE(idx, 0);
EXPECT_GE(idx, 0);

auto info = getPortInfo(idx);
if (info.convert_to_const) {
const auto constant = ngraph::builder::makeConstant(param->get_element_type(),
param->get_shape(),
std::vector<double>{},
true,
info.max,
info.min,
1);
ov::replace_node(param, constant);
function->remove_parameter(param);
}
}
}
std::vector<InputShape> inputShapes;
for (const auto& param : function -> get_parameters()) {
if (param->get_partial_shape().is_static()) {
inputShapes.push_back(InputShape{{}, {param->get_shape()}});
} else {
ov::Shape midShape;
for (const auto s : param->get_partial_shape()) {
int dimValue = s.get_length();
if (s.is_dynamic()) {
CommonTestUtils::fill_data_random(&dimValue, 1, s.get_max_length() - s.get_min_length(), s.get_min_length(), 1);
auto info = getPortInfo(idx);
if (info.convert_to_const) {
const auto constant = ngraph::builder::makeConstant(param->get_element_type(),
param->get_shape(),
std::vector<double>{},
true,
info.max,
info.min,
1);
ov::replace_node(param, constant);
function->remove_parameter(param);
}
midShape.push_back(dimValue);
}
inputShapes.push_back(InputShape{param->get_partial_shape(), { param->get_partial_shape().get_min_shape(),
param->get_partial_shape().get_max_shape(),
midShape }});
}
std::vector<ov::Shape> staticShapes;
for (const auto param : function->get_parameters()) {
if (param->get_partial_shape().is_static()) {
staticShapes.push_back(param->get_shape());
} else {
staticShapes.push_back(param->get_partial_shape().get_max_shape());
}
}
std::vector<InputShape> inputShapes;
for (const auto& param : function -> get_parameters()) {
if (param->get_partial_shape().is_static()) {
inputShapes.push_back(InputShape{{}, {param->get_shape()}});
} else {
ov::Shape midShape;
for (const auto s : param->get_partial_shape()) {
int dimValue = s.get_length();
if (s.is_dynamic()) {
CommonTestUtils::fill_data_random(&dimValue, 1, s.get_max_length() - s.get_min_length(), s.get_min_length(), 1);
}
midShape.push_back(dimValue);
}
inputShapes.push_back(InputShape{param->get_partial_shape(), { param->get_partial_shape().get_min_shape(),
param->get_partial_shape().get_max_shape(),
midShape }});
}
}
init_input_shapes(inputShapes);
} else {
IE_THROW() << "Crash happens";
}
init_input_shapes(inputShapes);
}

} // namespace subgraph
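Both query_model() and SetUp() above now follow the same guard shape: construct a CrashHandler so fatal signals long-jump instead of terminating the process, run the real work in the branch where setjmp/sigsetjmp returns 0, and treat a non-zero return (the jump target) as "the work crashed", converted into an ordinary exception so the runner moves on to the next test. A self-contained sketch of that shape is below; it uses a local jmp_buf and a deliberately crashing workload instead of the patch's CommonTestUtils::env and real test bodies.

    #include <csignal>
    #include <setjmp.h>
    #include <iostream>
    #include <stdexcept>

    static jmp_buf gEnv;   // local stand-in for CommonTestUtils::env

    static void jumpOnCrash(int errCode) {
        std::signal(SIGSEGV, SIG_DFL);          // avoid re-entering the handler
        std::cerr << "crash with code " << errCode << std::endl;
    #ifdef _WIN32
        longjmp(gEnv, 1);
    #else
        siglongjmp(gEnv, 1);
    #endif
    }

    // Shape of the guarded test bodies; doWork stands in for the real test logic.
    void guardedBody(void (*doWork)()) {
        std::signal(SIGSEGV, jumpOnCrash);      // roughly what CrashHandler's constructor does
    #ifdef _WIN32
        if (setjmp(gEnv) == 0) {
    #else
        if (sigsetjmp(gEnv, 1) == 0) {
    #endif
            // status is recorded as CRASHED before the work and overwritten afterwards,
            // so a crash still leaves the correct entry in the saved report
            doWork();
        } else {
            std::signal(SIGSEGV, SIG_DFL);
            throw std::runtime_error("Crash happens");
        }
    }

    int main() {
        try {
            guardedBody([] { volatile int* p = nullptr; *p = 42; });   // deliberate SIGSEGV
        } catch (const std::exception& e) {
            std::cerr << e.what() << ", continuing with the next test" << std::endl;
        }
        return 0;
    }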
@@ -16,6 +16,7 @@
#include <cpp/ie_cnn_network.h>
#include "gtest/gtest.h"
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/crash_handler.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "functional_test_utils/precision_utils.hpp"
#include <ie_core.hpp>
@@ -8,17 +8,9 @@
#include <base/behavior_test_utils.hpp>
#include "behavior/plugin/life_time.hpp"

#ifndef _WIN32
#include <signal.h>
#include <setjmp.h>
#endif
#include <setjmp.h>

namespace BehaviorTestsDefinitions {

#ifndef _WIN32
static jmp_buf env;
#endif

std::string HoldersTest::getTestCaseName(testing::TestParamInfo<HoldersParams> obj) {
std::string targetDevice;
std::vector<int> order;
@@ -38,19 +30,6 @@ namespace BehaviorTestsDefinitions {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(targetDevice, order) = this->GetParam();
function = ngraph::builder::subgraph::makeConvPoolRelu();

#ifndef _WIN32
// configure handling of crash
auto crashHandler = [](int errCode) {
std::cerr << "Unexpected application crash with code: " << errCode << std::endl;
siglongjmp(env, 1);
};
struct sigaction act;
act.sa_handler = crashHandler;
sigemptyset(&act.sa_mask);
act.sa_flags = 0;
sigaction(SIGSEGV, &act, 0);
#endif
}

void release_order_test(std::vector<int> order, const std::string &deviceName,
@@ -90,29 +69,35 @@ namespace BehaviorTestsDefinitions {
}

TEST_P(HoldersTest, Orders) {
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());

// Test failed if crash happens
#ifdef _WIN32
EXPECT_NO_THROW(release_order_test(order, targetDevice, function));
if (setjmp(CommonTestUtils::env) == 0) {
#else
if (sigsetjmp(env, 1) == 0) {
release_order_test(order, targetDevice, function);
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
#endif
EXPECT_NO_THROW(release_order_test(order, targetDevice, function));
} else {
IE_THROW() << "Crash happens";
}
#endif
}

TEST_P(HoldersTestImportNetwork, Orders) {
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());

// Test failed if crash happens
#ifdef _WIN32
EXPECT_NO_THROW(release_order_test(order, targetDevice, function));
if (setjmp(CommonTestUtils::env) == 0) {
#else
if (sigsetjmp(env, 1) == 0) {
release_order_test(order, targetDevice, function);
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
#endif
EXPECT_NO_THROW(release_order_test(order, targetDevice, function));
} else {
IE_THROW() << "Crash happens";
}
#endif
}

std::string HoldersTestOnImportedNetwork::getTestCaseName(testing::TestParamInfo<std::string> obj) {
@@ -7,6 +7,7 @@
#endif

#include "single_layer_tests/op_impl_check/op_impl_check.hpp"
#include "common_test_utils/crash_handler.hpp"

namespace ov {
namespace test {
@@ -16,21 +17,26 @@ void OpImplCheckTest::run() {
if (function == nullptr) {
GTEST_FAIL() << "Target function is empty!";
}
auto crashHandler = [](int errCode) {
auto& s = LayerTestsUtils::Summary::getInstance();
s.saveReport();
std::cerr << "Unexpected application crash with code: " << errCode << std::endl;
std::abort();
};
signal(SIGSEGV, crashHandler);

summary.setDeviceName(targetDevice);
try {
auto executableNetwork = core->compile_model(function, targetDevice, configuration);
summary.updateOPsImplStatus(function, true);
} catch (...) {
summary.updateOPsImplStatus(function, false);
GTEST_FAIL() << "Error in the LoadNetwork!";
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());

// place to jump in case of a crash
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
#endif
summary.setDeviceName(targetDevice);
try {
auto executableNetwork = core->compile_model(function, targetDevice, configuration);
summary.updateOPsImplStatus(function, true);
} catch (...) {
summary.updateOPsImplStatus(function, false);
GTEST_FAIL() << "Error in the LoadNetwork!";
}
} else {
IE_THROW() << "Crash happens";
}
}
@@ -21,6 +21,7 @@
#include "common_test_utils/ngraph_test_utils.hpp"
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/crash_handler.hpp"

#include "functional_test_utils/skip_tests_config.hpp"
#include "functional_test_utils/plugin_cache.hpp"
@@ -26,12 +26,6 @@ public:
    virtual void serialize();
    virtual void query_model();

    void TearDown() override {
        if (!configuration.empty()) {
            ov::test::utils::PluginCache::get().core().reset();
        }
    }

protected:
    virtual void compare(const std::vector<ov::Tensor> &expected,
                         const std::vector<ov::Tensor> &actual);
@@ -30,40 +30,45 @@ void LayerTestsCommon::Run() {
functionRefs = ngraph::clone_function(*function);
functionRefs->set_friendly_name("refFunction");
}
auto crashHandler = [](int errCode) {
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());

// place to jump in case of a crash
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
#endif
auto &s = Summary::getInstance();
s.saveReport();
std::cout << "Unexpected application crash!" << std::endl;
std::abort();
};
signal(SIGSEGV, crashHandler);
s.setDeviceName(targetDevice);

auto &s = Summary::getInstance();
s.setDeviceName(targetDevice);
if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
s.updateOPsStats(functionRefs, PassRate::Statuses::SKIPPED);
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
} else {
s.updateOPsStats(functionRefs, PassRate::Statuses::CRASHED);
}

if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
s.updateOPsStats(functionRefs, PassRate::Statuses::SKIPPED);
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
try {
LoadNetwork();
GenerateInputs();
Infer();
Validate();
s.updateOPsStats(functionRefs, PassRate::Statuses::PASSED);
}
catch (const std::runtime_error &re) {
s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
GTEST_FATAL_FAILURE_(re.what());
} catch (const std::exception &ex) {
s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
GTEST_FATAL_FAILURE_(ex.what());
} catch (...) {
s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
GTEST_FATAL_FAILURE_("Unknown failure occurred.");
}
} else {
s.updateOPsStats(functionRefs, PassRate::Statuses::CRASHED);
}

try {
LoadNetwork();
GenerateInputs();
Infer();
Validate();
s.updateOPsStats(functionRefs, PassRate::Statuses::PASSED);
}
catch (const std::runtime_error &re) {
s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
GTEST_FATAL_FAILURE_(re.what());
} catch (const std::exception &ex) {
s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
GTEST_FATAL_FAILURE_(ex.what());
} catch (...) {
s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
GTEST_FATAL_FAILURE_("Unknown failure occurred.");
IE_THROW() << "Crash happens";
}
}
@@ -18,6 +18,7 @@
#include "ngraph_functions/utils/ngraph_helpers.hpp"

#include "common_test_utils/file_utils.hpp"
#include "common_test_utils/crash_handler.hpp"
#include "functional_test_utils/ov_tensor_utils.hpp"
#include "functional_test_utils/skip_tests_config.hpp"

@@ -25,6 +26,8 @@
#include "shared_test_classes/base/utils/generate_inputs.hpp"
#include "shared_test_classes/base/utils/compare_results.hpp"

#include <setjmp.h>

namespace ov {
namespace test {
@@ -34,52 +37,56 @@ std::ostream& operator <<(std::ostream& os, const InputShape& inputShape) {
}

void SubgraphBaseTest::run() {
auto crashHandler = [](int errCode) {
auto& s = LayerTestsUtils::Summary::getInstance();
s.saveReport();
std::cerr << "Unexpected application crash with code: " << errCode << std::endl;
std::abort();
};
signal(SIGSEGV, crashHandler);
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());

LayerTestsUtils::PassRate::Statuses status = FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()
? LayerTestsUtils::PassRate::Statuses::SKIPPED
: LayerTestsUtils::PassRate::Statuses::CRASHED;
summary.setDeviceName(targetDevice);
summary.updateOPsStats(function, status);
SKIP_IF_CURRENT_TEST_IS_DISABLED();
// place to jump in case of a crash
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
#endif
LayerTestsUtils::PassRate::Statuses status = FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()
? LayerTestsUtils::PassRate::Statuses::SKIPPED
: LayerTestsUtils::PassRate::Statuses::CRASHED;
summary.setDeviceName(targetDevice);
summary.updateOPsStats(function, status);
SKIP_IF_CURRENT_TEST_IS_DISABLED();

ASSERT_FALSE(targetStaticShapes.empty()) << "Target Static Shape is empty!!!";
std::string errorMessage;
try {
compile_model();
for (const auto& targetStaticShapeVec : targetStaticShapes) {
try {
if (!inputDynamicShapes.empty()) {
// resize ngraph function according new target shape
// Note: output shapes of some nodes depend on the input data
// so for some tests we need to override this function and replace parameter with constant node to get correct output shapes
init_ref_function(functionRefs, targetStaticShapeVec);
ASSERT_FALSE(targetStaticShapes.empty()) << "Target Static Shape is empty!!!";
std::string errorMessage;
try {
compile_model();
for (const auto& targetStaticShapeVec : targetStaticShapes) {
try {
if (!inputDynamicShapes.empty()) {
// resize ngraph function according new target shape
// Note: output shapes of some nodes depend on the input data
// so for some tests we need to override this function and replace parameter with constant node to get correct output shapes
init_ref_function(functionRefs, targetStaticShapeVec);
}
generate_inputs(targetStaticShapeVec);
} catch (const std::exception& ex) {
throw std::runtime_error("Incorrect target static shape: " +
CommonTestUtils::vec2str(targetStaticShapeVec) + " " + ex.what());
}
generate_inputs(targetStaticShapeVec);
} catch (const std::exception& ex) {
throw std::runtime_error("Incorrect target static shape: " +
CommonTestUtils::vec2str(targetStaticShapeVec) + " " + ex.what());
infer();
validate();
}
infer();
validate();
status = LayerTestsUtils::PassRate::Statuses::PASSED;
} catch (const std::exception& ex) {
status = LayerTestsUtils::PassRate::Statuses::FAILED;
errorMessage = ex.what();
} catch (...) {
status = LayerTestsUtils::PassRate::Statuses::FAILED;
errorMessage = "Unknown failure occurred.";
}
status = LayerTestsUtils::PassRate::Statuses::PASSED;
} catch (const std::exception& ex) {
status = LayerTestsUtils::PassRate::Statuses::FAILED;
errorMessage = ex.what();
} catch (...) {
status = LayerTestsUtils::PassRate::Statuses::FAILED;
errorMessage = "Unknown failure occurred.";
}
summary.updateOPsStats(function, status);
if (status != LayerTestsUtils::PassRate::Statuses::PASSED) {
GTEST_FATAL_FAILURE_(errorMessage.c_str());
summary.updateOPsStats(function, status);
if (status != LayerTestsUtils::PassRate::Statuses::PASSED) {
GTEST_FATAL_FAILURE_(errorMessage.c_str());
}
} else {
IE_THROW() << "Crash happens";
}
}
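One detail worth noting in the run()/Run() rewrites above: the status is set to CRASHED (or SKIPPED) and written into the summary before the heavy work starts, then overwritten with PASSED or FAILED at the end. That ordering is what keeps the saved report meaningful after a crash, even when the process never reaches the end of the function. A small sketch of that bookkeeping follows; Status and Report are hypothetical stand-ins for PassRate::Statuses and Summary.

    #include <map>
    #include <string>

    enum class Status { SKIPPED, CRASHED, PASSED, FAILED };   // mirrors PassRate::Statuses

    struct Report {                                           // hypothetical stand-in for Summary
        std::map<std::string, Status> entries;
        void update(const std::string& test, Status s) { entries[test] = s; }
    };

    void runOne(Report& report, const std::string& test, bool disabled, void (*work)()) {
        // pessimistic default: if work() crashes, this is the entry the saved report keeps
        report.update(test, disabled ? Status::SKIPPED : Status::CRASHED);
        if (disabled)
            return;
        try {
            work();
            report.update(test, Status::PASSED);   // reached only on normal completion
        } catch (...) {
            report.update(test, Status::FAILED);
        }
    }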
src/tests/ie_test_utils/common_test_utils/crash_handler.cpp (new file, 55 lines)
@@ -0,0 +1,55 @@
// Copyright (C) 2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "crash_handler.hpp"

namespace CommonTestUtils {

// environment to restore in case of crash
jmp_buf env;

CrashHandler::CrashHandler() {
    auto crashHandler = [](int errCode) {
        std::cerr << "Unexpected application crash with code: " << errCode << std::endl;

        // reset custom signal handler to avoid an infinite loop
        // if for some reason sigsetjmp is not available
        signal(SIGABRT, SIG_DFL);
        signal(SIGSEGV, SIG_DFL);
        signal(SIGILL, SIG_DFL);
#ifndef _WIN32
        signal(SIGBUS, SIG_DFL);
        signal(SIGFPE, SIG_DFL);
#endif

        // goto sigsetjmp
#ifdef _WIN32
        longjmp(env, 1);
#else
        siglongjmp(env, 1);
#endif
    };

    // setup custom handler for signals
    signal(SIGABRT, crashHandler);
    signal(SIGSEGV, crashHandler);
    signal(SIGILL, crashHandler);
#ifndef _WIN32
    signal(SIGFPE, crashHandler);
    signal(SIGBUS, crashHandler);
#endif
}

CrashHandler::~CrashHandler() {
    // reset custom signal handler to avoid an infinite loop
    signal(SIGABRT, SIG_DFL);
    signal(SIGSEGV, SIG_DFL);
    signal(SIGILL, SIG_DFL);
#ifndef _WIN32
    signal(SIGFPE, SIG_DFL);
    signal(SIGBUS, SIG_DFL);
#endif
}

}  // namespace CommonTestUtils
src/tests/ie_test_utils/common_test_utils/crash_handler.hpp (new file, 24 lines)
@@ -0,0 +1,24 @@
// Copyright (C) 2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <gtest/gtest.h>

#include "common_utils.hpp"

#include <signal.h>
#include <setjmp.h>

namespace CommonTestUtils {

extern jmp_buf env;

class CrashHandler {
public:
    CrashHandler();
    ~CrashHandler();
};

}  // namespace CommonTestUtils
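Putting the two new files together, any test that wants crash protection only needs the pattern below (a sketch assuming crash_handler.hpp is on the include path; the "risky work" comment marks whatever the concrete test actually does):

    #include <setjmp.h>
    #include <iostream>
    #include <memory>

    #include "common_test_utils/crash_handler.hpp"   // declares CommonTestUtils::env and CrashHandler

    void protectedSection() {
        // constructing the handler redirects SIGSEGV/SIGABRT/SIGILL (plus SIGFPE/SIGBUS on POSIX)
        // to a long jump back to the setjmp/sigsetjmp below instead of killing the process
        auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());

    #ifdef _WIN32
        if (setjmp(CommonTestUtils::env) == 0) {
    #else
        if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
    #endif
            // ... risky work: read a model, compile it, run inference ...
        } else {
            std::cerr << "work crashed, but the test binary keeps running" << std::endl;
        }
        // ~CrashHandler() restores the default signal dispositions when the pointer goes out of scope
    }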