[conformance] SetUp timeout per test (#10426)

This commit is contained in:
Sofya Balandina 2022-03-15 18:28:19 +03:00 committed by GitHub
parent 37e05afd12
commit 499ffcaa59
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 166 additions and 62 deletions

View File

@ -47,6 +47,7 @@ The target is able to take the following command-line arguments:
* `--extract_body` allows to count extracted operation bodies to report.
* `--shape_mode` Optional. Allows to run `static`, `dynamic` or both scenarios. Default value is empty string allows to run both scenarios. Possible values
are `static`, `dynamic`, ``
* `--test_timeout` Setup timeout for each test in seconds, default timeout 900 seconds (15 minutes).
* All `gtest` command-line parameters
The result of execution is `report.xml` file. It demonstrates tests statistic like pass rate, passed, crashed, skipped failed tests and plugin implementation

View File

@ -6,6 +6,7 @@
#include <gflags/gflags.h>
#include <iostream>
#include <limits.h>
namespace ov {
namespace test {
@ -35,6 +36,7 @@ static const char config_path_message[] = "Optional. Allows to specify path to f
static const char extract_body_message[] = "Optional. Allows to count extracted operation bodies to report. Default value is false.";
static const char shape_mode_message[] = "Optional. Allows to run `static`, `dynamic` or both scenarios. Default value is empty string allows to run both"
" scenarios. Possible values are `static`, `dynamic`, ``";
static const char test_timeout_message[] = "Optional. Setup timeout for each test in seconds, default timeout 900seconds (15 minutes).";
DEFINE_bool(h, false, help_message);
@ -50,6 +52,7 @@ DEFINE_bool(extend_report, false, extend_report_config_message);
DEFINE_bool(report_unique_name, false, report_unique_name_message);
DEFINE_bool(extract_body, false, extract_body_message);
DEFINE_string(shape_mode, "", shape_mode_message);
DEFINE_uint32(test_timeout, UINT_MAX, test_timeout_message);
/**
* @brief This function shows a help message
@ -72,6 +75,7 @@ static void showUsage() {
std::cout << " --output_folder \"<path>\" " << output_folder_message << std::endl;
std::cout << " --plugin_lib_name " << output_folder_message << std::endl;
std::cout << " --shape_mode \"<value>\" " << shape_mode_message << std::endl;
std::cout << " --test_timeout \"<value>\" " << test_timeout_message << std::endl;
}
} // namespace conformance

View File

@ -17,6 +17,8 @@
#include "gflag_config.hpp"
#include "conformance.hpp"
#include "common_test_utils/crash_handler.hpp"
using namespace ov::test::conformance;
int main(int argc, char* argv[]) {
@ -56,6 +58,8 @@ int main(int argc, char* argv[]) {
throw std::runtime_error("Incorrect value for `--shape_mode`. Should be `dynamic`, `static` or ``. Current value is `" + FLAGS_shape_mode + "`");
}
CommonTestUtils::CrashHandler::SetUpTimeout(FLAGS_test_timeout);
// ---------------------------Initialization of Gtest env -----------------------------------------------
ov::test::conformance::targetDevice = FLAGS_device.c_str();
ov::test::conformance::IRFolderPaths = CommonTestUtils::splitStringByDelimiter(FLAGS_input_folders);

View File

@ -56,19 +56,21 @@ std::string ReadIRTest::getTestCaseName(const testing::TestParamInfo<ReadIRParam
void ReadIRTest::query_model() {
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());
auto &s = LayerTestsUtils::Summary::getInstance();
// place to jump in case of a crash
int jmpRes = 0;
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
jmpRes = setjmp(CommonTestUtils::env);
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
jmpRes = sigsetjmp(CommonTestUtils::env, 1);
#endif
if (jmpRes == CommonTestUtils::JMP_STATUS::ok) {
crashHandler->StartTimer();
if (functionRefs == nullptr) {
functionRefs = ngraph::clone_function(*function);
functionRefs->set_friendly_name("refFunction");
}
auto &s = LayerTestsUtils::Summary::getInstance();
s.setDeviceName(targetDevice);
if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
@ -83,7 +85,10 @@ void ReadIRTest::query_model() {
} catch (...) {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::FAILED);
}
} else {
} else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) {
IE_THROW() << "Crash happens";
} else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::HANGED);
IE_THROW() << "Crash happens";
}
}
@ -93,11 +98,14 @@ void ReadIRTest::SetUp() {
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());
// place to jump in case of a crash
int jmpRes = 0;
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
jmpRes = setjmp(CommonTestUtils::env);
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
jmpRes = sigsetjmp(CommonTestUtils::env, 1);
#endif
if (jmpRes == CommonTestUtils::JMP_STATUS::ok) {
crashHandler->StartTimer();
std::tie(pathToModel, targetDevice, configuration) = this->GetParam();
function = core->read_model(pathToModel);
const auto metaFile = CommonTestUtils::replaceExt(pathToModel, "meta");
@ -213,8 +221,10 @@ void ReadIRTest::SetUp() {
}
}
init_input_shapes(inputShapes);
} else {
} else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) {
IE_THROW() << "Crash happens";
} else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
IE_THROW() << "Hange happens";
}
}

View File

@ -74,9 +74,9 @@ namespace BehaviorTestsDefinitions {
// Test failed if crash happens
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
if (setjmp(CommonTestUtils::env) == CommonTestUtils::JMP_STATUS::ok) {
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
if (sigsetjmp(CommonTestUtils::env, 1) == CommonTestUtils::JMP_STATUS::ok) {
#endif
EXPECT_NO_THROW(release_order_test(order, targetDevice, function));
} else {
@ -90,9 +90,9 @@ namespace BehaviorTestsDefinitions {
// Test failed if crash happens
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
if (setjmp(CommonTestUtils::env) == CommonTestUtils::JMP_STATUS::ok) {
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
if (sigsetjmp(CommonTestUtils::env, 1) == CommonTestUtils::JMP_STATUS::ok) {
#endif
EXPECT_NO_THROW(release_order_test(order, targetDevice, function));
} else {

View File

@ -22,11 +22,14 @@ void OpImplCheckTest::run() {
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());
// place to jump in case of a crash
int jmpRes = 0;
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
jmpRes = setjmp(CommonTestUtils::env);
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
jmpRes = sigsetjmp(CommonTestUtils::env, 1);
#endif
if (jmpRes == CommonTestUtils::JMP_STATUS::ok) {
crashHandler->StartTimer();
summary.setDeviceName(targetDevice);
try {
auto executableNetwork = core->compile_model(function, targetDevice, configuration);
@ -35,8 +38,12 @@ void OpImplCheckTest::run() {
summary.updateOPsImplStatus(function, false);
GTEST_FAIL() << "Error in the LoadNetwork!";
}
} else {
} else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) {
summary.updateOPsImplStatus(function, false);
IE_THROW() << "Crash happens";
} else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
summary.updateOPsImplStatus(function, false);
IE_THROW() << "Hange happens";
}
}

View File

@ -33,23 +33,25 @@ void LayerTestsCommon::Run() {
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());
auto &s = Summary::getInstance();
s.setDeviceName(targetDevice);
if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
s.updateOPsStats(functionRefs, PassRate::Statuses::SKIPPED);
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
} else {
s.updateOPsStats(functionRefs, PassRate::Statuses::CRASHED);
}
// place to jump in case of a crash
int jmpRes = 0;
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
jmpRes = setjmp(CommonTestUtils::env);
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
jmpRes = sigsetjmp(CommonTestUtils::env, 1);
#endif
auto &s = Summary::getInstance();
s.setDeviceName(targetDevice);
if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
s.updateOPsStats(functionRefs, PassRate::Statuses::SKIPPED);
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
} else {
s.updateOPsStats(functionRefs, PassRate::Statuses::CRASHED);
}
if (jmpRes == CommonTestUtils::JMP_STATUS::ok) {
crashHandler->StartTimer();
try {
LoadNetwork();
GenerateInputs();
@ -67,7 +69,10 @@ void LayerTestsCommon::Run() {
s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED);
GTEST_FATAL_FAILURE_("Unknown failure occurred.");
}
} else {
} else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) {
IE_THROW() << "Crash happens";
} else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
s.updateOPsStats(functionRefs, PassRate::Statuses::HANGED);
IE_THROW() << "Crash happens";
}
}

View File

@ -37,25 +37,29 @@ std::ostream& operator <<(std::ostream& os, const InputShape& inputShape) {
}
void SubgraphBaseTest::run() {
bool isCurrentTestDisabled = FuncTestUtils::SkipTestsConfig::currentTestIsDisabled();
LayerTestsUtils::PassRate::Statuses status = isCurrentTestDisabled ?
LayerTestsUtils::PassRate::Statuses::SKIPPED :
LayerTestsUtils::PassRate::Statuses::CRASHED;
summary.setDeviceName(targetDevice);
summary.updateOPsStats(function, status);
if (isCurrentTestDisabled)
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
// in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());
// place to jump in case of a crash
int jmpRes = 0;
#ifdef _WIN32
if (setjmp(CommonTestUtils::env) == 0) {
jmpRes = setjmp(CommonTestUtils::env);
#else
if (sigsetjmp(CommonTestUtils::env, 1) == 0) {
jmpRes = sigsetjmp(CommonTestUtils::env, 1);
#endif
bool isCurrentTestDisabled = FuncTestUtils::SkipTestsConfig::currentTestIsDisabled();
LayerTestsUtils::PassRate::Statuses status = isCurrentTestDisabled ?
LayerTestsUtils::PassRate::Statuses::SKIPPED :
LayerTestsUtils::PassRate::Statuses::CRASHED;
summary.setDeviceName(targetDevice);
summary.updateOPsStats(function, status);
if (isCurrentTestDisabled)
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
if (jmpRes == CommonTestUtils::JMP_STATUS::ok) {
crashHandler->StartTimer();
ASSERT_FALSE(targetStaticShapes.empty() && !function->get_parameters().empty()) << "Target Static Shape is empty!!!";
std::string errorMessage;
@ -89,7 +93,10 @@ void SubgraphBaseTest::run() {
if (status != LayerTestsUtils::PassRate::Statuses::PASSED) {
GTEST_FATAL_FAILURE_(errorMessage.c_str());
}
} else {
} else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) {
IE_THROW() << "Crash happens";
} else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
summary.updateOPsStats(function, LayerTestsUtils::PassRate::Statuses::HANGED);
IE_THROW() << "Crash happens";
}
}

View File

@ -3,13 +3,20 @@
//
#include "crash_handler.hpp"
#include <limits.h>
namespace CommonTestUtils {
// environment to restore in case of crash
jmp_buf env;
unsigned int CrashHandler::MAX_TEST_WORK_TIME = UINT_MAX;
CrashHandler::CrashHandler() {
    // set the default timeout to 15 minutes (900 s) when none was configured
if (MAX_TEST_WORK_TIME == UINT_MAX) {
MAX_TEST_WORK_TIME = 900;
}
auto crashHandler = [](int errCode) {
std::cerr << "Unexpected application crash with code: " << errCode << std::endl;
@ -21,13 +28,21 @@ CrashHandler::CrashHandler() {
#ifndef _WIN32
signal(SIGBUS, SIG_DFL);
signal(SIGFPE, SIG_DFL);
signal(SIGALRM, SIG_DFL);
#endif
// goto sigsetjmp
#ifdef _WIN32
longjmp(env, 1);
longjmp(env, JMP_STATUS::anyError);
#else
siglongjmp(env, 1);
// reset timeout
alarm(0);
if (errCode == SIGALRM) {
std::cerr << "Test finished by timeout" << std::endl;
siglongjmp(env, JMP_STATUS::alarmErr);
} else {
siglongjmp(env, JMP_STATUS::anyError);
}
#endif
};
@ -38,6 +53,7 @@ CrashHandler::CrashHandler() {
#ifndef _WIN32
signal(SIGFPE, crashHandler);
signal(SIGBUS, crashHandler);
signal(SIGALRM, crashHandler);
#endif
}
@ -49,7 +65,23 @@ CrashHandler::~CrashHandler() {
#ifndef _WIN32
signal(SIGFPE, SIG_DFL);
signal(SIGBUS, SIG_DFL);
signal(SIGALRM, SIG_DFL);
#endif
// reset timeout
#ifndef _WIN32
alarm(0);
#endif
}
void CrashHandler::StartTimer() {
#ifndef _WIN32
alarm(MAX_TEST_WORK_TIME);
#endif
}
void CrashHandler::SetUpTimeout(unsigned int timeout) {
MAX_TEST_WORK_TIME = timeout;
}
} // namespace CommonTestUtils

View File

@ -15,10 +15,16 @@ namespace CommonTestUtils {
extern jmp_buf env;
enum JMP_STATUS { ok = 0, anyError = 1, alarmErr = 2 };
class CrashHandler {
private:
static unsigned int MAX_TEST_WORK_TIME;
public:
CrashHandler();
~CrashHandler();
static void SetUpTimeout(unsigned int timeout);
void StartTimer();
};
} // namespace CommonTestUtils

View File

@ -30,21 +30,24 @@ struct PassRate {
PASSED,
FAILED,
SKIPPED,
CRASHED
CRASHED,
HANGED
};
unsigned long passed = 0;
unsigned long failed = 0;
unsigned long skipped = 0;
unsigned long crashed = 0;
unsigned long hanged = 0;
bool isImplemented = false;
PassRate() = default;
PassRate(unsigned long p, unsigned long f, unsigned long s, unsigned long c) {
PassRate(unsigned long p, unsigned long f, unsigned long s, unsigned long c, unsigned long h) {
passed = p;
failed = f;
skipped = s;
crashed = c;
hanged = h;
if (!isImplemented && passed > 0) {
isImplemented = true;
}
@ -55,10 +58,10 @@ struct PassRate {
}
float getPassrate() const {
if (passed + failed + crashed == 0) {
if (passed + failed + crashed + hanged == 0) {
return 0.f;
} else {
return passed * 100.f / (passed + failed + skipped + crashed);
return passed * 100.f / (passed + failed + skipped + crashed + hanged);
}
}
};
@ -110,7 +113,7 @@ public:
#ifdef IE_TEST_DEBUG
void saveDebugReport(const char* className, const char* opName, unsigned long passed, unsigned long failed,
unsigned long skipped, unsigned long crashed);
unsigned long skipped, unsigned long crashed, unsigned long hanged);
#endif //IE_TEST_DEBUG
void saveReport();

View File

@ -110,8 +110,11 @@ def collect_statistic(root: ET.Element, is_conformance_mode: bool):
trusted_ops[device.tag] = 0
covered_ops[device.tag] = 0
for op in results[device.tag]:
# for correct display of reports without hanged item in report.xml
results[device.tag][op]["hanged"] = results[device.tag][op].get("hanged", 0)
op_test_cnt = int(results[device.tag][op]["passed"]) + int(results[device.tag][op]["failed"]) + \
int(results[device.tag][op]["crashed"]) + int(results[device.tag][op]["skipped"])
int(results[device.tag][op]["crashed"]) + int(results[device.tag][op]["skipped"]) + \
int(results[device.tag][op]["hanged"])
if op_test_cnt == 0:
continue
covered_ops[device.tag] += 1

View File

@ -116,7 +116,7 @@ function filterTable() {
selector = [];
select.forEach(item => {
if (item == '100p') {
selector.push('.value:visible[crashed="0"][failed="0"][skipped="0"][value!="---"]');
selector.push('.value:visible[crashed="0"][failed="0"][skipped="0"][hanged="0"][value!="---"]');
}
if (item == '100f') {
selector.push('.value:visible[passed="0"][value!="---"]');
@ -130,6 +130,9 @@ function filterTable() {
if (item == 'c') {
selector.push('.value:visible[crashed!="0"][value!="---"]');
}
if (item == 'h') {
selector.push('.value:visible[hanged!="0"][value!="---"]');
}
if (item == 's') {
selector.push('.value:visible[value!="---"][skipped!="0"]');
}

View File

@ -34,6 +34,7 @@
<span class="red">F:0</span><span>Failed</span>
<span class="grey">S:2</span><span>Skipped</span>
<span class="dark">C:0</span><span>Crashed</span>
<span class="grey-red">H:0</span><span>Hanged</span>
</div>
<div>
<span><b>Plugin operation implementation status:</b></span>
@ -75,6 +76,7 @@
<option value="f">Failed</option>
<option value="s">Skipped</option>
<option value="c">Crashed</option>
<option value="h">Hanged</option>
<option value="ex">Existing tests</option>
<option value="na">No tests</option>
<option value="ns">No status</option>
@ -137,24 +139,26 @@
<td class="value {{ d }} {% if results[d][op].implemented == 'true' -%} impl {% else -%} not_impl {% endif -%}"
passed="{{ results[d][op].passed }}" failed="{{ results[d][op].failed }}"
skipped="{{ results[d][op].skipped }}" crashed="{{ results[d][op].crashed }}"
hanged="{{ results[d][op].hanged }}"
value="{% if (results[d][op].passed != '0' or results[d][op].failed != '0' or results[d][op].crashed != '0' or results[d][op].skipped) != '0' -%}{{ results[d][op].passrate }}{% else -%}---{% endif -%}"
title="{% if results[d][op].implemented == 'true' -%}
{{op}} is implemented in {{d}} plugin
{% else -%}
{{op}} is not implemented in {{d}} plugin
{% endif -%}">
{% if (results[d][op].passed != '0' or results[d][op].failed != '0' or results[d][op].crashed != '0' or results[d][op].skipped) != '0' -%}
{% if (results[d][op].passed != '0' or results[d][op].failed != '0' or results[d][op].crashed != '0' or results[d][op].skipped != '0' or results[d][op].hanged != '0') -%}
{{ results[d][op].passrate }} %<br />
{% else -%}
---<br />
{% endif -%}
<div class="flex">
<div>
{% if (results[d][op].passed != '0' or results[d][op].failed != '0' or results[d][op].crashed != '0' or results[d][op].skipped) != '0' -%}
{% if (results[d][op].passed != '0' or results[d][op].failed != '0' or results[d][op].crashed != '0' or results[d][op].skipped != '0' or results[d][op].hanged != '0') -%}
<span class="green" title="Passed">P:{{ results[d][op].passed }}</span>
<span class="red" title="Failed">F:{{ results[d][op].failed }}</span>
<span class="grey" title="Skipped">S:{{ results[d][op].skipped }}</span>
<span class="dark" title="Crashed">C:{{ results[d][op].crashed }}</span>
<span class="grey-red" title="Hanged">H:{{ results[d][op].hanged }}</span>
{% else -%}
{% endif -%}
</div>

View File

@ -29,6 +29,9 @@ body {
.dark {
background: #8b000040;
}
.grey-red {
background: #5e121275;
}
.filters {
background: #FFF;
padding: 5px 10px;

View File

@ -70,20 +70,27 @@ void Summary::updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Sta
case PassRate::CRASHED:
passrate.crashed++;
break;
case PassRate::HANGED:
passrate.hanged++;
passrate.crashed--;
break;
}
} else {
switch (status) {
case PassRate::PASSED:
opsStats[op] = PassRate(1, 0, 0, 0);
opsStats[op] = PassRate(1, 0, 0, 0, 0);
break;
case PassRate::FAILED:
opsStats[op] = PassRate(0, 1, 0, 0);
opsStats[op] = PassRate(0, 1, 0, 0, 0);
break;
case PassRate::SKIPPED:
opsStats[op] = PassRate(0, 0, 1, 0);
opsStats[op] = PassRate(0, 0, 1, 0, 0);
break;
case PassRate::CRASHED:
opsStats[op] = PassRate(0, 0, 0, 1);
opsStats[op] = PassRate(0, 0, 0, 1, 0);
break;
case PassRate::HANGED:
opsStats[op] = PassRate(0, 0, 0, 0, 1);
break;
}
}
@ -96,7 +103,7 @@ void Summary::updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool imp
it->second.isImplemented = true;
}
} else {
opsStats[op] = PassRate(0, 0, 0, 0);
opsStats[op] = PassRate(0, 0, 0, 0, 0);
opsStats[op].isImplemented = implStatus;
}
}
@ -129,7 +136,8 @@ std::map<std::string, PassRate> Summary::getOpStatisticFromReport() {
auto f = std::stoi(child.attribute("failed").value());
auto s = std::stoi(child.attribute("skipped").value());
auto c = std::stoi(child.attribute("crashed").value());
PassRate obj(p, f, s, c);
auto h = std::stoi(child.attribute("hanged").value());
PassRate obj(p, f, s, c, h);
oldOpsStat.insert({entry, obj});
}
return oldOpsStat;
@ -217,11 +225,11 @@ void Summary::updateOPsImplStatus(const std::shared_ptr<ngraph::Function> &funct
#ifdef IE_TEST_DEBUG
void Summary::saveDebugReport(const char* className, const char* opName, unsigned long passed, unsigned long failed,
unsigned long skipped, unsigned long crashed) {
unsigned long skipped, unsigned long crashed, unsigned long hanged) {
std::string outputFilePath = "./part_report.txt";
std::ofstream file;
file.open(outputFilePath, std::ios_base::app);
file << className << ' ' << opName << ' ' << passed << ' ' << failed << ' ' << skipped << ' ' << crashed << '\n';
file << className << ' ' << opName << ' ' << passed << ' ' << failed << ' ' << skipped << ' ' << crashed << ' ' << hanged << '\n';
file.close();
}
#endif //IE_TEST_DEBUG
@ -302,6 +310,7 @@ void Summary::saveReport() {
entry.append_attribute("failed").set_value(it.second.failed);
entry.append_attribute("skipped").set_value(it.second.skipped);
entry.append_attribute("crashed").set_value(it.second.crashed);
entry.append_attribute("hanged").set_value(it.second.hanged);
entry.append_attribute("passrate").set_value(it.second.getPassrate());
}
@ -316,6 +325,7 @@ void Summary::saveReport() {
entry.append_attribute("failed").set_value(item.second.failed);
entry.append_attribute("skipped").set_value(item.second.skipped);
entry.append_attribute("crashed").set_value(item.second.crashed);
entry.append_attribute("hanged").set_value(item.second.hanged);
entry.append_attribute("passrate").set_value(item.second.getPassrate());
} else {
entry = currentDeviceNode.child(item.first.c_str());
@ -324,7 +334,8 @@ void Summary::saveReport() {
auto f = std::stoi(entry.attribute("failed").value()) + item.second.failed;
auto s = std::stoi(entry.attribute("skipped").value()) + item.second.skipped;
auto c = std::stoi(entry.attribute("crashed").value()) + item.second.crashed;
PassRate obj(p, f, s, c);
auto h = std::stoi(entry.attribute("hanged").value()) + item.second.hanged;
PassRate obj(p, f, s, c, h);
(implStatus || obj.isImplemented)
? entry.attribute("implemented").set_value(true)
@ -333,6 +344,7 @@ void Summary::saveReport() {
entry.attribute("failed").set_value(obj.failed);
entry.attribute("skipped").set_value(obj.skipped);
entry.attribute("crashed").set_value(obj.crashed);
entry.attribute("hanged").set_value(obj.hanged);
entry.attribute("passrate").set_value(obj.getPassrate());
}
}