[IE TESTS][IE CONFORMANCE] Implement infra to check operation implementation status in plugin (#8606)

* Init

* init 2

* init 3

* s

* Link error

* Init run

* Provide BinaryEltwiseExamples

* Report

* Update merge script

* Remove extra

* Filter + interface

* sssd

* Statistic

* Fix Win

* Update summary.cpp
This commit is contained in:
Irina Efode 2021-12-07 22:17:00 +03:00 committed by GitHub
parent d5d85dae90
commit d55e67736b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 471 additions and 46 deletions

View File

@ -0,0 +1,21 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "op_impl_check/op_impl_check.hpp"
#include "op_impl_check/single_op_graph.hpp"
#include "conformance.hpp"
namespace ConformanceTests {
using namespace ov::test::subgraph;
namespace {
// Instantiates one OpImplCheckTest per operation returned by createFunctions():
// every op known to the tracked opsets is compiled on the conformance target
// device (ConformanceTests::targetDevice) with an empty plugin config.
INSTANTIATE_TEST_SUITE_P(conformance,
OpImplCheckTest,
::testing::Combine(
::testing::ValuesIn(createFunctions()),
::testing::Values(targetDevice),
::testing::Values(std::map<std::string, std::string>())),
OpImplCheckTest::getTestCaseName);
} // namespace
} // namespace ConformanceTests

View File

@ -0,0 +1,41 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "gtest/gtest.h"
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/common_utils.hpp"
#include "functional_test_utils/layer_test_utils/summary.hpp"
#include "functional_test_utils/ov_plugin_cache.hpp"
namespace ov {
namespace test {
namespace subgraph {
// Parameter pack for one implementation-status check.
using OpImplParams = std::tuple<
std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Function>>, // Function to check
std::string, // Target Device
std::map<std::string, std::string>>; // Plugin Config
// Checks whether a plugin implements a single operation: run() tries to
// compile a one-op function on the target device and records the result in
// the shared Summary (see run() in op_impl_check.cpp).
class OpImplCheckTest : public testing::WithParamInterface<OpImplParams>,
public CommonTestUtils::TestsCommon {
protected:
// Process-wide report accumulator (singleton).
LayerTestsUtils::Summary& summary = LayerTestsUtils::Summary::getInstance();
// Shared plugin core; cached so plugins are loaded once per process.
std::shared_ptr<ov::runtime::Core> core = ov::test::utils::PluginCache::get().core();
// Single-op function under test; extracted from the test parameter in SetUp().
std::shared_ptr<ov::Function> function;
std::string targetDevice;
std::map<std::string, std::string> configuration;
public:
void run();
void SetUp() override;
static std::string getTestCaseName(const testing::TestParamInfo<OpImplParams> &obj);
};
} // namespace subgraph
} // namespace test
} // namespace ov

View File

@ -0,0 +1,35 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <functional_test_utils/layer_test_utils/summary.hpp>
#include <ngraph_functions/subgraph_builders.hpp>
namespace ov {
namespace test {
namespace subgraph {
using OpGenerator = std::map<ov::DiscreteTypeInfo, std::function<std::shared_ptr<ov::Function>()>>;
OpGenerator getOpGeneratorMap();
// Builds the list of (op type, single-op test function) pairs for every
// operation registered in the opsets tracked by the Summary singleton.
// Returns: one entry per unique op type; the function may be nullptr when the
// generator has no dedicated graph for that op (see generate()).
static const std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Function>>> createFunctions() {
    std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Function>>> res;
    auto opsets = LayerTestsUtils::Summary::getInstance().getOpSets();
    auto opGenerator = getOpGeneratorMap();
    // Collect unique type infos: the same op may appear in several opsets.
    std::set<ngraph::NodeTypeInfo> opsInfo;
    for (const auto& opset : opsets) {
        const auto &type_info_set = opset.get_type_info_set();
        opsInfo.insert(type_info_set.begin(), type_info_set.end());
    }
    for (const auto& type_info : opsInfo) {
        // Guard the lookup: the previous unchecked find()->second() dereferenced
        // end() (undefined behavior) for any op missing from the generator map.
        const auto generatorIt = opGenerator.find(type_info);
        if (generatorIt != opGenerator.end()) {
            res.push_back({type_info, generatorIt->second()});
        }
    }
    return res;
}
} // namespace subgraph
} // namespace test
} // namespace ov

View File

@ -0,0 +1,67 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <signal.h>
#ifdef _WIN32
#include <process.h>
#endif
#include "op_impl_check/op_impl_check.hpp"
namespace ov {
namespace test {
namespace subgraph {
void OpImplCheckTest::run() {
if (function == nullptr) {
GTEST_FAIL() << "Target function is empty!";
}
auto crashHandler = [](int errCode) {
auto& s = LayerTestsUtils::Summary::getInstance();
s.saveReport();
std::cerr << "Unexpected application crash with code: " << errCode << std::endl;
std::abort();
};
signal(SIGSEGV, crashHandler);
summary.setDeviceName(targetDevice);
try {
auto executableNetwork = core->compile_model(function, targetDevice, configuration);
summary.updateOPsImplStatus(function, true);
} catch (...) {
summary.updateOPsImplStatus(function, false);
GTEST_FAIL() << "Error in the LoadNetwork!";
}
}
// Unpacks the test parameter tuple into the fixture members.
void OpImplCheckTest::SetUp() {
    const auto &testParams = this->GetParam();
    const auto &funcInfo = std::get<0>(testParams);  // (type info, function) pair
    targetDevice = std::get<1>(testParams);
    configuration = std::get<2>(testParams);
    function = funcInfo.second;
}
// Builds a human-readable test name: "Function=<op>_<version>_Device=<dev>_Config=(...)".
std::string OpImplCheckTest::getTestCaseName(const testing::TestParamInfo<OpImplParams> &obj) {
    std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Function>> funcInfo;
    std::string targetDevice;
    std::map<std::string, std::string> config;
    std::tie(funcInfo, targetDevice, config) = obj.param;

    std::ostringstream result;
    // Stream the op name and opset version directly instead of building a
    // temporary "friendlyName" string first.
    result << "Function=" << funcInfo.first.name << "_" << funcInfo.first.get_version() << "_";
    result << "Device=" << targetDevice << "_";
    result << "Config=(";
    for (const auto &configItem : config) {
        result << configItem.first << "=" << configItem.second << "_";
    }
    result << ")";
    return result.str();
}
// The parameterized entry point: passes iff the plugin compiles the
// single-op function (see OpImplCheckTest::run above).
TEST_P(OpImplCheckTest, checkPluginImplementation) {
run();
}
} // namespace subgraph
} // namespace test
} // namespace ov

View File

@ -0,0 +1,75 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <op_impl_check/op_impl_check.hpp>
#include <op_impl_check/single_op_graph.hpp>
namespace ov {
namespace test {
namespace subgraph {
namespace {
// Fallback generator for ops that have no dedicated graph builder yet;
// returns nullptr, meaning "no test function available" for this op type.
std::shared_ptr<ov::Function> generate(const std::shared_ptr<ov::op::Op> &node) {
return nullptr;
}
// Builds a two-input single-op graph for a supported binary eltwise op.
// Returns nullptr when the node's type is not in the supported set.
std::shared_ptr<ov::Function> generateBinaryEltwise(const std::shared_ptr<ov::op::Op> &node) {
    // Two dynamic f32 parameters feed the eltwise node under test.
    const auto params = ngraph::builder::makeDynamicParams(ov::element::f32, {{1, 2},
                                                                              {1, 2}});
    // Wraps a freshly built eltwise node into a one-op function.
    const auto makeGraph = [&params](const std::shared_ptr<ov::Node> &eltwiseNode) {
        ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(eltwiseNode)};
        return std::make_shared<ngraph::Function>(results, params, "BinaryEltwiseGraph");
    };
    if (ov::is_type<ov::op::v0::SquaredDifference>(node)) {
        return makeGraph(std::make_shared<ov::op::v0::SquaredDifference>(params.front(), params.back()));
    }
    if (ov::is_type<ov::op::v1::Add>(node)) {
        return makeGraph(std::make_shared<ov::op::v1::Add>(params.front(), params.back()));
    }
    if (ov::is_type<ov::op::v1::Divide>(node)) {
        return makeGraph(std::make_shared<ov::op::v1::Divide>(params.front(), params.back()));
    }
    if (ov::is_type<ov::op::v1::FloorMod>(node)) {
        return makeGraph(std::make_shared<ov::op::v1::FloorMod>(params.front(), params.back()));
    }
    if (ov::is_type<ov::op::v1::Maximum>(node)) {
        return makeGraph(std::make_shared<ov::op::v1::Maximum>(params.front(), params.back()));
    }
    if (ov::is_type<ov::op::v1::Minimum>(node)) {
        return makeGraph(std::make_shared<ov::op::v1::Minimum>(params.front(), params.back()));
    }
    if (ov::is_type<ov::op::v1::Multiply>(node)) {
        return makeGraph(std::make_shared<ov::op::v1::Multiply>(params.front(), params.back()));
    }
    if (ov::is_type<ov::op::v1::Power>(node)) {
        return makeGraph(std::make_shared<ov::op::v1::Power>(params.front(), params.back()));
    }
    if (ov::is_type<ov::op::v1::Subtract>(node)) {
        return makeGraph(std::make_shared<ov::op::v1::Subtract>(params.front(), params.back()));
    }
    // Not a supported binary eltwise op.
    return nullptr;
}
} // namespace
// Instantiates a graph builder for op type T: binary eltwise arithmetic ops
// get a dedicated two-input graph; everything else falls through to
// generate(), which currently returns nullptr.
template <typename T>
std::shared_ptr<ov::Function> generateGraph() {
    // make_shared instead of raw `new`: exception-safe and one allocation.
    std::shared_ptr<T> node = std::make_shared<T>();
    if (ov::is_type<ov::op::util::BinaryElementwiseArithmetic>(node)) {
        return generateBinaryEltwise(node);
    }
    return generate(node);
}
// Returns the (lazily built, process-wide) table mapping every op type from
// opset1..opset8 to a generator that builds a single-op test function.
// The _OPENVINO_OP_REG macro expands one map entry per op listed in the
// included *_tbl.hpp files.
OpGenerator getOpGeneratorMap() {
static OpGenerator opGeneratorMap{
#define _OPENVINO_OP_REG(NAME, NAMESPACE) {NAMESPACE::NAME::get_type_info_static(), generateGraph<NAMESPACE::NAME>},
#include "openvino/opsets/opset1_tbl.hpp"
#include "openvino/opsets/opset2_tbl.hpp"
#include "openvino/opsets/opset3_tbl.hpp"
#include "openvino/opsets/opset4_tbl.hpp"
#include "openvino/opsets/opset5_tbl.hpp"
#include "openvino/opsets/opset6_tbl.hpp"
#include "openvino/opsets/opset7_tbl.hpp"
#include "openvino/opsets/opset8_tbl.hpp"
#undef _OPENVINO_OP_REG
};
return opGeneratorMap;
}
} // namespace subgraph
} // namespace test
} // namespace ov

View File

@ -61,7 +61,7 @@ protected:
constexpr static const double disable_threshold = std::numeric_limits<double>::max();
double abs_threshold = disable_threshold, rel_threshold = disable_threshold;
LayerTestsUtils::Summary& summary = LayerTestsUtils::Summary::getInstance();;
LayerTestsUtils::Summary& summary = LayerTestsUtils::Summary::getInstance();
private:
std::vector<ov::runtime::Tensor> calculate_refs();

View File

@ -36,6 +36,7 @@ struct PassRate {
unsigned long failed = 0;
unsigned long skipped = 0;
unsigned long crashed = 0;
bool isImplemented = false;
PassRate() = default;
@ -44,6 +45,13 @@ struct PassRate {
failed = f;
skipped = s;
crashed = c;
if (!isImplemented && passed > 0) {
isImplemented = true;
}
}
void setImplementationStatus(bool implStatus) {
isImplemented = implStatus;
}
float getPassrate() const {
@ -87,10 +95,15 @@ public:
std::map<ngraph::NodeTypeInfo, PassRate> getOPsStats() { return opsStats; }
void updateOPsStats(const std::shared_ptr<ngraph::Function> &function, const PassRate::Statuses &status);
void updateOPsImplStatus(const std::shared_ptr<ngraph::Function> &function, const bool implStatus);
void updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Statuses &status);
void updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool implStatus);
static Summary &getInstance();
std::vector<ngraph::OpSet> getOpSets() {
return opsets;
}
// #define IE_TEST_DEBUG

View File

@ -49,6 +49,12 @@ def aggregate_test_results(results: ET.SubElement, xml_reports: list):
for attr_name in device_results.find(op.tag).attrib:
if attr_name == "passrate":
continue
if attr_name == "implemented":
xml_value = op.attrib.get(attr_name) == "true"
aggregated_value = entry.attrib.get(attr_name) == "true"
str_value = "true" if xml_value or aggregated_value else "false"
device_results.find(entry.tag).set(attr_name, str_value)
continue
xml_value = int(op.attrib.get(attr_name))
aggregated_value = int(entry.attrib.get(attr_name))
device_results.find(entry.tag).set(attr_name, str(xml_value + aggregated_value))

View File

@ -69,6 +69,9 @@ def merge_xmls(xml_paths: list):
for attr_name in device_results.find(op_result.tag).attrib:
if attr_name == "passrate":
continue
# TODO
if attr_name == "implemented":
continue
total_tests_count_xml += int(op_result.attrib.get(attr_name))
total_tests_count_summary += int(current_op_res.attrib.get(attr_name))
if total_tests_count_xml > total_tests_count_summary:
@ -97,6 +100,7 @@ def collect_statistic(root: ET.Element, is_conformance_mode: bool):
op_res = dict()
results = dict()
covered_ops = dict()
for device in root.find("results"):
results[device.tag] = {op.tag: op.attrib for op in device}
@ -104,16 +108,20 @@ def collect_statistic(root: ET.Element, is_conformance_mode: bool):
general_test_count[device.tag] = 0
general_passed_tests[device.tag] = 0
trusted_ops[device.tag] = 0
covered_ops[device.tag] = 0
for op in results[device.tag]:
op_test_cnt = int(results[device.tag][op]["passed"]) + int(results[device.tag][op]["failed"]) + \
int(results[device.tag][op]["crashed"]) + int(results[device.tag][op]["skipped"])
if op_test_cnt == 0:
continue
covered_ops[device.tag] += 1
pass_rate = round(float(results[device.tag][op]["passrate"]), 1)
results[device.tag][op]["passrate"] = pass_rate
pass_rate_avg[device.tag] += pass_rate
if pass_rate == 100.:
trusted_ops[device.tag] += 1
device_general_test_count = \
int(results[device.tag][op]["passed"]) + int(results[device.tag][op]["failed"]) +\
int(results[device.tag][op]["crashed"]) + int(results[device.tag][op]["skipped"])
device_general_test_count = op_test_cnt
general_test_count[device.tag] += device_general_test_count
general_passed_tests[device.tag] += int(results[device.tag][op]["passed"])
@ -123,9 +131,9 @@ def collect_statistic(root: ET.Element, is_conformance_mode: bool):
op_res.update({op: {device.tag: device_general_test_count}})
pass_rate_avg[device.tag] /= len(results[device.tag])
pass_rate_avg[device.tag] = round(float(pass_rate_avg[device.tag]), 1)
general_pass_rate[device.tag] = general_passed_tests[device.tag] * 100 / general_test_count[device.tag]
general_pass_rate[device.tag] = 0 if general_test_count[device.tag] == 0 else (general_passed_tests[device.tag] * 100 / general_test_count[device.tag])
general_pass_rate[device.tag] = round(float(general_pass_rate[device.tag]), 1)
trusted_ops[device.tag] = round(float(trusted_ops[device.tag] * 100 / len(results[device.tag])), 1)
trusted_ops[device.tag] = round(float(trusted_ops[device.tag] * 100 / covered_ops[device.tag]), 1) if device.tag in covered_ops and covered_ops[device.tag] != 0 else 0
logger.info("Test number comparison between devices is started")
for op in op_res:
@ -145,14 +153,14 @@ def collect_statistic(root: ET.Element, is_conformance_mode: bool):
devices = results.keys()
logger.info("Statistic collecting is completed")
return devices, results, general_pass_rate, pass_rate_avg, general_test_count, trusted_ops
return devices, results, general_pass_rate, pass_rate_avg, general_test_count, trusted_ops, covered_ops
def create_summary(summary_root: ET.Element, output_folder: os.path, report_tag: str, is_conformance_mode: bool,
output_filename='report'):
if is_conformance_mode:
utils.update_conformance_test_counters(summary_root, logger)
device_list, results, general_pass_rate, pass_rate_avg, general_test_count, trusted_ops = \
device_list, results, general_pass_rate, pass_rate_avg, general_test_count, trusted_ops, covered_ops = \
collect_statistic(summary_root, is_conformance_mode)
timestamp = summary_root.attrib["timestamp"]
@ -168,7 +176,7 @@ def create_summary(summary_root: ET.Element, output_folder: os.path, report_tag:
res_summary = template.render(ordered_ops=op_list, devices=device_list, results=results, timestamp=timestamp,
general_pass_rate=general_pass_rate, pass_rate_avg=pass_rate_avg,
trusted_ops=trusted_ops,
trusted_ops=trusted_ops, covered_ops=covered_ops,
general_test_count=general_test_count, report_tag=report_tag)
report_path = os.path.join(output_folder, f'{output_filename}.html')

View File

@ -14,7 +14,7 @@ $(document).ready(function () {
$('#operationName').val('');
$('#status').prop("disabled", true).val('');
$('#devices').val(0);
$('#references').val(0);
$('#implementation').val(0);
$("#status").chosen("destroy");
$("#status").chosen({max_selected_options: 6});
filterTable();
@ -79,7 +79,7 @@ function filterTable() {
opsetNumber = $("#opsetNumber").val();
operationName = $('#operationName').val().trim();
status = $('#status').val();
references = $('#references').val();
implementation = $('#implementation').val();
$("#report #data tr").show();
$('#report').show();
@ -96,14 +96,18 @@ function filterTable() {
});
}
if (references != 0) {
if (references == 'nv') {
if (implementation != 0) {
if (implementation == 'ni') {
$("#report #data tr:not(:hidden)").filter(function () {
$(this).toggle($(this).find('th').hasClass("colorRed"))
$(this).toggle($(this).find('td').hasClass("not_impl"))
});
} else if (implementation == 'i') {
$("#report #data tr:not(:hidden)").filter(function () {
$(this).toggle($(this).find('td').hasClass("impl"));
});
} else {
$("#report #data tr:not(:hidden)").filter(function () {
$(this).toggle(!$(this).find('th').hasClass("colorRed"));
$(this).toggle(!$(this).find('td').hasClass("not_impl") && !$(this).find('td').hasClass("impl"));
});
}
}
@ -173,14 +177,14 @@ function calculateColumnStatistics(device) {
total = $("#report #data tr:not(:hidden)").length;
$('#statistic .table-primary[scope="row"] i').text(total);
// trusted op
count_trasted_op = $("#report #data tr:not(:hidden) ." + device + ".value[value^='100'][crashed='0'][failed='0'][skipped='0']").length;
all_operations = $("#report #data tr:not(:hidden) .value." + device).length;
count_trusted_op = $("#report #data tr:not(:hidden) ." + device + ".value[value^='100'][crashed='0'][failed='0'][skipped='0']").length;
all_operations = $("#report #data tr:not(:hidden) .value[value!='N/A'][value!='---']." + device).length;
if (!all_operations) {
trasted_op = "---";
trusted_op = "---";
} else {
trasted_op = (count_trasted_op * 100 / all_operations).toFixed(1) + ' %';
trusted_op = (count_trusted_op * 100 / all_operations).toFixed(1) + ' %';
}
$('#statistic .table-primary.' + device + '.trusted-ops').text(trasted_op);
$('#statistic .table-primary.' + device + '.trusted-ops').text(trusted_op);
$('#statistic .table-primary.' + device + '.test_total').text(all_operations || 0);
// tested op_counter
@ -188,10 +192,12 @@ function calculateColumnStatistics(device) {
passed_tested_op_count = 0;
$("#report #data tr:not(:hidden) ." + device + ".value span").each(function () {
text = $(this).text().split(':')[1];
if ($(this).hasClass('green')) {
passed_tested_op_count += +text;
if (text) {
if ($(this).hasClass('green')) {
passed_tested_op_count += +text;
}
tested_op_count += +text;
}
tested_op_count += +text;
});
// General Pass Rate
@ -207,7 +213,9 @@ function calculateColumnStatistics(device) {
// AVG Pass Rate
sum_pass_rate = 0;
$("#report #data tr:not(:hidden) ." + device + ".value").each(function () {
sum_pass_rate += +$(this).attr('value');
if ($(this).attr('value') != 'N/A' && $(this).attr('value') != '---') {
sum_pass_rate += +$(this).attr('value');
}
});
if (all_operations == 0) {
$('#statistic .table-primary.' + device + '.avg_pass_rate').text('---');

View File

@ -35,6 +35,11 @@
<span class="grey">S:2</span><span>Skipped</span>
<span class="dark">C:0</span><span>Crashed</span>
</div>
<div>
<span><b>Plugin operation implementation status:</b></span>
<div class="checkmark"></div><div>Implemented</div>
<div class="check"></div><div>Not implemented</div>
</div>
</div>
</div>
<!-- Filters block -->
@ -48,18 +53,19 @@
<label for="opsetNumber"><b>Opset Number</b></label>
<select id="opsetNumber" class="form-control"></select>
</div>
<div class="form-group">
<label for="references"><b>Ngraph references</b></label>
<select id="references" class="form-control">
<option value="0">All</option>
<option value="v">Verified</option>
<option value="nv">Not verified</option>
</select>
</div>
<div class="form-group">
<label for="devices"><b>Devices</b></label>
<select id="devices" class="form-control"></select>
</div>
<div class="form-group">
<label for="implementation"><b>Plugin Implementation</b></label>
<select id="implementation" class="form-control">
<option value="0">All</option>
<option value="i">Implemented</option>
<option value="ni">Not implemented</option>
<option value="ns">No status</option>
</select>
</div>
<div class="form-group col-5" style="padding-left:0">
@ -92,7 +98,7 @@
<tr>
<th class="table-primary" scope="row">Total: <i>{{ordered_ops|length}}</i></th>
{% for d in devices -%}
<td class="table-primary {{ d }} test_total">{{results[d]|length}}</td>
<td class="table-primary {{ d }} test_total"> {% if d in covered_ops -%} {{covered_ops[d]}} {% else -%} 0 {% endif -%}</td>
{% endfor %}
</tr>
<tr>
@ -127,17 +133,36 @@
{% for d in devices -%}
{% if op in results[d] -%}
<td class="value {{ d }}" passed="{{ results[d][op].passed }}" failed="{{ results[d][op].failed }}"
<td class="value {{ d }} {% if results[d][op].implemented == 'true' -%} impl {% else -%} not_impl {% endif -%}"
passed="{{ results[d][op].passed }}" failed="{{ results[d][op].failed }}"
skipped="{{ results[d][op].skipped }}" crashed="{{ results[d][op].crashed }}"
value="{{ results[d][op].passrate }}">
{{ results[d][op].passrate }} %<br />
<span class="green" title="Passed">P:{{ results[d][op].passed }}</span>
<span class="red" title="Failed">F:{{ results[d][op].failed }}</span>
<span class="grey" title="Skipped">S:{{ results[d][op].skipped }}</span>
<span class="dark" title="Crashed">C:{{ results[d][op].crashed }}</span>
value="{% if (results[d][op].passed != '0' or results[d][op].failed != '0' or results[d][op].crashed != '0' or results[d][op].skipped) != '0' -%}{{ results[d][op].passrate }}{% else -%}N/A{% endif -%}"
title="{% if results[d][op].implemented == 'true' -%}
{{op}} is implemented in {{d}} plugin
{% else -%}
{{op}} is not implemented in {{d}} plugin
{% endif -%}">
{% if (results[d][op].passed != '0' or results[d][op].failed != '0' or results[d][op].crashed != '0' or results[d][op].skipped) != '0' -%}
{{ results[d][op].passrate }} %<br />
{% else -%}
---<br />
{% endif -%}
<div class="flex">
<div>
{% if (results[d][op].passed != '0' or results[d][op].failed != '0' or results[d][op].crashed != '0' or results[d][op].skipped) != '0' -%}
<span class="green" title="Passed">P:{{ results[d][op].passed }}</span>
<span class="red" title="Failed">F:{{ results[d][op].failed }}</span>
<span class="grey" title="Skipped">S:{{ results[d][op].skipped }}</span>
<span class="dark" title="Crashed">C:{{ results[d][op].crashed }}</span>
{% else -%}
{% endif -%}
</div>
<div class="{% if results[d][op].implemented == 'true' -%} checkmark {% else -%} check {% endif -%}"></div>
</div>
</td>
{% else -%}
<td class="table-secondary {{ d }}">N/A</td>
<td class="table-secondary {{ d }}">N/A</td>
{% endif -%}
{% endfor %}

View File

@ -67,10 +67,6 @@ form button {
min-height: 25px;
min-width: 25px;
}
.colorRed {
color:#cf1d1d;
font-weight: bold;
}
.form-group {
margin-bottom: 0;
}
@ -535,3 +531,69 @@ h2 {
background-position: -12px 2px;
}
/* Red disc marking an operation as NOT implemented in the plugin. */
.check {
    width: 16px;
    height: 16px;
    background: #be2d2d;
    /* Push any text out of view. The original "-1000" had no unit, which
       makes the declaration invalid CSS and browsers drop it. */
    text-indent: -1000px;
    border-radius: 8px;
    position: relative;
}
/* White dash drawn across the red disc (a "minus" glyph). */
.check::before {
    display: block;
    border-bottom: 2px solid #FFF;
    position: absolute;
    /* Fixed typo: "z:index" is not a valid property name and was ignored. */
    z-index: 10;
    width: 8px;
    top: 7px;
    left: 4px;
    content: "";
}
/* Container for the green tick marking an implemented operation. */
.checkmark {
display: block;
width: 16px;
height: 16px;
position: relative;
}
/* Long stroke of the tick (rotated vertical bar). */
.checkmark::before {
position: absolute;
display: block;
width: 2px;
height: 16px;
background-color: green;
left: 10px;
content: '';
top: 0px;
z-index: 20;
transform: rotate(45deg);
}
/* Short stroke of the tick. */
.checkmark::after {
position: absolute;
width: 2px;
content: '';
height: 7px;
background-color: green;
left:3px;
display: block;
top: 7px;
z-index: 20;
transform: rotate(-45deg);
}
/* Lays out pass-rate counters and the implementation icon side by side. */
.flex {
display: flex;
justify-content: space-between;
}
/* Row-hover tints: red-ish for not implemented, green-ish for implemented. */
.not_impl:hover {
background: #ffdee1;
}
.impl:hover {
background: #e0fde6;
}

View File

@ -19,10 +19,15 @@ def update_passrates(results: ET.SubElement):
for attrib in op.attrib:
if attrib == "passrate":
continue
if attrib == "implemented":
continue
if attrib == "passed":
passed_tests = int(op.attrib.get(attrib))
total_tests += int(op.attrib.get(attrib))
passrate = float(passed_tests * 100 / total_tests) if passed_tests < total_tests else 100
if total_tests == 0:
passrate = 0
else:
passrate = float(passed_tests * 100 / total_tests) if passed_tests < total_tests else 100
op.set("passrate", str(round(passrate, 1)))

View File

@ -53,6 +53,9 @@ void Summary::updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Sta
auto &passrate = it->second;
switch (status) {
case PassRate::PASSED:
if (!passrate.isImplemented) {
passrate.isImplemented = true;
}
passrate.passed++;
passrate.crashed--;
break;
@ -85,6 +88,18 @@ void Summary::updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Sta
}
}
// Records the implementation status for a single op type. The flag is
// sticky: once an op is marked implemented it never reverts. A previously
// unseen op gets an entry with zeroed counters and the given status.
void Summary::updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool implStatus) {
    const auto statsIt = opsStats.find(op);
    if (statsIt == opsStats.end()) {
        opsStats[op] = PassRate(0, 0, 0, 0);
        opsStats[op].isImplemented = implStatus;
    } else if (implStatus && !statsIt->second.isImplemented) {
        statsIt->second.isImplemented = true;
    }
}
std::string Summary::getOpVersion(const ngraph::NodeTypeInfo &type_info) {
for (size_t i = 0; i < opsets.size(); i++) {
if (opsets[i].contains_type(type_info)) {
@ -120,6 +135,9 @@ std::map<std::string, PassRate> Summary::getOpStatisticFromReport() {
}
void Summary::updateOPsStats(const std::shared_ptr<ngraph::Function> &function, const PassRate::Statuses &status) {
if (function->get_parameters().empty()) {
return;
}
bool isFunctionalGraph = false;
for (const auto &op : function->get_ordered_ops()) {
if (!ngraph::is_type<ngraph::op::Parameter>(op) &&
@ -151,6 +169,41 @@ void Summary::updateOPsStats(const std::shared_ptr<ngraph::Function> &function,
}
}
// Propagates an implementation status to every op in the function, recursing
// into TensorIterator and Loop bodies. Parameter/Constant/Result nodes are
// skipped when the graph contains at least one "real" op.
void Summary::updateOPsImplStatus(const std::shared_ptr<ngraph::Function> &function, const bool implStatus) {
if (function->get_parameters().empty()) {
return;
}
// True when the graph has at least one node besides Parameter/Constant/Result.
bool isFunctionalGraph = false;
for (const auto &op : function->get_ordered_ops()) {
if (!ngraph::is_type<ngraph::op::Parameter>(op) &&
!ngraph::is_type<ngraph::op::Constant>(op) &&
!ngraph::is_type<ngraph::op::Result>(op)) {
isFunctionalGraph = true;
break;
}
}
for (const auto &op : function->get_ordered_ops()) {
// Skip plumbing nodes unless they are all the graph contains.
if ((ngraph::is_type<ngraph::op::Parameter>(op) ||
ngraph::is_type<ngraph::op::Constant>(op) ||
ngraph::is_type<ngraph::op::Result>(op)) && isFunctionalGraph) {
continue;
} else if (ngraph::is_type<ngraph::op::TensorIterator>(op)) {
// Record the TI itself, then recurse into its body graph.
updateOPsImplStatus(op->get_type_info(), implStatus);
auto ti = ngraph::as_type_ptr<ngraph::op::TensorIterator>(op);
auto ti_body = ti->get_function();
updateOPsImplStatus(ti_body, implStatus);
} else if (ngraph::is_type<ngraph::op::v5::Loop>(op)) {
// Same treatment for Loop: record the op and recurse into the body.
updateOPsImplStatus(op->get_type_info(), implStatus);
auto loop = ngraph::as_type_ptr<ngraph::op::v5::Loop>(op);
auto loop_body = loop->get_function();
updateOPsImplStatus(loop_body, implStatus);
} else {
updateOPsImplStatus(op->get_type_info(), implStatus);
}
}
}
#ifdef IE_TEST_DEBUG
void Summary::saveDebugReport(const char* className, const char* opName, unsigned long passed, unsigned long failed,
unsigned long skipped, unsigned long crashed) {
@ -233,6 +286,7 @@ void Summary::saveReport() {
std::string name = std::string(it.first.name) + "-" + getOpVersion(it.first);
opList.insert(name);
pugi::xml_node entry = currentDeviceNode.append_child(name.c_str());
entry.append_attribute("implemented").set_value(it.second.isImplemented);
entry.append_attribute("passed").set_value(it.second.passed);
entry.append_attribute("failed").set_value(it.second.failed);
entry.append_attribute("skipped").set_value(it.second.skipped);
@ -246,6 +300,7 @@ void Summary::saveReport() {
pugi::xml_node entry;
if (opList.find(item.first) == opList.end()) {
entry = currentDeviceNode.append_child(item.first.c_str());
entry.append_attribute("implemented").set_value(item.second.isImplemented);
entry.append_attribute("passed").set_value(item.second.passed);
entry.append_attribute("failed").set_value(item.second.failed);
entry.append_attribute("skipped").set_value(item.second.skipped);
@ -253,12 +308,16 @@ void Summary::saveReport() {
entry.append_attribute("passrate").set_value(item.second.getPassrate());
} else {
entry = currentDeviceNode.child(item.first.c_str());
auto implStatus = entry.attribute("implemented").value() == std::string("true") ? true : false;
auto p = std::stoi(entry.attribute("passed").value()) + item.second.passed;
auto f = std::stoi(entry.attribute("failed").value()) + item.second.failed;
auto s = std::stoi(entry.attribute("skipped").value()) + item.second.skipped;
auto c = std::stoi(entry.attribute("crashed").value()) + item.second.crashed;
PassRate obj(p, f, s, c);
(implStatus || obj.isImplemented)
? entry.attribute("implemented").set_value(true)
: entry.attribute("implemented").set_value(false);
entry.attribute("passed").set_value(obj.passed);
entry.attribute("failed").set_value(obj.failed);
entry.attribute("skipped").set_value(obj.skipped);