[CONFORMANCE] Correct pass rate when skipped tests are added (#16844)

* init

* Refactor

* Static and dynamic approach

* next

* fix

* small fixes

* fix
This commit is contained in:
Irina Efode 2023-04-14 17:00:19 +04:00 committed by GitHub
parent b9f82e37b9
commit fd824cf036
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 188 additions and 33 deletions

View File

@ -39,6 +39,13 @@ protected:
void init_input_shapes(const std::vector<InputShape>& shapes); void init_input_shapes(const std::vector<InputShape>& shapes);
// Count this test as FAILED in the operation summary if the fixture failed
// but the failure was never recorded through the normal reporting path
// (run() sets is_reported = true before executing), so early failures are
// still reflected in the pass-rate statistics.
void TearDown() override {
if (this->HasFailure() && !is_reported) {
summary.setDeviceName(targetDevice);
summary.updateOPsStats(function, ov::test::utils::PassRate::Statuses::FAILED, rel_influence_coef);
}
}
std::shared_ptr<ov::Core> core = ov::test::utils::PluginCache::get().core(); std::shared_ptr<ov::Core> core = ov::test::utils::PluginCache::get().core();
std::string targetDevice; std::string targetDevice;
ov::AnyMap configuration; ov::AnyMap configuration;
@ -57,6 +64,7 @@ protected:
ov::test::utils::OpSummary& summary = ov::test::utils::OpSummary::getInstance(); ov::test::utils::OpSummary& summary = ov::test::utils::OpSummary::getInstance();
bool is_report_stages = false; bool is_report_stages = false;
bool is_reported = false;
double rel_influence_coef = 1.f; double rel_influence_coef = 1.f;
virtual std::vector<ov::Tensor> calculate_refs(); virtual std::vector<ov::Tensor> calculate_refs();

View File

@ -41,6 +41,7 @@ std::ostream& operator <<(std::ostream& os, const InputShape& inputShape) {
} }
void SubgraphBaseTest::run() { void SubgraphBaseTest::run() {
is_reported = true;
bool isCurrentTestDisabled = FuncTestUtils::SkipTestsConfig::currentTestIsDisabled(); bool isCurrentTestDisabled = FuncTestUtils::SkipTestsConfig::currentTestIsDisabled();
ov::test::utils::PassRate::Statuses status = isCurrentTestDisabled ? ov::test::utils::PassRate::Statuses status = isCurrentTestDisabled ?

View File

@ -4,11 +4,13 @@
import defusedxml.ElementTree as ET import defusedxml.ElementTree as ET
from argparse import ArgumentParser from argparse import ArgumentParser
from dataclasses import dataclass
from pathlib import Path from pathlib import Path
from hashlib import sha256 from hashlib import sha256
from utils.conformance_utils import get_logger, set_env_variable from utils.conformance_utils import get_logger, set_env_variable
from utils.constants import PY_OPENVINO, LD_LIB_PATH_NAME, PYTHON_NAME from utils.constants import PY_OPENVINO, LD_LIB_PATH_NAME, PYTHON_NAME, REL_WEIGHTS_FILENAME, REL_WEIGHTS_REPLACE_STR
from utils.file_utils import get_ov_path, find_latest_dir from utils.file_utils import get_ov_path, find_latest_dir
import defusedxml.ElementTree as ET
import os import os
@ -39,12 +41,19 @@ XML_EXTENSION = ".xml"
BIN_EXTENSION = ".bin" BIN_EXTENSION = ".bin"
META_EXTENSION = ".meta" META_EXTENSION = ".meta"
@dataclass
class TestStructure:
    # Accumulated relative weight of models containing an operation,
    # split by the shape mode inferred from the model path
    # ("dynamic" vs "static" — see create_hash).
    dynamic: float = 0.0
    static: float = 0.0
def parse_arguments(): def parse_arguments():
parser = ArgumentParser() parser = ArgumentParser()
in_dir_help = "Path/s to input directory" in_dir_help = "Path/s to input directory"
rel_weights_dir = "Path to dir to save rel_weights_file"
parser.add_argument("--input_dir", help=in_dir_help, nargs="*", required=True) parser.add_argument("--input_dir", help=in_dir_help, nargs="*", required=True)
parser.add_argument("--rel_weights_dir", help=in_dir_help, type=str, default=None, required=False)
return parser.parse_args() return parser.parse_args()
@ -53,8 +62,67 @@ def check_file(path: Path):
logger.error(f"File {path} is not exist!") logger.error(f"File {path} is not exist!")
exit(-1) exit(-1)
def generate_op_name(type_info):
    """Build the "<Name>-<version>" identifier used in conformance reports.

    The numeric part is taken from ``type_info.version_id`` with its
    ``opset`` prefix stripped (e.g. ``opset8`` -> ``8``).
    """
    version = type_info.version_id.replace('opset', '')
    return "-".join((type_info.name, version))
def get_rel_weight(meta_info_file: Path):
    """Read a model's relative weight from its .meta file.

    Returns the float value of the <graph_priority value="..."/> node.
    Falls back to 1.0 when the file is missing or malformed so the model
    still contributes a default weight of one.
    """
    try:
        meta_info_root = ET.parse(meta_info_file).getroot()
        graph_priority_node = meta_info_root.find("graph_priority")
        return float(graph_priority_node.attrib.get("value"))
    # BUG FIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # catch Exception so interrupts still propagate.
    except Exception:
        logger.error(f"Meta info {meta_info_file} is incorrect!")
        return 1.0
def update_rel_weight(meta_info_file: Path, additional_value: float):
    """Add `additional_value` to the graph_priority value stored in a .meta file.

    Used when two models hash to the same name: the duplicate's weight is
    folded into the surviving meta file (see create_hash). Errors are
    logged and swallowed, not raised.
    """
    try:
        meta_info_root = ET.parse(meta_info_file).getroot()
        graph_priority_node = meta_info_root.find("graph_priority")
        value_attrib = float(graph_priority_node.attrib.get("value"))
        graph_priority_node.set("value", str(value_attrib + additional_value))
        with open(meta_info_file, "w") as xml_file:
            xml_file.write(ET.tostring(meta_info_root).decode('utf8'))
        logger.info(f"Meta info file {meta_info_file} was updated")
    # BUG FIX: bare `except:` replaced with `except Exception:` so that
    # KeyboardInterrupt/SystemExit are not silently swallowed.
    except Exception:
        logger.error(f"Meta info {meta_info_file} is incorrect!")
def is_report_op(op_name: str, is_convert_model: bool):
    """Decide whether an operation counts toward the conformance report.

    Parameter-1, Result-1 and Constant-1 are never reported.  In a model
    generated for the Convert operation only Convert-1 itself is reported;
    in any other model every remaining operation is reported.
    """
    if op_name in ("Parameter-1", "Result-1", "Constant-1"):
        return False
    if is_convert_model:
        return op_name == "Convert-1"
    return True
def generate_node_hash(node):
    """Build a deterministic string describing a node's ports for hashing.

    For every input and output port the shape rank, element type name,
    dynamism flag and the connected node's type name/version are
    concatenated; callers feed the result to sha256 to derive a stable
    model file name.
    """
    str_to_hash = ""
    # `in_port` / `out_port` avoid shadowing the `input` builtin.
    for in_port in node.inputs():
        input_node = in_port.get_node()
        len_shape = None
        try:
            len_shape = len(in_port.get_partial_shape())
        # BUG FIX: bare `except:` narrowed to Exception.
        # NOTE(review): if get_partial_shape() raised here, the unguarded
        # call below raises again and is handled by the caller — presumably
        # intentional; confirm before changing.
        except Exception:
            logger.error(f"Impossible to get input_shape for {input_node.name}")
        str_to_hash += str(len_shape) + str(in_port.get_element_type().get_type_name()) + str(in_port.get_partial_shape().is_dynamic) + \
            str(input_node.get_type_info().name) + str(input_node.get_type_info().version_id)
    for out_port in node.outputs():
        output_node = out_port.get_node()
        len_shape = None
        try:
            len_shape = len(out_port.get_partial_shape())
        except Exception:
            # NOTE(review): names.pop() mutates the port's name set as a
            # side effect of logging — confirm this is acceptable.
            logger.error(f"Impossible to get output_shape for {out_port.names.pop()}")
        str_to_hash += str(len_shape) + str(out_port.get_element_type().get_type_name()) + str(out_port.get_partial_shape().is_dynamic) + \
            str(output_node.get_type_info().name) + str(output_node.get_type_info().version_id)
    return str_to_hash
def create_hash(in_dir_path: Path, operations=dict()):
core = Core() core = Core()
models = in_dir_path.rglob("*.xml") models = in_dir_path.rglob("*.xml")
models = sorted(models) models = sorted(models)
@ -66,28 +134,27 @@ def create_hash(in_dir_path: Path):
check_file(bin_path) check_file(bin_path)
check_file(meta_path) check_file(meta_path)
is_convert_model = "Convert" in str(model_path)
str_to_hash = str() str_to_hash = str()
try: try:
model = core.read_model(model_path) model = core.read_model(model_path)
rel_weight = get_rel_weight(meta_path)
for node in model.get_ordered_ops(): for node in model.get_ordered_ops():
for input in node.inputs(): op_name = generate_op_name(node.get_type_info())
input_node = input.get_node() if is_report_op(op_name, is_convert_model):
len_shape = None if not op_name in operations.keys():
operations.update({op_name: TestStructure()})
if "static" in str(model_path):
operations[op_name].static += rel_weight
elif "dynamic" in str(model_path):
operations[op_name].dynamic += rel_weight
str_to_hash += generate_node_hash(node)
try: try:
len_shape = len(input.get_partial_shape()) for body_node in node.get_function().get_ordered_ops():
str_to_hash += generate_node_hash(body_node)
except: except:
logger.error(f"Impossible to get input_shape for {input_node.name}") pass
str_to_hash += str(len_shape) + str(input.get_element_type().get_type_name()) + str(input.get_partial_shape().is_dynamic) + \
str(input_node.get_type_info().name) + str(input_node.get_type_info().version)
for output in node.outputs():
output_node = output.get_node()
len_shape = None
try:
len_shape = len(output.get_partial_shape())
except:
logger.error(f"Impossible to get output_shape for {output.names.pop()}")
str_to_hash += str(len_shape) + str(output.get_element_type().get_type_name()) + str(output.get_partial_shape().is_dynamic) + \
str(output_node.get_type_info().name) + str(output_node.get_type_info().version)
except: except:
logger.error(f"Impossible to create hash for {model_path}") logger.error(f"Impossible to create hash for {model_path}")
ports_info = ET.parse(meta_path).getroot().find("ports_info") ports_info = ET.parse(meta_path).getroot().find("ports_info")
@ -96,20 +163,68 @@ def create_hash(in_dir_path: Path):
old_name = model_path old_name = model_path
new_name = str(sha256(str_to_hash.encode('utf-8')).hexdigest()) new_name = str(sha256(str_to_hash.encode('utf-8')).hexdigest())
model_path.rename(Path(model_path.parent, new_name + XML_EXTENSION)) new_meta_path = Path(meta_path.parent, new_name + META_EXTENSION)
meta_path.rename(Path(meta_path.parent, new_name + META_EXTENSION)) new_xml_path = Path(model_path.parent, new_name + XML_EXTENSION)
bin_path.rename(Path(bin_path.parent, new_name + BIN_EXTENSION)) new_bin_path = Path(bin_path.parent, new_name + BIN_EXTENSION)
if os.path.isfile(new_meta_path):
update_rel_weight(new_meta_path, rel_weight)
os.remove(meta_path)
os.remove(model_path)
os.remove(bin_path)
else:
model_path.rename(new_xml_path)
meta_path.rename(new_meta_path)
bin_path.rename(new_bin_path)
# logger.info(f"{old_name} -> {new_name}") # logger.info(f"{old_name} -> {new_name}")
return operations
def save_rel_weights(rel_weights_dir: Path, operations: dict):
    """Serialize accumulated per-operation relative weights to .lst files.

    Writes three files into `rel_weights_dir` — combined, dynamic-only and
    static-only — each line formatted "Op-N:weight".  Returns the three
    file paths (combined, dynamic, static).
    """
    # BUG FIX: the original tested `rel_weights_dir.is_dir` without calling
    # it; the bound method is always truthy, so the directory was never
    # created and the open() below failed for a missing directory.
    if not rel_weights_dir.is_dir():
        logger.info(f"Create rel weight_dir: {rel_weights_dir}")
        os.mkdir(rel_weights_dir)
    rel_weights_path = os.path.join(rel_weights_dir, REL_WEIGHTS_FILENAME.replace(REL_WEIGHTS_REPLACE_STR, ""))
    dyn_rel_weights_path = os.path.join(rel_weights_dir, REL_WEIGHTS_FILENAME.replace(REL_WEIGHTS_REPLACE_STR, "dynamic"))
    static_rel_weights_path = os.path.join(rel_weights_dir, REL_WEIGHTS_FILENAME.replace(REL_WEIGHTS_REPLACE_STR, "static"))
    # Context managers guarantee the files are closed even if a write fails.
    with open(rel_weights_path, "w") as rel_weights_file, \
         open(dyn_rel_weights_path, "w") as dyn_rel_weights_file, \
         open(static_rel_weights_path, "w") as static_rel_weights_file:
        for op, rel_weight in operations.items():
            if rel_weight.dynamic != 0:
                dyn_rel_weights_file.write(f"{op}:{rel_weight.dynamic}\n")
            if rel_weight.static != 0:
                static_rel_weights_file.write(f"{op}:{rel_weight.static}\n")
            rel_weights_file.write(f"{op}:{rel_weight.static + rel_weight.dynamic}\n")
    logger.info(f"Relative weights are saved to {rel_weights_path}, {dyn_rel_weights_path}, {static_rel_weights_path}")
    return rel_weights_path, dyn_rel_weights_path, static_rel_weights_path
if __name__=="__main__": if __name__=="__main__":
args = parse_arguments() args = parse_arguments()
operations = dict()
rel_weights_dir = None
if not args.rel_weights_dir is None:
rel_weights_dir = Path(args.rel_weights_dir)
if not rel_weights_dir.is_dir():
logger.info(f"Create rel weight_dir: {rel_weights_dir}")
os.mkdir(rel_weights_dir)
for in_dir in args.input_dir: for in_dir in args.input_dir:
if not Path(in_dir).is_dir: if not Path(in_dir).is_dir():
logger.error(f"Directory {in_dir} is not exist!") logger.error(f"Directory {in_dir} is not exist!")
exit(-1) exit(-1)
logger.info(f"Starting to rename models in {in_dir}") logger.info(f"Starting to rename models in {in_dir}")
create_hash(Path(in_dir)) operations = create_hash(Path(in_dir), operations)
if not rel_weights_dir is None:
save_rel_weights(rel_weights_dir, operations)
logger.info("The run is successfully completed") logger.info("The run is successfully completed")

View File

@ -21,7 +21,7 @@ from utils import file_utils
logger = get_logger('conformance_runner') logger = get_logger('conformance_runner')
has_python_api = True has_python_api = True
try: try:
from rename_conformance_ir import create_hash from rename_conformance_ir import create_hash, save_rel_weights
except: except:
logger.warning("Please set the above env variable to get the same conformance ir names run by run!") logger.warning("Please set the above env variable to get the same conformance ir names run by run!")
has_python_api = False has_python_api = False
@ -138,7 +138,8 @@ class Conformance:
exit(-1) exit(-1)
self._model_path = conformance_ir_path self._model_path = conformance_ir_path
if has_python_api: if has_python_api:
create_hash(Path(self._model_path)) op_rel_weight = create_hash(Path(self._model_path))
save_rel_weights(Path(self._model_path), op_rel_weight)
logger.info(f"All conformance IRs in {self._ov_bin_path} were renamed based on hash") logger.info(f"All conformance IRs in {self._ov_bin_path} were renamed based on hash")
else: else:
logger.warning("The OV Python was not built or Environment was not updated to requirments. Skip the step to rename Conformance IR based on a hash") logger.warning("The OV Python was not built or Environment was not updated to requirments. Skip the step to rename Conformance IR based on a hash")
@ -184,7 +185,8 @@ class Conformance:
def __summarize(self, xml_report_path:os.path, report_dir: os.path): def __summarize(self, xml_report_path:os.path, report_dir: os.path):
summary_root = ET.parse(xml_report_path).getroot() summary_root = ET.parse(xml_report_path).getroot()
create_summary(summary_root, report_dir, [], "", "", False, True) rel_weights_path = os.path.join(self._model_path, constants.REL_WEIGHTS_FILENAME.replace(constants.REL_WEIGHTS_REPLACE_STR, self._shape_mode))
create_summary(summary_root, report_dir, [], "", "", True, True, rel_weights_path)
copytree(os.path.join(SCRIPT_DIR_PATH, "template"), os.path.join(report_dir, "template")) copytree(os.path.join(SCRIPT_DIR_PATH, "template"), os.path.join(report_dir, "template"))
logger.info(f"Report was saved to {os.path.join(report_dir, 'report.html')}") logger.info(f"Report was saved to {os.path.join(report_dir, 'report.html')}")

View File

@ -11,6 +11,7 @@ from jinja2 import Environment, FileSystemLoader
from utils.conformance_utils import get_logger from utils.conformance_utils import get_logger
from utils import stat_update_utils from utils import stat_update_utils
from utils.constants import REL_WEIGHTS_FILENAME
# defuse_stdlib provide patched version of xml.etree.ElementTree which allows to use objects from xml.etree.ElementTree # defuse_stdlib provide patched version of xml.etree.ElementTree which allows to use objects from xml.etree.ElementTree
# in a safe manner without including unsafe xml.etree.ElementTree # in a safe manner without including unsafe xml.etree.ElementTree
@ -42,6 +43,7 @@ def parse_arguments():
conformance_mode_help = "Allow to align test number" conformance_mode_help = "Allow to align test number"
csv_help = "Allow to serialize report as csv file" csv_help = "Allow to serialize report as csv file"
expected_devices_help = "List of expected devices" expected_devices_help = "List of expected devices"
rel_weights_help = "Path to dir/file with rel weights"
parser.add_argument("--xml", help=xml_help, nargs="*", required=True) parser.add_argument("--xml", help=xml_help, nargs="*", required=True)
parser.add_argument("--out", help=out_help, default="") parser.add_argument("--out", help=out_help, default="")
@ -51,10 +53,29 @@ def parse_arguments():
parser.add_argument("--conformance_mode", help=conformance_mode_help, default=False) parser.add_argument("--conformance_mode", help=conformance_mode_help, default=False)
parser.add_argument("--csv", help=csv_help, default=False) parser.add_argument("--csv", help=csv_help, default=False)
parser.add_argument("--expected_devices", help=expected_devices_help, nargs="*", required=False) parser.add_argument("--expected_devices", help=expected_devices_help, nargs="*", required=False)
parser.add_argument("--rel_weights", help=rel_weights_help, type=str, required=False)
return parser.parse_args() return parser.parse_args()
def parse_rel_weights(rel_weights_path: os.path):
    """Load "Op-N:weight" lines from a rel-weights file into a dict.

    `rel_weights_path` may be the file itself or a directory containing
    REL_WEIGHTS_FILENAME.  Returns an empty dict when no file is found,
    in which case expected pass rates are taken from runtime data.
    """
    rel_weights = dict()
    rel_weights_file_path = rel_weights_path
    if os.path.isdir(rel_weights_path):
        # NOTE(review): REL_WEIGHTS_FILENAME still contains the REPLACE
        # placeholder here, while save_rel_weights writes files with the
        # placeholder substituted — confirm this directory branch ever
        # matches a real file.
        rel_weights_file_path = os.path.join(rel_weights_path, REL_WEIGHTS_FILENAME)
    if os.path.isfile(rel_weights_file_path):
        logger.info(f"Rel weights will be taken from {rel_weights_file_path}")
        # BUG FIX: open the resolved file path, not the original argument,
        # which may be a directory (the original opened rel_weights_path).
        with open(rel_weights_file_path, "r") as rel_weights_file:
            for line in rel_weights_file.readlines():
                sep_pos = line.find(':')
                op_name = line[:sep_pos]
                op_weight = float(line[sep_pos + 1:].replace('\n', ''))
                rel_weights.update({op_name: op_weight})
    else:
        logger.warning(f"Rel weights file does not exist! The expected passrates will be taken from runtime")
    return rel_weights
def merge_xmls(xml_paths: list): def merge_xmls(xml_paths: list):
logger.info("Merging XML files is started") logger.info("Merging XML files is started")
@ -248,10 +269,12 @@ def serialize_to_csv(report_filename: str, output_dir: os.path, op_list: list, d
def create_summary(summary_root: Element, output_folder: os.path, expected_devices:list, report_tag: str, report_version: str, def create_summary(summary_root: Element, output_folder: os.path, expected_devices:list, report_tag: str, report_version: str,
is_conformance_mode: bool, is_serialize_to_csv: bool, output_filename='report'): is_conformance_mode: bool, is_serialize_to_csv: bool, rel_weights_path: str, output_filename='report'):
rel_weights = dict()
if is_conformance_mode: if is_conformance_mode:
stat_update_utils.update_conformance_test_counters(summary_root) stat_update_utils.update_conformance_test_counters(summary_root)
stat_update_utils.update_passrates(summary_root.find("results")) rel_weights = parse_rel_weights(rel_weights_path)
stat_update_utils.update_passrates(summary_root.find("results"), rel_weights)
device_list, results, general_pass_rate, general_pass_rate_rel, pass_rate_avg, pass_rate_avg_rel, general_test_count, trusted_ops, covered_ops = \ device_list, results, general_pass_rate, general_pass_rate_rel, pass_rate_avg, pass_rate_avg_rel, general_test_count, trusted_ops, covered_ops = \
collect_statistic(summary_root, is_conformance_mode) collect_statistic(summary_root, is_conformance_mode)
@ -297,7 +320,6 @@ def create_summary(summary_root: Element, output_folder: os.path, expected_devic
if is_serialize_to_csv: if is_serialize_to_csv:
serialize_to_csv(output_filename, output_folder, op_list, device_list, results) serialize_to_csv(output_filename, output_folder, op_list, device_list, results)
if __name__ == "__main__": if __name__ == "__main__":
args = parse_arguments() args = parse_arguments()
summary_root = merge_xmls(args.xml) summary_root = merge_xmls(args.xml)
@ -307,5 +329,6 @@ if __name__ == "__main__":
args.report_version, args.report_version,
args.conformance_mode, args.conformance_mode,
args.csv, args.csv,
args.rel_weights,
args.output_filename) args.output_filename)

View File

@ -32,3 +32,6 @@ RELEASE_DIR = "Release"
OP_CONFORMANCE = "OP" OP_CONFORMANCE = "OP"
API_CONFORMANCE = "API" API_CONFORMANCE = "API"
REL_WEIGHTS_FILENAME = "rel_weights_REPLACE.lst"
REL_WEIGHTS_REPLACE_STR = "REPLACE"

View File

@ -15,7 +15,7 @@ def update_rel_values(xml_node: ET.SubElement):
if not "relative_passed" in xml_node.attrib: if not "relative_passed" in xml_node.attrib:
xml_node.set("relative_passed", xml_node.attrib.get("passed")) xml_node.set("relative_passed", xml_node.attrib.get("passed"))
def update_passrates(results: ET.SubElement): def update_passrates(results: ET.SubElement, rel_weights={}):
for device in results: for device in results:
for op in device: for op in device:
passed_tests = 0 passed_tests = 0
@ -33,6 +33,9 @@ def update_passrates(results: ET.SubElement):
rel_passed_tests = float(op.attrib.get(attrib)) rel_passed_tests = float(op.attrib.get(attrib))
continue continue
elif attrib == "relative_all": elif attrib == "relative_all":
if op.tag in rel_weights.keys():
rel_all_tests = rel_weights[op.tag]
else:
rel_all_tests = float(op.attrib.get(attrib)) rel_all_tests = float(op.attrib.get(attrib))
continue continue
total_tests += int(float(op.attrib.get(attrib))) total_tests += int(float(op.attrib.get(attrib)))