Add attr info to rename_conformance_ir hash (#19277)
parent 4703196f5c
commit 0cc3044764
@@ -13,6 +13,8 @@ from utils.file_utils import get_ov_path, find_latest_dir
 import defusedxml.ElementTree as ET

 import os
+import re
+import errno

 logger = get_logger('rename_conformance_ir')

@@ -58,8 +60,8 @@ def parse_arguments():

 def check_file(path: Path):
     if not path.is_file:
-        logger.error(f"File {path} is not exist!")
-        exit(-1)
+        logger.error(f"File {path} is not exist or can't be opened!")
+        raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), path)

 def generate_op_name(type_info):
     op_name = type_info.name
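
The behavioral change in this hunk: a missing file no longer kills the whole run with exit(-1); check_file now raises OSError(ENOENT), which the try/except added to create_hash further down turns into a per-model skip. Note that the guard reads path.is_file without parentheses, so the bound method is always truthy and the branch can never fire as written; path.is_file() would perform the real check. A minimal sketch of the new contract (file name hypothetical, is_file parenthesized so the sketch actually triggers):

    import errno
    import os
    from pathlib import Path

    def check_file(path: Path):
        if not path.is_file():
            raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), path)

    try:
        check_file(Path("missing.xml"))
    except OSError as err:
        # caller decides: skip this one model instead of aborting the run
        print(f"skipped: {err.filename}")
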
@@ -93,49 +95,44 @@ def is_report_op(op_name:str):
         return False
     return True


 def generate_node_hash(node):
     str_to_hash = ""
     for input in node.inputs():
         input_node = input.get_node()
-        len_shape = None
+
+        shape_str = ""
         try:
-            len_shape = len(input.get_partial_shape())
+            str_to_hash += re.sub(r"[\s+\[\]\{\}\']", "", str(node.get_attributes()))
         except:
+            logger.error(f"Impossible to get attributes for {node.name}")
+
+        try:
+            partial_shape = input.get_partial_shape()
+            shape_str += str(len(partial_shape))
+            shape_str += str(partial_shape.is_dynamic)
+        except:
             logger.error(f"Impossible to get input_shape for {input_node.name}")
-        str_to_hash += str(len_shape) + str(input.get_element_type().get_type_name()) + str(input.get_partial_shape().is_dynamic) + \
+
+        str_to_hash += shape_str + str(input.get_element_type().get_type_name()) + \
             str(input_node.get_type_info().name) + str(input_node.get_type_info().version_id)

     for output in node.outputs():
         output_node = output.get_node()
-        len_shape = None
+
+        shape_str = ""
         try:
-            len_shape = len(output.get_partial_shape())
+            partial_shape = output.get_partial_shape()
+            shape_str += str(len(partial_shape))
+            shape_str += str(partial_shape.is_dynamic)
         except:
             logger.error(f"Impossible to get output_shape for {output.names.pop()}")
-        str_to_hash += str(len_shape) + str(output.get_element_type().get_type_name()) + str(output.get_partial_shape().is_dynamic) + \
+
+        str_to_hash += shape_str + str(output.get_element_type().get_type_name()) + \
             str(output_node.get_type_info().name) + str(output_node.get_type_info().version_id)

     return str_to_hash


 def generate_convert_hash(model_path: Path):
+    try:
         buf = dict()
         res = str()
         layers = ET.parse(model_path).getroot().find("layers")
         for op in layers:
             name = f'{op.attrib.get("type")}_{op.attrib.get("version")}'
             if not name in buf.keys():
                 buf.update({name: list()})
             for child in op:
                 buf[name].append(ET.tostring(child).decode('utf8').replace('\n', '').replace('\t', ''))
         for op_name, set_attributes in buf.items():
             res += op_name
             for attribute in set_attributes:
                 res += attribute
         return res
+    except ET.ParseError:
+        logger.error(f' {model_path} is corrupted and skipped')
+        return None


 def create_hash(in_dir_path: Path, operations=dict()):
     core = Core()
     models = in_dir_path.rglob("*.xml")
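
This is the hunk that gives the commit its name: generate_node_hash now folds the node's attribute dictionary into the hash string, normalized by stripping whitespace, plus signs, brackets, braces, and quotes, so two IRs that differ only in attribute values (e.g. strides or pads) no longer collide under one name. A standalone sketch of the normalization, with a literal dict standing in for node.get_attributes():

    import re

    # stands in for node.get_attributes() on a Convolution-like op
    attrs = {'strides': [1, 1], 'pads_begin': [0, 0], 'auto_pad': 'explicit'}
    normalized = re.sub(r"[\s+\[\]\{\}\']", "", str(attrs))
    print(normalized)  # strides:1,1,pads_begin:0,0,auto_pad:explicit

generate_convert_hash applies the same idea to Convert ops by serializing each layer's child elements straight from the IR XML, and its new try/except returns None for a corrupted file instead of raising.
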
@@ -144,22 +141,14 @@ def create_hash(in_dir_path: Path, operations=dict()):
         bin_path = model_path.with_suffix(BIN_EXTENSION)
         meta_path = model_path.with_suffix(META_EXTENSION)

-        check_file(model_path)
-        check_file(bin_path)
-        check_file(meta_path)
+        try:
+            check_file(model_path)
+            check_file(bin_path)
+            check_file(meta_path)

-        str_to_hash = str()
-        rel_weight = get_rel_weight(meta_path)
+            str_to_hash = str()
+            rel_weight = get_rel_weight(meta_path)
             # todo: remove w/a to provide correct convert reporting after merge CVS-110714
             if (os.sep + CONVERT_OP_NAME + os.sep) in str(model_path):
                 str_to_hash = generate_convert_hash(model_path)
                 if not CONVERT_OP_NAME in operations.keys():
                     operations.update({CONVERT_OP_NAME: TestStructure()})
                 if "static" in str(model_path):
                     operations[CONVERT_OP_NAME].static += rel_weight
                 elif "dynamic" in str(model_path):
                     operations[CONVERT_OP_NAME].dynamic += rel_weight
             else:
                 try:
                     model = core.read_model(model_path)
                     for node in model.get_ordered_ops():
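
The hunk above wraps the per-model body in a try so that one broken model (now an OSError from check_file) is skipped rather than aborting the pass, and routes Convert ops through generate_convert_hash while accumulating their relative weights per shape kind. A sketch of that accounting, assuming CONVERT_OP_NAME is "Convert" and with a stand-in for the real TestStructure:

    class TestStructure:  # stand-in for the summary-utils class
        def __init__(self):
            self.static = 0.0
            self.dynamic = 0.0

    operations = {}
    model_path = "operation/Convert/static/model.xml"  # hypothetical layout
    rel_weight = 1.0
    operations.setdefault("Convert", TestStructure())
    if "static" in model_path:
        operations["Convert"].static += rel_weight
    elif "dynamic" in model_path:
        operations["Convert"].dynamic += rel_weight
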
@@ -179,23 +168,32 @@ def create_hash(in_dir_path: Path, operations=dict()):
                     pass
             except:
                 logger.error(f"Impossible to create hash for {model_path}")
-        ports_info = ET.parse(meta_path).getroot().find("input_info")
-        str_to_hash += ET.tostring(ports_info).decode('utf8').replace('\t', '')
-
-        old_name = model_path
-        new_name = str(sha256(str_to_hash.encode('utf-8')).hexdigest())
+            try:
+                input_info = ET.parse(meta_path).getroot().find("input_info")
+                str_to_hash += ET.tostring(input_info).decode('utf8').replace('\t', '')
+            except:
+                logger.error(f"Impossible to add input_info to hash for {model_path}")

-        new_meta_path = Path(meta_path.parent, new_name + META_EXTENSION)
-        new_xml_path = Path(model_path.parent, new_name + XML_EXTENSION)
-        new_bin_path = Path(bin_path.parent, new_name + BIN_EXTENSION)
+            old_name = model_path
+            new_name = str(sha256(str_to_hash.encode('utf-8')).hexdigest())

-        if not os.path.isfile(new_meta_path):
-            model_path.rename(new_xml_path)
-            meta_path.rename(new_meta_path)
-            bin_path.rename(new_bin_path)
-            logger.info(f"{old_name} -> {new_name}")
-        elif old_name != new_name:
-            update_rel_weight(new_meta_path, rel_weight)
+            new_meta_path = Path(meta_path.parent, new_name + META_EXTENSION)
+            new_xml_path = Path(model_path.parent, new_name + XML_EXTENSION)
+            new_bin_path = Path(bin_path.parent, new_name + BIN_EXTENSION)
+
+            if not os.path.isfile(new_meta_path):
+                model_path.rename(new_xml_path)
+                meta_path.rename(new_meta_path)
+                bin_path.rename(new_bin_path)
+                # TODO: if some models are still not renaming, create new file and remove old file
+                logger.info(f"{old_name} -> {new_name}")
+            elif old_name != new_xml_path:
+                # TODO: if some models are still not renaming and there are duplicates, remove files here
+                logger.warning(f"Could not rename model {old_name} ! Model file name already exists {new_xml_path} ")
+                update_rel_weight(new_meta_path, rel_weight)
+        except:
+            pass
     return operations

 def save_rel_weights(rel_weights_dir:Path, operations: dict):
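
Two things change in this hunk: the input_info section of the .meta file now contributes to the hash inside its own try/except (logged instead of fatal), and the rename flow gains an explicit collision path, warning and merging the relative weight into the existing entry via update_rel_weight instead of silently dropping it. A sketch of how the final content-addressed name is derived (inline XML and paths are hypothetical):

    import defusedxml.ElementTree as ET
    from hashlib import sha256
    from pathlib import Path

    str_to_hash = "4f32Convertopset1"  # stands in for the node/attr summary built above
    meta = "<meta><input_info><model_input id='0' shape='[1,3,224,224]'/></input_info></meta>"
    input_info = ET.fromstring(meta).find("input_info")
    str_to_hash += ET.tostring(input_info).decode('utf8').replace('\t', '')

    new_name = sha256(str_to_hash.encode('utf-8')).hexdigest()
    model_path = Path("conformance_ir/Convert-1_static.xml")
    print(model_path.name, "->", new_name + ".xml")  # two identical IRs collide here
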
@@ -237,13 +235,11 @@ if __name__=="__main__":

     for in_dir in args.input_dir:
         if not Path(in_dir).is_dir():
-            logger.error(f"Directory {in_dir} is not exist!")
-            exit(-1)
+            continue
         logger.info(f"Starting to rename models in {in_dir}")
         operations = create_hash(Path(in_dir), operations)

     if not rel_weights_dir is None:
         save_rel_weights(rel_weights_dir, operations)

-
-    logger.info("The run is successfully completed")
+    logger.info("The run is successfully completed")
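
One more robustness tweak in the driver: a nonexistent input directory is now skipped with continue instead of terminating the run, so a multi-directory invocation still processes the directories that do exist. Sketched behavior (directory names hypothetical):

    from pathlib import Path

    operations = {}
    for in_dir in ["./ir_static", "./does_not_exist"]:
        if not Path(in_dir).is_dir():
            continue  # was: logger.error(...); exit(-1)
        # operations = create_hash(Path(in_dir), operations)
        print(f"renaming models in {in_dir}")
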