[CONFORMANCE] Prepare model filelist for SubgraphsDumper (#20617)
* [CONFORMANCE] Prepare model filelist for SubgraphsDumper * Improve default and custom scenario * Update constants.py * remove extra file * improvement
This commit is contained in:
parent
0d68bb3c14
commit
e4f4714fca
@ -0,0 +1,108 @@
|
||||
# Copyright (C) 2023 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import os
|
||||
|
||||
from argparse import ArgumentParser
|
||||
from pathlib import Path
|
||||
import sys
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
from utils.file_utils import prepare_filelist, find_latest_dir
|
||||
from utils.conformance_utils import get_logger
|
||||
from utils.constants import SUPPORTED_MODEL_EXTENSION
|
||||
|
||||
logger = get_logger("prepare_model_list")
|
||||
|
||||
def get_default_re_path(is_take_all_models=False):
    """Return the path of the regex-list file used to discover model directories.

    When `is_take_all_models` is True the "custom" list is returned,
    otherwise the curated "default" list next to it is used. Both live in
    the sibling `data` directory of this script.
    """
    script_dir = os.path.dirname(os.path.abspath(__file__))
    lst_name = "custom_re_to_find_models.lst" if is_take_all_models else "default_re_to_find_models.lst"
    return os.path.join(script_dir, "..", "data", lst_name)
|
||||
|
||||
def _str_to_bool(value):
    """Parse a CLI string into a real bool.

    `type=bool` is a well-known argparse footgun: any non-empty string
    (including "False") is truthy, so `-l False` would enable the flag.
    """
    if isinstance(value, bool):
        return value
    return value.strip().lower() in ("1", "true", "yes", "on")


def parse_arguments():
    """Parse and return the command line arguments of the script."""
    parser = ArgumentParser()

    model_help = "Path to model directories path file to prepare filelist. Separator is `,`"
    output_help = "Path to output dir to save model list file"
    filename_help = "Output filename to save model list file"
    latest_only_help = "Use only latest directory matched reg exp. In other case all directories will be taken from the dir"

    parser.add_argument("-m", "--model_dirs", type=str, help=model_help, required=True)
    parser.add_argument("-o", "--output_dir", type=str, help=output_help, required=False, default=".")
    parser.add_argument("-f", "--filename", type=str, help=filename_help, required=False, default="model_filelist")
    # `nargs='?'` keeps both spellings working: a bare `-l` enables the flag
    # (const=True) while `-l True`/`-l False` is parsed by _str_to_bool.
    parser.add_argument("-l", "--latest_only", type=_str_to_bool, nargs='?', const=True,
                        help=latest_only_help, required=False, default=False)

    return parser.parse_args()
|
||||
|
||||
|
||||
def str_to_dir_list(input_str: str):
    """Split a comma-separated string into a list of existing directories.

    Segments that are empty or that do not point to an existing directory
    are silently dropped — same outcome as the original hand-rolled
    `find(',')` loop, which never appended non-directories either.

    :param input_str: comma-separated directory paths
    :return: list of paths that exist and are directories
    """
    dir_path_list = [path for path in input_str.split(',') if os.path.isdir(path)]
    logger.info(f"Model dir list: {dir_path_list}")
    return dir_path_list
|
||||
|
||||
|
||||
def read_dir_re_exp(re_exp_file_path: str):
    """Read directory glob patterns from a file, one pattern per line.

    Any line containing `#` is treated as a comment (original semantics
    kept on purpose). Blank lines are now skipped as well: previously an
    empty pattern was appended, and an empty pattern makes `Path.glob('')`
    raise ValueError downstream.

    Falls back to a single `*` pattern when the file is missing or yields
    no usable patterns.

    :param re_exp_file_path: path to the pattern list file
    :return: non-empty list of glob patterns
    """
    dir_re_exps = []
    if os.path.isfile(re_exp_file_path):
        with open(re_exp_file_path, "r") as re_exp_file:
            for line in re_exp_file:
                pattern = line.replace('\n', '')
                if "#" in pattern or not pattern:
                    continue
                dir_re_exps.append(pattern)
    if len(dir_re_exps) == 0:
        dir_re_exps.append('*')
    logger.info(f"Model dir re exp list: {dir_re_exps}")
    return dir_re_exps
|
||||
|
||||
|
||||
def generate_model_list_file(input_str: str, re_exp_file_path: str, output_file_path: str, is_latest_only: bool):
    """Collect model files from the given directories and write a filelist.

    :param input_str: comma-separated list of model directories
    :param re_exp_file_path: file with glob patterns selecting sub-directories
    :param output_file_path: destination path of the generated filelist
    :param is_latest_only: take only the most recently modified matching dir
    """
    with open(output_file_path, 'w', newline='\n') as output_file:
        model_dir_paths = str_to_dir_list(input_str)
        dir_re_exps = read_dir_re_exp(re_exp_file_path)
        model_list = list()
        for model_dir_path in model_dir_paths:
            for dir_re_exp in dir_re_exps:
                # `*` means "use the directory itself"; otherwise expand the pattern
                dirs = [model_dir_path]
                if dir_re_exp != "*":
                    if is_latest_only:
                        dirs = [find_latest_dir(model_dir_path, dir_re_exp)]
                    else:
                        dirs = Path(model_dir_path).glob(dir_re_exp)
                for dir_path in dirs:
                    try:
                        logger.info(f"Processing dir: {dir_path}")
                        model_list.extend(prepare_filelist(str(dir_path), SUPPORTED_MODEL_EXTENSION, is_save_to_file=False))
                        if is_latest_only:
                            break
                    except Exception as e:
                        # Best-effort scan: a broken directory must not abort the
                        # whole run, but report it instead of the old `except: pass`.
                        logger.warning(f"Failed to process dir {dir_path}: {e}")
        for line in model_list:
            output_file.write(f"{line}\n")
|
||||
|
||||
if __name__ == "__main__":
    # Entry point: build a model filelist from the passed model directories.
    args = parse_arguments()
    os.makedirs(args.output_dir, exist_ok=True)
    for arg_name in ("model_dirs", "output_dir", "filename", "latest_only"):
        logger.info(f"[ ARGUMENTS ] --{arg_name}={getattr(args, arg_name)}")
    take_all_models = not args.latest_only
    re_file = get_default_re_path(take_all_models)
    if take_all_models:
        logger.warning(f"{re_file} will be taken to get all models from the dirs")
    output_model_list_file = os.path.join(args.output_dir, f"{args.filename}.lst")
    generate_model_list_file(args.model_dirs, re_file, output_model_list_file, args.latest_only)
    logger.info(f"Model file list is saved to {output_model_list_file}")
|
@ -0,0 +1 @@
|
||||
*
|
@ -0,0 +1,5 @@
|
||||
ww*_dynamic*
|
||||
ww*_pytorch*
|
||||
ww*_static*
|
||||
ww42*tfhub*
|
||||
# ww42*tflite*
|
@ -164,8 +164,7 @@ class Conformance:
|
||||
rmtree(conformance_ir_path)
|
||||
os.mkdir(conformance_ir_path)
|
||||
self._model_path = file_utils.prepare_filelist(self._model_path,
|
||||
["*.onnx", "*.pdmodel", "*.__model__", "*.pb", "*.xml",
|
||||
"*.tflite"])
|
||||
constants.SUPPORTED_MODEL_EXTENSION)
|
||||
logger.info(f"Stating model dumping from {self._model_path}")
|
||||
cmd = f'{subgraph_dumper_path} --input_folders="{self._model_path}" --output_folder="{conformance_ir_path}"'
|
||||
process = Popen(cmd, shell=True)
|
||||
|
@ -56,3 +56,16 @@ ENCODING = 'UTF-8'
|
||||
META_EXTENSION = ".meta"
|
||||
XML_EXTENSION = ".xml"
|
||||
BIN_EXTENSION = ".bin"
|
||||
|
||||
# File-name patterns of model formats supported by the conformance tooling.
# NOTE: the inline list this constant replaced also matched "*.__model__"
# (PaddlePaddle un-combined format); keep it to avoid a silent regression.
SUPPORTED_MODEL_EXTENSION = [
    # ONNX
    "*.onnx",
    # PDPD (PaddlePaddle)
    "*.pdmodel",
    "*.__model__",
    # TF
    "*.pb",
    # OV IR
    "*.xml",
    # TFLITE
    "*.tflite",
]
|
||||
|
@ -13,7 +13,7 @@ from . import constants
|
||||
from . import conformance_utils
|
||||
|
||||
# generates file list file inside directory. Returns path to saved filelist
|
||||
def prepare_filelist(input_dir: os.path, patterns: list):
|
||||
def prepare_filelist(input_dir: os.path, patterns: list, is_save_to_file = True):
|
||||
filelist_path = input_dir
|
||||
if os.path.isdir(filelist_path):
|
||||
filelist_path = os.path.join(input_dir, "conformance_ir_files.lst")
|
||||
@ -24,15 +24,17 @@ def prepare_filelist(input_dir: os.path, patterns: list):
|
||||
conformance_utils.UTILS_LOGGER.info(f"{filelist_path} is exists! The script will update it!")
|
||||
model_list = list()
|
||||
for pattern in patterns:
|
||||
for model in Path(input_dir).rglob(pattern):
|
||||
model_list.append(model)
|
||||
try:
|
||||
with open(filelist_path, 'w') as file:
|
||||
for xml in model_list:
|
||||
file.write(str(xml) + '\n')
|
||||
file.close()
|
||||
except:
|
||||
conformance_utils.UTILS_LOGGER.warning(f"Impossible to update {filelist_path}! Something going is wrong!")
|
||||
model_list.extend(Path(input_dir).rglob(pattern))
|
||||
if is_save_to_file:
|
||||
try:
|
||||
with open(filelist_path, 'w') as file:
|
||||
for xml in model_list:
|
||||
file.write(str(xml) + '\n')
|
||||
file.close()
|
||||
except:
|
||||
conformance_utils.UTILS_LOGGER.warning(f"Impossible to update {filelist_path}! Something going is wrong!")
|
||||
else:
|
||||
return model_list
|
||||
return filelist_path
|
||||
|
||||
def is_archieve(input_path: os.path):
|
||||
@ -68,27 +70,22 @@ def unzip_archieve(zip_path: os.path, dst_path: os.path):
|
||||
return dst_dir
|
||||
|
||||
# find latest changed directory
|
||||
def find_latest_dir(in_dir: Path, pattern_list = list()):
|
||||
get_latest_dir = lambda path: sorted(Path(path).iterdir(), key=os.path.getmtime)
|
||||
def find_latest_dir(in_dir: Path, pattern = "*"):
|
||||
get_latest_dir = lambda path: sorted(Path(path).glob(pattern), key=os.path.getmtime)
|
||||
entities = get_latest_dir(in_dir)
|
||||
entities.reverse()
|
||||
|
||||
for entity in entities:
|
||||
if entity.is_dir():
|
||||
if not pattern_list:
|
||||
return entity
|
||||
else:
|
||||
for pattern in pattern_list:
|
||||
if pattern in str(os.fspath(PurePath(entity))):
|
||||
return entity
|
||||
conformance_utils.UTILS_LOGGER.error(f"{in_dir} does not contain applicable directories to patterns: {pattern_list}")
|
||||
return entity
|
||||
conformance_utils.UTILS_LOGGER.error(f"{in_dir} does not contain applicable directories to pattern: {pattern}")
|
||||
exit(-1)
|
||||
|
||||
def get_ov_path(script_dir_path: os.path, ov_dir=None, is_bin=False):
|
||||
if ov_dir is None or not os.path.isdir(ov_dir):
|
||||
ov_dir = os.path.abspath(script_dir_path)[:os.path.abspath(script_dir_path).find(constants.OPENVINO_NAME) + len(constants.OPENVINO_NAME)]
|
||||
if is_bin:
|
||||
ov_dir = os.path.join(ov_dir, find_latest_dir(ov_dir, ['bin']))
|
||||
ov_dir = os.path.join(ov_dir, find_latest_dir(ov_dir, 'bin'))
|
||||
ov_dir = os.path.join(ov_dir, find_latest_dir(ov_dir))
|
||||
ov_dir = os.path.join(ov_dir, find_latest_dir(ov_dir, [constants.DEBUG_DIR, constants.RELEASE_DIR]))
|
||||
return ov_dir
|
||||
|
Loading…
Reference in New Issue
Block a user