[MO] tmp IR cleanup if serialization or fem.load_by_framework fails (#14712)

* clean up tmp IR files after unexpected failures (a minimal sketch of the pattern precedes the diffs below)

* separated exception catching for tmp IR serialization and fe.load

* updated file path specification

* moved base MO arguments into utils

* removed redundant 'except' blocks

* shortened the MO argument specification

* run unit tests from a subprocess; merged two try/except blocks into one

* changed the test file postfix so the test is collected and run only once, without duplication
Pavel Esir 2022-12-22 19:35:13 +00:00 committed by GitHub
parent 31112e2c10
commit a7b3ae6a9d
3 changed files with 88 additions and 26 deletions
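
Before the diffs, here is a minimal, self-contained sketch of the cleanup pattern the first commit-message bullet introduces. It is an illustration under stated assumptions, not the actual MO code: clear_tmp_files, emit_and_read_tmp_ir, serialize and read_back are hypothetical stand-ins for prepare_emit_ir and read_model in convert_impl.py.

import os


def clear_tmp_files(base_name):
    # Remove temporary IR artifacts written next to the final model.
    for suf in (".xml", ".bin", ".mapping"):
        path_to_file = base_name + "_tmp" + suf
        if os.path.exists(path_to_file):
            os.remove(path_to_file)


def emit_and_read_tmp_ir(base_name, serialize, read_back):
    # serialize and read_back are hypothetical callables standing in for
    # prepare_emit_ir and read_model in the real change.
    try:
        serialize(base_name + "_tmp")
        return read_back(base_name + "_tmp.xml")
    except Exception as err:
        # Surface the failure with context, as convert_impl.py does with Error.
        raise RuntimeError(
            "serialization or reading of the temporary IR failed: {}".format(err)) from err
    finally:
        # Runs on both success and failure, so no *_tmp.* files are left behind.
        clear_tmp_files(base_name)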


@@ -401,15 +401,16 @@ def prepare_ir(argv: argparse.Namespace):
     return graph, ngraph_function
 
 
-def emit_ir(graph: Graph, argv: argparse.Namespace, non_default_params: dict):
+def read_model(fem: FrontEndManager, path_to_xml: str):
     # We have to separate fe object lifetime from fem to
     # avoid segfault during object destruction. So fe must
     # be destructed before fem object explicitly.
-    def read_model(path_to_xml):
-        fe = fem.load_by_framework(framework="ir")
-        function = fe.convert(fe.load(path_to_xml))
-        return function
+    fe = fem.load_by_framework(framework="ir")
+    function = fe.convert(fe.load(path_to_xml))
+    return function
+
 
+def emit_ir(graph: Graph, argv: argparse.Namespace, non_default_params: dict):
     NormalizeTI().find_and_replace_pattern(graph)
     for_graph_and_each_sub_graph_recursively(graph, RemoveConstOps().find_and_replace_pattern)
     for_graph_and_each_sub_graph_recursively(graph, CreateConstNodesReplacement().find_and_replace_pattern)
@@ -420,23 +421,36 @@ def emit_ir(graph: Graph, argv: argparse.Namespace, non_default_params: dict):
     mean_data = deepcopy(graph.graph['mf']) if 'mf' in graph.graph else None
     input_names = deepcopy(graph.graph['input_names']) if 'input_names' in graph.graph else []
 
-    prepare_emit_ir(graph=graph,
-                    data_type=graph.graph['cmd_params'].data_type,
-                    output_dir=argv.output_dir,
-                    output_model_name=argv.model_name,
-                    mean_data=mean_data,
-                    input_names=input_names,
-                    meta_info=non_default_params,
-                    use_temporary_path=True)
-
-    # This graph cleanup is required to avoid double memory consumption
-    graph.clear()
-
     output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
     orig_model_name = os.path.normpath(os.path.join(output_dir, argv.model_name))
 
-    fem = FrontEndManager()
-    func = read_model(orig_model_name + "_tmp.xml")
+    def clear_tmp_ir_files():
+        for suf in [".xml", ".bin", ".mapping"]:
+            # remove existing files
+            path_to_file = orig_model_name + "_tmp" + suf
+            if os.path.exists(path_to_file):
+                os.remove(path_to_file)
+
+    try:
+        prepare_emit_ir(graph=graph,
+                        data_type=graph.graph['cmd_params'].data_type,
+                        output_dir=argv.output_dir,
+                        output_model_name=argv.model_name,
+                        mean_data=mean_data,
+                        input_names=input_names,
+                        meta_info=non_default_params,
+                        use_temporary_path=True)
+
+        fem = FrontEndManager()
+        func = read_model(fem, orig_model_name + "_tmp.xml")
+    except Exception as err:
+        raise Error('Exception occurred while serialization or reading of the temporary IR: {}'.format(
+            str(err),
+        )) from err
+    finally:
+        # This graph cleanup is required to avoid double memory consumption
+        graph.clear()
+        clear_tmp_ir_files()
 
     return_code = "not executed"
     if not (argv.framework == 'tf' and argv.tensorflow_custom_operations_config_update):
@@ -461,12 +475,6 @@ def emit_ir(graph: Graph, argv: argparse.Namespace, non_default_params: dict):
         t = tm.Telemetry()
         t.send_event('mo', 'offline_transformations_status', message)
 
-        for suf in [".xml", ".bin", ".mapping"]:
-            # remove existing files
-            path_to_file = orig_model_name + "_tmp" + suf
-            if os.path.exists(path_to_file):
-                os.remove(path_to_file)
-
         if return_code != 0:
             raise Error("offline transformations step has failed.")
@@ -866,7 +874,6 @@ def _convert(**args):
                                                 example_inputs,
                                                 out_dir)
-            args['input_model'] = model_onnx
 
             if os.environ.get('SAVE_TO_BYTES_IO_ONNX_MODEL'):
                 args['use_legacy_frontend'] = True


@@ -101,3 +101,12 @@ def test_mo_model_analysis():
     status = subprocess.run(args, env=os.environ)
     assert not status.returncode
+
+
+def test_convert_impl_tmp_irs_cleanup():
+    setup_env()
+    args = [sys.executable, '-m', 'pytest',
+            os.path.join(os.path.dirname(__file__), 'utils', 'convert_impl_tmp_irs_cleanup_test_actual.py')]
+
+    status = subprocess.run(args, env=os.environ)
+    assert not status.returncode


@@ -0,0 +1,46 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import os
import unittest
from unittest.mock import patch

from openvino.tools.mo.convert import convert_model
from openvino.tools.mo.utils.error import Error


class TestConvertImplTmpIrsCleanup(unittest.TestCase):
    test_model_file = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
                                   "moc_tf_fe/test_models/mul_with_unknown_rank_y.pbtxt")

    @staticmethod
    def are_tmp_files_left(orig_model_name):
        for suf in [".xml", ".bin", ".mapping"]:
            path_to_file = orig_model_name.replace('.pbtxt', '_tmp' + suf)
            if os.path.exists(path_to_file):
                return True
        return False

    def test_tmp_irs_cleanup_convert_impl_1(self):
        with patch("openvino.tools.mo.back.offline_transformations.apply_offline_transformations") as emit_ir_func:
            emit_ir_func.side_effect = Error('offline transformations step has failed')

            params = {'input_model': self.test_model_file, 'input_model_is_text': True, 'input': 'x[3],y[1 3]'}
            self.assertRaisesRegex(Error, 'offline transformations step has failed', convert_model, **params)
            self.assertFalse(self.are_tmp_files_left(self.test_model_file))

    def test_tmp_irs_cleanup_convert_impl_2(self):
        with patch("openvino.tools.mo.back.ie_ir_ver_2.emitter.add_net_rt_info") as emit_ir_func:
            emit_ir_func.side_effect = Error('emitting tmp IR has failed')

            params = {'input_model': self.test_model_file, 'input_model_is_text': True, 'input': 'x[3],y[1 3]'}
            self.assertRaisesRegex(Error, 'emitting tmp IR has failed', convert_model, **params)
            self.assertFalse(self.are_tmp_files_left(self.test_model_file))

    def test_tmp_irs_cleanup_convert_impl_3(self):
        with patch("openvino.tools.mo.convert_impl.read_model") as emit_ir_func:
            emit_ir_func.side_effect = Exception('FEM read_model has failed')

            params = {'input_model': self.test_model_file, 'input_model_is_text': True, 'input': 'x[3],y[1 3]'}
            self.assertRaisesRegex(Error, 'FEM read_model has failed', convert_model, **params)
            self.assertFalse(self.are_tmp_files_left(self.test_model_file))
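
For context only, and not part of the commit: a tiny, self-contained illustration of the side_effect mechanism the three tests above rely on. It uses MagicMock directly instead of patching a module path, and all names here are hypothetical.

import unittest
from unittest.mock import MagicMock


class SideEffectDemo(unittest.TestCase):
    def test_forced_failure(self):
        # Assigning an exception instance to side_effect makes every call to
        # the mock raise it; the tests above use the same trick to force a
        # failure at a chosen point inside convert_model.
        failing_step = MagicMock(side_effect=RuntimeError('step has failed'))
        self.assertRaisesRegex(RuntimeError, 'step has failed', failing_step)


if __name__ == '__main__':
    unittest.main()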