Fix deprecation warnings in MO (#6016)

* Add code style fixes

* Revert "Add code style fixes"

This reverts commit 490934f243.

* Fix invalid escape sequence

* Fix invalid escape sequence
Eugeny Volosenkov 2021-06-04 16:53:30 +03:00 committed by GitHub
parent 81be8c94b0
commit 859a3b8a30
8 changed files with 22 additions and 22 deletions
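
Background for the change, with a minimal standalone sketch (the `src_old`/`src_new` strings and the `model.pb` filename are illustrative, not taken from the MO sources): since Python 3.6, an unrecognized escape such as `\.` or `\d` inside a plain string literal emits a DeprecationWarning at compile time (raised to SyntaxWarning in Python 3.12). The string value itself is unchanged, so the regexes keep working; prefixing the literals with `r` only makes the backslashes explicit and silences the warning.

    import warnings

    src_old = r"import re; re.match('.*\.pb$', 'model.pb')"   # pattern without the r prefix
    src_new = r"import re; re.match(r'.*\.pb$', 'model.pb')"  # pattern with the r prefix (the fix)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        compile(src_old, "<old>", "exec")  # warns: invalid escape sequence \.
        compile(src_new, "<new>", "exec")  # silent: the raw string keeps \. verbatim

    for w in caught:
        print(w.category.__name__, w.message)  # DeprecationWarning (SyntaxWarning on 3.12+)

Because Python keeps the backslash for unrecognized escapes, the old and new spellings compile to identical patterns, which is why the diff below is a pure one-for-one rewrite (22 additions, 22 deletions) with no behavior change.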

View File

@@ -140,7 +140,7 @@ def get_output_node_names_list(graph_def, user_defined_output_node_names_list: l
def deducing_metagraph_path(meta_graph_file: str):
-match = re.search('^(.*)\.(data-\d*-of-\d*|index|meta)$', meta_graph_file)
+match = re.search(r'^(.*)\.(data-\d*-of-\d*|index|meta)$', meta_graph_file)
if match is not None:
deduced_meta_graph_file = match.group(1) + '.meta'
if not os.path.isfile(deduced_meta_graph_file):
@@ -173,7 +173,7 @@ def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpo
user_output_node_names_list: list = []):
# As a provisional solution, use a native TF methods to load a model protobuf
graph_def = tf_v1.GraphDef()
-if isinstance(graph_file_name, str) and (re.match('.*\.(ckpt|meta)$', graph_file_name)):
+if isinstance(graph_file_name, str) and (re.match(r'.*\.(ckpt|meta)$', graph_file_name)):
print('[ WARNING ] The value for the --input_model command line parameter ends with ".ckpt" or ".meta" '
'extension.\n'
'It means that the model is not frozen.\n'
@@ -208,7 +208,7 @@ def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpo
# pylint: disable=no-member
with tf_v1.Session() as sess:
restorer = tf_v1.train.import_meta_graph(input_meta_graph_def)
-restorer.restore(sess, re.sub('\.meta$', '', meta_graph_file))
+restorer.restore(sess, re.sub(r'\.meta$', '', meta_graph_file))
outputs = get_output_node_names_list(input_meta_graph_def.graph_def, user_output_node_names_list)
graph_def = tf_v1.graph_util.convert_variables_to_constants(sess, input_meta_graph_def.graph_def,
outputs)
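
For reviewers unfamiliar with this loader, a hedged illustration of the (unchanged) deduction logic the hunks above touch; the `model.ckpt.*` filenames are made up for the example, the regexes are the ones shown in the diff:

    import re

    # Any checkpoint shard, index, or meta file maps back to the same .meta path.
    for name in ['model.ckpt.data-00000-of-00001', 'model.ckpt.index', 'model.ckpt.meta']:
        m = re.search(r'^(.*)\.(data-\d*-of-\d*|index|meta)$', name)
        print(m.group(1) + '.meta')  # model.ckpt.meta in all three cases

    # restore() then takes the checkpoint prefix, i.e. the path with '.meta' stripped.
    print(re.sub(r'\.meta$', '', 'model.ckpt.meta'))  # model.ckpt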

View File

@@ -108,7 +108,7 @@ def get_subgraph_output_tensors(node: Node):
tf_v1.import_graph_def(graph_def, name='')
all_constants, feed_dict = generate_feed_dict(graph, node)
for out_port, out_tensor_name in enumerate(node['output_tensors_names']):
-if not match('.*:\d+', out_tensor_name):
+if not match(r'.*:\d+', out_tensor_name):
out_tensor_name = out_tensor_name + ":" + str(out_port)
result_tensor = sess.run(graph.get_tensor_by_name(out_tensor_name), feed_dict=feed_dict)
result[out_port] = result_tensor

View File

@@ -28,17 +28,17 @@ def deduce_framework_by_namespace(argv: Namespace):
def guess_framework_by_ext(input_model_path: str) -> int:
-if re.match('^.*\.caffemodel$', input_model_path):
+if re.match(r'^.*\.caffemodel$', input_model_path):
return 'caffe'
-elif re.match('^.*\.pb$', input_model_path):
+elif re.match(r'^.*\.pb$', input_model_path):
return 'tf'
-elif re.match('^.*\.pbtxt$', input_model_path):
+elif re.match(r'^.*\.pbtxt$', input_model_path):
return 'tf'
-elif re.match('^.*\.params$', input_model_path):
+elif re.match(r'^.*\.params$', input_model_path):
return 'mxnet'
-elif re.match('^.*\.nnet$', input_model_path):
+elif re.match(r'^.*\.nnet$', input_model_path):
return 'kaldi'
-elif re.match('^.*\.mdl', input_model_path):
+elif re.match(r'^.*\.mdl', input_model_path):
return 'kaldi'
-elif re.match('^.*\.onnx$', input_model_path):
+elif re.match(r'^.*\.onnx$', input_model_path):
return 'onnx'

View File

@@ -36,8 +36,8 @@ class TestBOMFile(unittest.TestCase):
cls.existing_files = [name.rstrip() for name in bom_file.readlines()]
cls.expected_header = [re.compile(pattern) for pattern in [
-'^# Copyright \([cC]\) [0-9\-]+ Intel Corporation$',
-'^# SPDX-License-Identifier: Apache-2.0$',
+r'^# Copyright \([cC]\) [0-9\-]+ Intel Corporation$',
+r'^# SPDX-License-Identifier: Apache-2.0$',
]]
def test_bom_file(self):

View File

@@ -18,4 +18,4 @@ class TestLoader(unittest.TestCase):
mock = Mock(__bool__=MagicMock(side_effect=Exception()))
self.assertRaises(Exception, load_tf_graph_def, path, meta_graph_file=mock)
self.assertRegex(out.getvalue(),
-'\[ WARNING ] The value for the --input_model command line parameter ends with "\.ckpt"')
+r'\[ WARNING ] The value for the --input_model command line parameter ends with "\.ckpt"')

View File

@@ -341,7 +341,7 @@ class TestGraphShapeChecker(unittest.TestCase):
del graph.node['2_data']['shape']
with self.assertRaisesRegex(Error, "Graph contains data nodes \(1\) with inconsistent shapes:.*"):
with self.assertRaisesRegex(Error, r"Graph contains data nodes \(1\) with inconsistent shapes:.*"):
graph.check_shapes_consistency()
def test_check_shape_consistency_2(self):
@@ -358,7 +358,7 @@ class TestGraphShapeChecker(unittest.TestCase):
graph.node['1_data']['shape'] = (1, 2, 3)
graph.node['2_data']['shape'] = (1, 2, 3)
with self.assertRaisesRegex(Error, "Graph contains data nodes \(2\) with inconsistent shapes:.*"):
with self.assertRaisesRegex(Error, r"Graph contains data nodes \(2\) with inconsistent shapes:.*"):
graph.check_shapes_consistency()

View File

@@ -102,9 +102,9 @@ class TestFunction(unittest.TestCase):
nodes_attributes = {
'input': {'kind': 'op', 'type': 'Parameter', 'ports': {0: (shape, 'abc,def')}},
'input_data': {'shape': shape, 'kind': 'data'},
-'add': {'kind': 'op', 'type': 'Add', 'ports': {2: (shape, 'ghi\,jkl')}},
+'add': {'kind': 'op', 'type': 'Add', 'ports': {2: (shape, r'ghi\,jkl')}},
'add_data': {'shape': shape, 'kind': 'data'},
-'add_const': {'kind': 'op', 'type': 'Const', 'ports': {0: (shape, 'mno,pqr\,stu')}},
+'add_const': {'kind': 'op', 'type': 'Const', 'ports': {0: (shape, r'mno,pqr\,stu')}},
'add_const_data': {'shape': shape, 'kind': 'data'},
'result': {'kind': 'op', 'type': 'Result', 'ports': {0: (shape, None)}}
}

View File

@@ -36,10 +36,10 @@ correct_proto_message_8 = 'model {good_list: [3.0, 5.0, ]}'
correct_proto_message_9 = ' first_stage_anchor_generator {grid_anchor_generator {height_stride: 16, width_stride:' \
' 16 scales: [ 0.25, 0.5, 1.0, 2.0], aspect_ratios: [] }}'
-correct_proto_message_10 = 'train_input_reader {label_map_path: "C:\mscoco_label_map.pbtxt"' \
+correct_proto_message_10 = r'train_input_reader {label_map_path: "C:\mscoco_label_map.pbtxt"' \
' tf_record_input_reader { input_path: "PATH_TO_BE_CONFIGURED/ mscoco_train.record" }}'
-correct_proto_message_11 = 'model {path: "C:\[{],}" other_value: [1, 2, 3, 4]}'
+correct_proto_message_11 = r'model {path: "C:\[{],}" other_value: [1, 2, 3, 4]}'
incorrect_proto_message_1 = 'model { bad_no_value }'
@@ -121,14 +121,14 @@ class TestingSimpleProtoParser(unittest.TestCase):
def test_correct_proto_reader_from_string_with_windows_path(self):
result = SimpleProtoParser().parse_from_string(correct_proto_message_10)
expected_result = {
-'train_input_reader': {'label_map_path': "C:\mscoco_label_map.pbtxt",
+'train_input_reader': {'label_map_path': r"C:\mscoco_label_map.pbtxt",
'tf_record_input_reader': {
'input_path': "PATH_TO_BE_CONFIGURED/ mscoco_train.record"}}}
self.assertDictEqual(result, expected_result)
def test_correct_proto_reader_from_string_with_special_characters_in_string(self):
result = SimpleProtoParser().parse_from_string(correct_proto_message_11)
-expected_result = {'model': {'path': "C:\[{],}",
+expected_result = {'model': {'path': r"C:\[{],}",
'other_value': [1, 2, 3, 4]}}
self.assertDictEqual(result, expected_result)
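
A small sanity check (illustrative only, not part of the test suite) for why the raw-string rewrite of these Windows-path fixtures is value-preserving: `\m` and `\[` are not recognized Python escapes, so the interpreter keeps the backslash, and the raw and escaped spellings denote the same strings.

    # The rewritten fixtures and expected values compare equal to the old
    # spellings, so the assertions above still pass unchanged.
    assert r"C:\mscoco_label_map.pbtxt" == "C:\\mscoco_label_map.pbtxt"
    assert r"C:\[{],}" == "C:\\[{],}"
    print("fixture values unchanged")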