Use layouts in legacy pre-processing (#9355)

* Use layouts in legacy pre-processing

* Use layouts for batch in MO

* Unify code for channel and batch dimension

* Fix issue when idx is None
Maxim Vafin 2021-12-30 21:01:55 +03:00 committed by GitHub
parent 50a33436d4
commit 738a571742
6 changed files with 250 additions and 50 deletions
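All four bullets hinge on the user-specified layout reaching these passes through the graph's cmd_params. As a point of reference, here is a minimal sketch of the Namespace the unit tests below construct; the field names mirror those tests and the values are illustrative:

from argparse import Namespace

# Hypothetical cmd_params as the new code paths consume them; the
# layout_values structure mirrors the one used in the unit tests below.
argv = Namespace(
    reverse_input_channels=True,
    layout_values={'placeholder1': {'source_layout': 'nhwc', 'target_layout': None}},
)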


@@ -5,16 +5,17 @@ import logging as log
 import numpy as np

-from openvino.tools.mo.ops.gather import Gather
-from openvino.tools.mo.ops.split import Split
 from openvino.tools.mo.back.replacement import BackReplacementPattern
+from openvino.tools.mo.front.common.layout import get_dim_from_layout, get_features_dim
 from openvino.tools.mo.front.common.partial_infer.utils import int64_array
 from openvino.tools.mo.front.common.partial_infer.utils import mo_array
 from openvino.tools.mo.front.tf.graph_utils import create_op_with_const_inputs
 from openvino.tools.mo.graph.graph import Graph
 from openvino.tools.mo.graph.graph import Node
 from openvino.tools.mo.ops.concat import Concat
+from openvino.tools.mo.ops.gather import Gather
 from openvino.tools.mo.ops.op import Op, PermuteAttrs
+from openvino.tools.mo.ops.split import Split


 class ReverseChannels(Op):
@@ -52,50 +53,47 @@ class InsertReverseChannels(BackReplacementPattern):
     enabled = False

     @staticmethod
-    def get_fw_index(node: Node, idx: int) -> int:
-        if not node.has_valid('rt_info'):
-            return idx
-
-        rt_info = node.rt_info
-        if not rt_info.contains('old_api_map_order'):
-            return idx
-
-        old_api_map_version = rt_info.get_attribute_version('old_api_map_order')
-        old_api_map = rt_info.info['old_api_map_order', old_api_map_version]
-        if 'inverse_order' not in old_api_map.info:
-            return idx
-
-        order = old_api_map.info['inverse_order']
-        node_name = node.soft_get('name', node.id)
-
-        if idx < 0:
-            assert not node.out_port(0).disconnected(), 'Cannot normalize negative axis {} in node {} ' \
-                                                        'as out port is disconnected.'.format(idx, node_name)
-            data_rank = len(list(node.out_port(0).data.get_shape()))
-            idx = data_rank + idx
-
-        assert len(order) > idx >= 0, \
-            'Channel index {} is incompatible with old_api_map in node {}.'.format(idx, node_name)
-        return list(order).index(idx)
+    def get_channel_index(node: Node) -> int:
+        guessed_layout = 'NCHW'
+        if node.has_valid('rt_info'):
+            rt_info = node.rt_info
+            if rt_info.contains('old_api_map_order'):
+                old_api_map_version = rt_info.get_attribute_version('old_api_map_order')
+                old_api_map = rt_info.info['old_api_map_order', old_api_map_version]
+                if 'inverse_order' in old_api_map.info:
+                    order = old_api_map.info['inverse_order']
+                    assert len(order) == len(guessed_layout)
+                    guessed_layout = np.array(list(guessed_layout))[order]
+                    guessed_layout = ''.join(guessed_layout)
+        idx, has_layout = get_dim_from_layout(node, 'C')
+        if has_layout:
+            return idx
+        else:
+            return get_features_dim(guessed_layout, len(node.shape))

     def find_and_replace_pattern(self, graph: Graph):
         all_params = [(p.soft_get('name', p.id), p, list(p.out_port(0).data.get_shape()))
                       for p in graph.get_op_nodes(type='Parameter')]
-        suitable_params = [(name, p, shape) for name, p, shape in all_params if
-                           len(shape) == 4 and shape[self.get_fw_index(p, 1)] == 3]
+        suitable_params = []
+        for name, p, shape in all_params:
+            if len(shape) == 4:
+                idx = self.get_channel_index(p)
+                if idx is not None and shape[idx] == 3:
+                    suitable_params.append((name, p, shape, idx))

         log.debug('All network inputs: {}'.format({name: shape for name, _, shape in all_params}))
-        log.debug('Will reverse input channels for: {}'.format({name: shape for name, _, shape in suitable_params}))
+        log.debug('Will reverse input channels for: {}'.format({name: shape for name, _, shape, _ in suitable_params}))

         if len(suitable_params) < len(all_params):
             log.error('Network has {} inputs overall, but only {} of them are suitable for input channels reversing.\n'
                       'Suitable for input channel reversing inputs are 4-dimensional with 3 channels\nAll inputs: {}\n'
                       'Suitable inputs {}'.format(len(all_params), len(suitable_params),
                                                   {name: shape for name, _, shape in all_params},
-                                                  {name: shape for name, _, shape in suitable_params}),
+                                                  {name: shape for name, _, shape, _ in suitable_params}),
                       extra={'is_warning': True})

-        for name, parameter, _ in suitable_params:
-            reverse_index = int64_array(self.get_fw_index(parameter, 1))
+        for name, parameter, _, idx in suitable_params:
+            reverse_index = int64_array(idx)

             if parameter.out_port(0).disconnected():
                 continue
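To make the inverse_order branch in get_channel_index concrete: old_api_map_order stores the permutation that recovers the original framework layout from OpenVINO's default NCHW, so applying it to the string 'NCHW' yields the guessed layout. A minimal sketch, with an illustrative order value:

import numpy as np

order = [0, 2, 3, 1]                                      # hypothetical inverse_order from rt_info
guessed_layout = ''.join(np.array(list('NCHW'))[order])   # -> 'NHWC'
# get_features_dim('NHWC', 4) then resolves the channel index to 3.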


@@ -5,6 +5,7 @@ import numpy as np
 from openvino.tools.mo.front.common.partial_infer.utils import dynamic_dimension_value
 from openvino.tools.mo.front.common.partial_infer.utils import mo_array, int64_array
 from openvino.tools.mo.graph.graph import Node
+from openvino.tools.mo.utils.error import Error

 nchw_to_nhwc_permute = int64_array([0, 2, 3, 1])
@@ -112,3 +113,41 @@ def shape_for_layout(layout: str, **kwargs):
     if depth is not None:
         output_shape[get_depth_dim(layout, shape_len)] = depth
     return output_shape
+
+
+def get_dim_from_layout(node: Node, dim: str):
+    """
+    Gets the index of a dimension from the layout specified for the node.
+    :param node: node to get the dimension for.
+    :param dim: name of the dimension to get the index for.
+    :return: tuple of the dimension index and a bool flag indicating whether the node has a layout specified.
+    """
+    layout = None
+    graph = node.graph
+    if 'layout_values' in graph.graph['cmd_params'] and graph.graph['cmd_params'].layout_values:
+        layout_values = graph.graph['cmd_params'].layout_values.copy()
+        if '' in layout_values:
+            in_nodes = graph.get_op_nodes(op='Parameter')
+            if len(in_nodes) == 1:
+                in_node = in_nodes[0]
+                layout_values[in_node.soft_get('name', in_node.id)] = layout_values['']
+                del layout_values['']
+        name = node.soft_get('name', node.id)
+        if name in layout_values:
+            if layout_values[name]['source_layout']:
+                layout = layout_values[name]['source_layout']
+    if layout:
+        from openvino.runtime import Layout  # pylint: disable=no-name-in-module,import-error
+
+        layout_parsed = Layout(layout)
+        has_dim = layout_parsed.has_name(dim)
+        if has_dim:
+            idx = layout_parsed.get_index_by_name(dim)
+            if idx < 0:
+                idx = len(node.shape) + idx
+            return idx, True
+        else:
+            return None, True
+    else:
+        return None, False
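For reference, a small sketch of the (index, has_layout) contract above, using the same openvino.runtime.Layout calls the function relies on; the layout string is illustrative:

from openvino.runtime import Layout  # pylint: disable=no-name-in-module,import-error

layout = Layout('NHWC')
assert layout.has_name('C')                  # channel dimension present
assert layout.get_index_by_name('C') == 3    # -> get_dim_from_layout returns (3, True)
# A layout without 'C' yields (None, True): a layout is set but has no channel dimension.
# A node with no layout at all yields (None, False).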


@@ -5,13 +5,13 @@ import logging as log
 import numpy as np

-from openvino.tools.mo.ops.elementwise import Add, Mul
-from openvino.tools.mo.front.common.layout import get_features_dim
+from openvino.tools.mo.front.common.layout import get_dim_from_layout, get_features_dim
 from openvino.tools.mo.front.common.partial_infer.utils import compatible_dims
 from openvino.tools.mo.front.extractor import get_node_id_with_ports
 from openvino.tools.mo.front.tf.graph_utils import create_op_with_const_inputs
 from openvino.tools.mo.graph.graph import Graph, Node
 from openvino.tools.mo.middle.replacement import MiddleReplacementPattern
+from openvino.tools.mo.ops.elementwise import Add, Mul
 from openvino.tools.mo.utils.cli_parser import get_node_name_with_port_from_input_value
 from openvino.tools.mo.utils.error import Error
 from openvino.tools.mo.utils.utils import refer_to_faq_msg
@@ -42,14 +42,21 @@ class AddMeanScaleValues(MiddleReplacementPattern):
         if all([x == optimize_value for x in value]):
             return

         assert input_node.has_valid('shape')
-        features_dim_idx = get_features_dim(graph.graph['layout'], len(input_node.shape))
-        assert compatible_dims(value.size, input_node.shape[features_dim_idx]) or value.size == 1
+        in_name = input_node.soft_get('name', input_node.id)
+        features_dim_idx, has_layout = get_dim_from_layout(input_node, 'C')
+        if features_dim_idx is None:
+            if has_layout:
+                log.warning('Layout for input {} doesn\'t have a channel ("C") dimension to apply {} preprocessing. '
+                            'Skipping this input.'.format(in_name, preprocessing_name))
+            features_dim_idx = get_features_dim(graph.graph['layout'], len(input_node.shape))
+        assert compatible_dims(value.size, input_node.shape[features_dim_idx]) or value.size == 1, \
+            "Incompatible layout; please specify a correct layout for the node"

         shape = np.ones(len(input_node.shape), dtype=np.int64)
         shape[features_dim_idx] = value.size
         value = value.reshape(shape)

-        name = input_node.soft_get('name', input_node.id) + '/' + preprocessing_name
+        name = in_name + '/' + preprocessing_name
         preprocessing = create_op_with_const_inputs(graph, op=op, port_value_dict={1: value}, op_attrs={'name': name})
         if input_node.op == 'Parameter' and input_node.has_and_set('data_type'):
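The reshape above is what lets a per-channel mean or scale broadcast against the input tensor; a minimal numpy sketch with illustrative NHWC values:

import numpy as np

input_shape = (1, 224, 224, 3)            # hypothetical NHWC input, channel index 3
value = np.array([1., 2., 3.])            # per-channel mean
shape = np.ones(len(input_shape), dtype=np.int64)
shape[3] = value.size                     # features_dim_idx resolved from the layout
value = value.reshape(shape)              # shape (1, 1, 1, 3), broadcastable over NHWC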


@@ -6,6 +6,7 @@ from typing import List

 import networkx as nx

+from openvino.tools.mo.front.common.layout import get_dim_from_layout
 from openvino.tools.mo.front.common.partial_infer.utils import dynamic_dimension
 from openvino.tools.mo.graph.graph import Node, Graph, dict_includes
 from openvino.tools.mo.utils.error import Error
@@ -220,10 +221,20 @@ def override_batch(graph: Graph, batch: int):
     batch: user defined integer value to override batch
     """
     if batch is not None:
-        for node_id, data in graph.nodes(data=True):
-            if 'op' in data and data['op'] == 'Parameter' and not data.get('fixed_batch', False):
-                validate_batch_in_shape(data['shape'], data['name'])
-                data['shape'][0] = batch
+        in_nodes = graph.get_op_nodes(op='Parameter')
+        for node in in_nodes:
+            if not node.soft_get('fixed_batch', False):
+                name = node.soft_get('name', node.id)
+                idx, has_layout = get_dim_from_layout(node, 'N')
+                if has_layout:
+                    if idx is not None:
+                        node['shape'][idx] = batch
+                    else:
+                        log.warning(
+                            'Layout for input {} doesn\'t have a batch dimension. Skipping this input.'.format(name))
+                else:
+                    validate_batch_in_shape(node['shape'], name)
+                    node['shape'][0] = batch


 def validate_batch_in_shape(shape, layer_name: str):
@@ -242,6 +253,7 @@ def validate_batch_in_shape(shape, layer_name: str):
                      'dimension or not.\n\n For example, you want to set batch dimension equals 100 ' +
                      'for the input layer "data" with shape (10,34). Although you can not use --batch, ' +
                      'you should pass --input_shape (100,34) instead of --batch 100. \n\n' +
+                     'You can also tell Model Optimizer where the batch dimension is located by specifying --layout. \n\n' +
                      refer_to_faq_msg(39))
                     .format(layer_name, shape))
@@ -328,4 +340,3 @@ def reverse_infer(graph: Graph, nodes: list):
         if node.has_valid('reverse_infer'):
             log.debug("Executed reverse infer for node '{}'".format(node.soft_get('name', node.id)))
             node.reverse_infer(node)
-
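The batch override now mirrors the channel lookup: when the layout names 'N', that index is overridden instead of assuming dimension 0. A small sketch of the resolution, with an illustrative shape and batch value:

from openvino.runtime import Layout  # pylint: disable=no-name-in-module,import-error

layout = Layout('NHWC')
shape = [1, 224, 224, 3]
if layout.has_name('N'):
    shape[layout.get_index_by_name('N')] = 8   # override batch to a hypothetical value
assert shape == [8, 224, 224, 3]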


@@ -2,6 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 import unittest
+from argparse import Namespace

 import numpy as np
@@ -53,6 +54,22 @@ nodes3 = {
 }

+
+def get_nodes(shape, axis=1):
+    return {
+        **regular_op_with_shaped_data('placeholder1', shape,
+                                      {'type': 'Parameter', 'shape': shape, 'rt_info': RTInfo()}),
+        **regular_op_with_shaped_data('placeholder2', [1, 1, 1, 1], {'type': 'Parameter', 'shape': [1, 1, 1, 1]}),
+
+        **regular_op_with_shaped_data('mul', shape, {'type': 'Multiply'}),
+        **regular_op_with_shaped_data('reverse_channels', shape,
+                                      {'op': 'ReverseChannels', 'type': None, 'axis': int64_array(axis)}),
+
+        **regular_op_with_shaped_data('pad', shape, {'type': 'Pad'}),
+
+        **result('result'),
+    }
+
+
 class ReverseInputChannelsTest(unittest.TestCase):
     def check_graph_attrs(self, graph: Graph, parameter_node_names: list):
         for node in graph.get_op_nodes():
@@ -229,15 +246,53 @@ class ReverseInputChannelsTest(unittest.TestCase):
         self.assertTrue(reverse_channels.axis == 1)
         self.assertTrue(type(reverse_channels.axis) == np.ndarray)

-    def test_get_fw_index(self):
-        graph = build_graph(nodes, [*connect('placeholder1', 'result')])
+    def test_insert(self):
+        graph = build_graph(get_nodes([1, 3, 10, 10]),
+                            [*connect('placeholder1', '0:mul'), *connect('placeholder2', '1:mul'),
+                             *connect('mul', 'result')], nodes_with_edges_only=True,
+                            cli=Namespace(reverse_input_channels=True))
+        InsertReverseChannels().find_and_replace_pattern(graph)
+
+        graph_ref = build_graph(get_nodes([1, 3, 10, 10]),
+                                [*connect('placeholder1', 'reverse_channels'), *connect('reverse_channels', '0:mul'),
+                                 *connect('placeholder2', '1:mul'), *connect('mul', 'result')])
+        (flag, resp) = compare_graphs(graph, graph_ref, 'result', check_op_attrs=True)
+        self.assertTrue(flag, resp)
+
+    def test_insert_old_api_map(self):
+        graph = build_graph(get_nodes([1, 10, 10, 3]),
+                            [*connect('placeholder1', '0:mul'), *connect('placeholder2', '1:mul'),
+                             *connect('mul', 'result')], nodes_with_edges_only=True,
+                            cli=Namespace(reverse_input_channels=True))
         node = Node(graph, 'placeholder1')
         old_api_map = OldAPIMapOrder(version=0)
         node.rt_info.info[('old_api_map_order', old_api_map.get_version())] = old_api_map
         node.rt_info.info[('old_api_map_order', old_api_map.get_version())].old_api_transpose_parameter([0, 2, 3, 1])

-        self.assertTrue(InsertReverseChannels.get_fw_index(node, 0) == 0)
-        self.assertTrue(InsertReverseChannels.get_fw_index(node, 1) == 3)
-        self.assertTrue(InsertReverseChannels.get_fw_index(node, 2) == 1)
-        self.assertTrue(InsertReverseChannels.get_fw_index(node, 3) == 2)
-        self.assertTrue(InsertReverseChannels.get_fw_index(node, -2) == 1)
-        self.assertTrue(type(InsertReverseChannels.get_fw_index(node, 0)) == int)
+        InsertReverseChannels().find_and_replace_pattern(graph)
+
+        graph_ref = build_graph(get_nodes([1, 10, 10, 3], 3),
+                                [*connect('placeholder1', 'reverse_channels'), *connect('reverse_channels', '0:mul'),
+                                 *connect('placeholder2', '1:mul'), *connect('mul', 'result')])
+        node2 = Node(graph_ref, 'placeholder1')
+        node2.rt_info = node.rt_info
+
+        (flag, resp) = compare_graphs(graph, graph_ref, 'result', check_op_attrs=True)
+        self.assertTrue(flag, resp)
+
+    def test_insert_layout(self):
+        graph = build_graph(get_nodes([1, 10, 10, 3]),
+                            [*connect('placeholder1', '0:mul'), *connect('placeholder2', '1:mul'),
+                             *connect('mul', 'result')], nodes_with_edges_only=True,
+                            cli=Namespace(reverse_input_channels=True,
+                                          layout_values={
+                                              'placeholder1': {'source_layout': 'nhwc', 'target_layout': None}}))
+        InsertReverseChannels().find_and_replace_pattern(graph)
+
+        graph_ref = build_graph(get_nodes([1, 10, 10, 3], 3),
+                                [*connect('placeholder1', 'reverse_channels'), *connect('reverse_channels', '0:mul'),
+                                 *connect('placeholder2', '1:mul'), *connect('mul', 'result')])
+        (flag, resp) = compare_graphs(graph, graph_ref, 'result', check_op_attrs=True)
+        self.assertTrue(flag, resp)


@@ -375,3 +375,93 @@ class AddMeanScaleValuesTest(unittest.TestCase):
         self.check_graph_attrs(graph, graph_ref, [])
         add_node = graph.get_op_nodes(type="Add")[0]
         self.assertTrue(add_node.in_port(1).get_connection().get_source().node['value'].dtype == np.float32)
+
+    def test_mean_values_explicit_and_optimized_layout(self):
+        graph_ref = build_graph(nodes, [
+            *connect('parameter', '0:add_mean'),
+            *connect('mean', '1:add_mean'),
+            *connect('add_mean', 'result'),
+            *connect('parameter_2', 'result_2'),
+        ])
+
+        argv = Namespace(mean_scale_values={'parameter': {'mean': np.array([1., 2., 3.])},
+                                            'parameter_2': {'mean': np.array([0., 0., 0.])}},
+                         layout_values={'parameter': {'source_layout': 'nchw', 'target_layout': None},
+                                        'parameter_2': {'source_layout': 'nchw', 'target_layout': None}}
+                         )
+        graph = build_graph(nodes, [*connect('parameter', 'result'), *connect('parameter_2', 'result_2')],
+                            nodes_with_edges_only=True, cli=argv)
+        self.set_graph_attrs(graph, ['parameter', 'parameter_2'])
+        self.set_graph_attrs(graph_ref, ['parameter', 'parameter_2'])
+        graph.graph['layout'] = 'NHWC'
+
+        AddMeanScaleValues().find_and_replace_pattern(graph)
+        (flag, resp) = compare_graphs(graph, graph_ref, 'result', check_op_attrs=True)
+        self.assertTrue(flag, resp)
+        (flag, resp) = compare_graphs(graph, graph_ref, 'result_2', check_op_attrs=True)
+        self.assertTrue(flag, resp)
+        self.check_graph_attrs(graph, graph_ref, ['parameter', 'parameter_2'])
+
+    def test_mean_values_explicit_and_scale_values_optimized_layout(self):
+        graph_ref = build_graph(nodes, [
+            *connect('parameter', '0:add_mean'),
+            *connect('mean', '1:add_mean'),
+            *connect('add_mean', 'result'),
+        ])
+
+        argv = Namespace(mean_scale_values={'parameter': {'scale': np.array([1.]), 'mean': np.array([1., 2., 3.])}},
+                         layout_values={'': {'source_layout': 'nchw', 'target_layout': None}}
+                         )
+        graph = build_graph(nodes, [*connect('parameter', 'result')], nodes_with_edges_only=True, cli=argv)
+        self.set_graph_attrs(graph, ['parameter'])
+        self.set_graph_attrs(graph_ref, ['parameter'])
+        graph.graph['layout'] = 'NHWC'
+
+        AddMeanScaleValues().find_and_replace_pattern(graph)
+        (flag, resp) = compare_graphs(graph, graph_ref, 'result', check_op_attrs=True)
+        self.assertTrue(flag, resp)
+        self.check_graph_attrs(graph, graph_ref, ['parameter'])
+
+    def test_mean_values_optimized_and_scale_values_explicit_layout(self):
+        graph_ref = build_graph(nodes, [
+            *connect('parameter', '0:mul_scale'),
+            *connect('scale', '1:mul_scale'),
+            *connect('mul_scale', 'result'),
+        ])
+
+        argv = Namespace(
+            mean_scale_values={'parameter': {'scale': np.array([1., 2., 3.]), 'mean': np.array([0., 0., 0.])}},
+            layout_values={'': {'source_layout': 'nchw', 'target_layout': None}}
+        )
+        graph = build_graph(nodes, [*connect('parameter', 'result')], nodes_with_edges_only=True, cli=argv)
+        self.set_graph_attrs(graph, ['parameter'])
+        self.set_graph_attrs(graph_ref, ['parameter'])
+        graph.graph['layout'] = 'NHWC'
+
+        AddMeanScaleValues().find_and_replace_pattern(graph)
+        (flag, resp) = compare_graphs(graph, graph_ref, 'result', check_op_attrs=True)
+        self.assertTrue(flag, resp)
+        self.check_graph_attrs(graph, graph_ref, ['parameter'])
+
+    def test_mean_values_explicit_and_scale_values_explicit_layout(self):
+        graph_ref = build_graph(nodes, [
+            *connect('parameter', '0:add_mean'),
+            *connect('mean', '1:add_mean'),
+            *connect('add_mean', '0:mul_scale'),
+            *connect('scale', '1:mul_scale'),
+            *connect('mul_scale', 'result'),
+        ])
+
+        argv = Namespace(mean_scale_values=[[np.array([1., 2., 3.]), np.array([1., 2., 3.])]],
+                         layout_values={'': {'source_layout': 'nchw', 'target_layout': None}}
+                         )
+        graph = build_graph(nodes, [*connect('parameter', 'result')],
+                            nodes_with_edges_only=True, cli=argv)
+        self.set_graph_attrs(graph, ['parameter'])
+        self.set_graph_attrs(graph_ref, ['parameter'])
+        graph.graph['layout'] = 'NHWC'
+
+        AddMeanScaleValues().find_and_replace_pattern(graph)
+        (flag, resp) = compare_graphs(graph, graph_ref, 'result', check_op_attrs=True)
+        self.assertTrue(flag, resp)
+        self.check_graph_attrs(graph, graph_ref, ['parameter'])