[MO] Move reverse input channels before mean/scale (#9182)
commit 0e4c50e24d (parent 3f6a026ae9)
@@ -59,11 +59,14 @@ Framework-agnostic parameters:
   --reverse_input_channels
                         Switch the input channels order from RGB to BGR (or
                         vice versa). Applied to original inputs of the model
-                        if and only if a number of channels equals 3. Applied
-                        after application of --mean_values and --scale_values
-                        options, so numbers in --mean_values and
-                        --scale_values go in the order of channels used in the
-                        original model.
+                        if and only if a number of channels equals 3.
+                        When --mean_values/--scale_values are also specified,
+                        reversing of channels will be applied to user's input
+                        data first, so that numbers in --mean_values and
+                        --scale_values go in the order of channels used in
+                        the original model. In other words, if both options are
+                        specified, then the data flow in the model looks as follows:
+                        Parameter -> ReverseInputChannels -> Mean/Scale apply -> the original body of the model.
   --log_level {CRITICAL,ERROR,WARN,WARNING,INFO,DEBUG,NOTSET}
                         Logger level
   --input INPUT         Quoted list of comma-separated input nodes names with
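The practical effect of this reordering: channel reversal now happens on the user's raw input first, so --mean_values/--scale_values always refer to channels in the original model's order. A minimal NumPy sketch of the new data flow (all names and values here are illustrative placeholders, not MO internals):

    import numpy as np

    # Hypothetical BGR input batch in NCHW layout.
    data = np.random.rand(1, 3, 224, 224).astype(np.float32)

    # Mean/scale values given in the ORIGINAL model's channel order (e.g. RGB).
    mean_values = np.array([123.68, 116.78, 103.94], dtype=np.float32)
    scale_values = np.array([58.395, 57.12, 57.375], dtype=np.float32)

    # Step 1: reverse channels (BGR -> RGB), matching ReverseInputChannels.
    data = data[:, ::-1, :, :]
    # Step 2: mean/scale now line up with the original model's channel order.
    data = (data - mean_values.reshape(1, 3, 1, 1)) / scale_values.reshape(1, 3, 1, 1)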
@@ -372,6 +372,12 @@ def apply_preprocessing(ov_function: Model, argv: argparse.Namespace):
         else:
             prep.output(node_name).tensor().set_layout(Layout(layout_value['target_layout']))

+    # Apply reverse_input_channels
+    if need_reverse:
+        for name, _ in suitable_params_ric:
+            prep.input(name).preprocess().reverse_channels()
+            log.debug('reverse_input_channels pre-processing applied to {}'.format(name))
+
     for node_name, node_mean_scale_values in mean_scale_values.items():
         # Apply mean first, then scale
         if node_mean_scale_values['mean'] is not None:
@@ -380,12 +386,6 @@ def apply_preprocessing(ov_function: Model, argv: argparse.Namespace):
             prep.input(node_name).preprocess().scale(node_mean_scale_values['scale'])
         log.debug('Mean/Scale pre-processing applied to {}'.format(node_name))

-    # Apply reverse_input_channels
-    if need_reverse:
-        for name, _ in suitable_params_ric:
-            prep.input(name).preprocess().reverse_channels()
-            log.debug('reverse_input_channels pre-processing applied to {}'.format(name))
-
     # Apply pre-processing builder to a function
     ov_function = prep.build()

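For comparison, the same step order can be expressed directly with OpenVINO's runtime preprocessing API. A minimal sketch, assuming a model file 'model.xml' with a 3-channel NCHW input named 'data' (both hypothetical):

    from openvino.preprocess import PrePostProcessor
    from openvino.runtime import Core, Layout

    core = Core()
    model = core.read_model('model.xml')                     # hypothetical model path
    prep = PrePostProcessor(model)
    prep.input('data').tensor().set_layout(Layout('NCHW'))   # 'data' is a hypothetical input name
    # Same step order the MO change enforces: reverse channels first, then mean, then scale.
    prep.input('data').preprocess() \
        .reverse_channels() \
        .mean([123.68, 116.78, 103.94]) \
        .scale([58.395, 57.12, 57.375])
    model = prep.build()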
@@ -273,10 +273,12 @@ def get_common_cli_parser(parser: argparse.ArgumentParser = None):
                                    'the original input of the model.')
     common_group.add_argument('--reverse_input_channels',
                               help='Switch the input channels order from RGB to BGR (or vice versa). Applied to '
-                                   'original inputs of the model if and only if a number of channels equals 3. Applied '
-                                   'after application of --mean_values and --scale_values options, so numbers in '
-                                   '--mean_values and --scale_values go in the order of channels used in the original '
-                                   'model.',
+                                   'original inputs of the model if and only if a number of channels equals 3. '
+                                   'When --mean_values/--scale_values are also specified, reversing of channels will '
+                                   'be applied to user\'s input data first, so that numbers in --mean_values '
+                                   'and --scale_values go in the order of channels used in the original model. '
+                                   'In other words, if both options are specified, then the data flow in the model '
+                                   'looks as follows: Parameter -> ReverseInputChannels -> Mean/Scale apply -> the original body of the model.',
                               action='store_true')
     common_group.add_argument('--log_level',
                               help='Logger level',
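A typical Model Optimizer invocation combining these options (the model file and values are illustrative):

    mo --input_model model.onnx \
       --reverse_input_channels \
       --mean_values [123.68,116.78,103.94] \
       --scale_values [58.395,57.12,57.375]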
@@ -56,25 +56,23 @@ class TestPreprocessingMOC(unittest.TestCase):
     def setUp(self):
         pass

+    def check_constant(self, const_node, expected, shape=None):
+        self.assertEqual(const_node.get_type_name(), 'Constant')
+        self.assertTrue(np.allclose(const_node.get_vector(), expected))
+        if shape is not None:
+            assert const_node.shape == PartialShape(shape)
+
     def check_scale_constant(self, node, expected, shape=None):
         const_node = node.input(1).get_source_output().get_node()
-        self.assertEqual(const_node.get_type_name(), 'Constant')
-        if node.get_type_name() == 'Divide':
-            self.assertTrue(np.allclose(const_node.get_vector(), expected))
-        else:
-            self.assertTrue(np.allclose(const_node.get_vector(), 1. / expected))
-        if shape:
-            assert const_node.shape == PartialShape(shape)
+        if node.get_type_name() != 'Divide':
+            expected = 1. / expected
+        self.check_constant(const_node, expected, shape)

     def check_mean_constant(self, node, expected, shape=None):
         const_node = node.input(1).get_source_output().get_node()
-        self.assertEqual(const_node.get_type_name(), 'Constant')
-        if node.get_type_name() == 'Subtract':
-            self.assertTrue(np.allclose(const_node.get_vector(), expected))
-        else:
-            self.assertTrue(np.allclose(const_node.get_vector(), -expected.toList()))
-        if shape:
-            self.assertEqual(const_node.shape, PartialShape(shape))
+        if node.get_type_name() != 'Subtract':
+            expected = -expected.toList()
+        self.check_constant(const_node, expected, shape)

     def test_scale_single_value(self):
         argv = Namespace(mean_scale_values=None, scale=2.0)
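The refactored helpers fold the two arithmetic variants into a single check by adjusting the expected constant: division by s equals multiplication by 1/s, and subtraction of m equals addition of -m. A quick NumPy illustration of the identities the helpers rely on:

    import numpy as np

    x = np.array([10., 20., 30.])
    s = np.array([2., 4., 8.])
    m = np.array([1., 2., 3.])

    assert np.allclose(x / s, x * (1. / s))   # Divide vs. Multiply-by-reciprocal
    assert np.allclose(x - m, x + (-m))       # Subtract vs. Add-of-negated-mean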
@@ -615,3 +613,41 @@ class TestPreprocessingMOC(unittest.TestCase):
         self.assertTrue(op_node0.get_type_name() == 'Relu')
         op_node1 = list(function.get_parameters()[1].output(0).get_target_inputs())[0].get_node()
         self.assertTrue(op_node1.get_type_name() == 'Relu')
+
+    def test_reverse_channels_and_mean_scale(self):
+        argv = Namespace(reverse_input_channels=True, mean_scale_values={
+            'input2a': {
+                'mean': np.array([1., 2., 3.]),
+                'scale': np.array([2., 4., 8.])}},
+            scale=None)
+        function = create_function2(shape2=[1, 3, 224, 224])
+        process_function(ov_function=function, argv=argv)
+
+        # Verify the order: first Gather (channel reversal), then 'mean' subtraction, then 'scale'
+        gather = list(function.get_parameters()[1].output(0).get_target_inputs())[0].get_node()
+        self.assertTrue(gather.get_type_name() == 'Gather')
+        range_node = gather.input(1).get_source_output().get_node()
+        self.assertTrue(range_node.get_type_name() == 'Range')
+        start = range_node.input(0).get_source_output().get_node()
+        end = range_node.input(1).get_source_output().get_node()
+        step = range_node.input(2).get_source_output().get_node()
+        self.check_constant(start, expected=[2], shape=[])
+        self.check_constant(end, expected=[-1], shape=[])
+        self.check_constant(step, expected=[-1], shape=[])
+        axes = gather.input(2).get_source_output().get_node()
+        self.check_constant(axes, expected=[1], shape=[1])
+
+        op_node = list(gather.output(0).get_target_inputs())[0].get_node()
+        self.assertTrue(op_node.get_type_name() == 'Subtract' or op_node.get_type_name() == 'Add')
+        self.check_mean_constant(op_node, expected=[1., 2., 3.], shape=[1, 3, 1, 1])
+
+        op_node = list(op_node.output(0).get_target_inputs())[0].get_node()
+        self.assertTrue(op_node.get_type_name() == 'Divide' or op_node.get_type_name() == 'Multiply')
+        self.check_scale_constant(op_node, expected=[2., 4., 8.], shape=[1, 3, 1, 1])
+
+        # Verify that input1 is not affected
+        op_node = list(function.get_parameters()[0].output(0).get_target_inputs())[0].get_node()
+        self.assertEqual(op_node.get_type_name(), 'Relu')
+
+        # Verify that the guessed layout (?C??) did not appear on input2
+        self.assertEqual(function.get_parameters()[1].layout, Layout())
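The assertions above reflect how reverse_channels is lowered into the graph: a Gather along the channel axis whose indices come from Range(2, -1, -1), i.e. [2, 1, 0]. An equivalent NumPy sketch (illustrative only):

    import numpy as np

    data = np.random.rand(1, 3, 224, 224)
    indices = np.arange(2, -1, -1)                   # Range(2, -1, -1) -> [2, 1, 0]
    reversed_data = np.take(data, indices, axis=1)   # Gather along channel axis 1
    assert np.array_equal(reversed_data, data[:, ::-1, :, :])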