Added unit tests and readme for model optimizer (#79)
* added unit tests * added readme for model optimizer * added a list of supported IE plugins
This commit is contained in:
committed by
openvino-pushbot
parent
30594bb309
commit
17e66dc5a6
111
model-optimizer/mo/ops/activation_test.py
Normal file
111
model-optimizer/mo/ops/activation_test.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.activation import Activation
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
|
||||
class TestActivationOp(unittest.TestCase):
|
||||
nodes_attributes = {
|
||||
'node_1': {
|
||||
'shape': np.array([227, 227, 227, 227]),
|
||||
'value': None
|
||||
},
|
||||
'activation_node': {
|
||||
'op': 'Activation',
|
||||
'kind': 'op'
|
||||
},
|
||||
'node_3': {
|
||||
'shape': None
|
||||
}
|
||||
}
|
||||
|
||||
def test_assertion_activation_infer(self):
|
||||
graph = build_graph(self.nodes_attributes,
|
||||
[
|
||||
('node_1', 'activation_node'),
|
||||
('activation_node', 'node_3')
|
||||
],
|
||||
{
|
||||
'activation_node': {'operation': 'test'}
|
||||
})
|
||||
activation_node = Node(graph, 'activation_node')
|
||||
self.assertEqual(activation_node.op, 'Activation')
|
||||
self.assertRaises(KeyError, Activation.infer, activation_node)
|
||||
|
||||
def test_activation_infer(self):
|
||||
graph = build_graph(self.nodes_attributes,
|
||||
[
|
||||
('node_1', 'activation_node'),
|
||||
('activation_node', 'node_3')
|
||||
],
|
||||
{
|
||||
'node_1': {
|
||||
'value': np.array([0, 7, 3, -1])
|
||||
},
|
||||
'activation_node': {
|
||||
'operation': 'relu6'
|
||||
},
|
||||
'node_3': {
|
||||
'value': None
|
||||
}
|
||||
})
|
||||
graph.graph['layout'] = 'NCHW'
|
||||
activation_node = Node(graph, 'activation_node')
|
||||
Activation.infer(activation_node)
|
||||
exp_shape = np.array([227, 227, 227, 227])
|
||||
res_shape = graph.node['node_3']['shape']
|
||||
res_value = graph.node['node_3']['value']
|
||||
exp_value = np.array([0, 6, 3, 0])
|
||||
for i, value in enumerate(exp_shape):
|
||||
self.assertEqual(res_shape[i], value)
|
||||
for i, value in enumerate(exp_value):
|
||||
self.assertEqual(res_value[i], value)
|
||||
|
||||
def test_activation_elu_infer(self):
|
||||
graph = build_graph(self.nodes_attributes,
|
||||
[
|
||||
('node_1', 'activation_node'),
|
||||
('activation_node', 'node_3')
|
||||
],
|
||||
{
|
||||
'node_1': {
|
||||
'value': np.array([6, -4, -2, -1])
|
||||
},
|
||||
'activation_node': {
|
||||
'operation': 'elu',
|
||||
'alpha': 1.0,
|
||||
},
|
||||
'node_3': {
|
||||
'value': None
|
||||
}
|
||||
})
|
||||
graph.graph['layout'] = 'NCHW'
|
||||
activation_node = Node(graph, 'activation_node')
|
||||
Activation.infer(activation_node)
|
||||
exp_shape = np.array([227, 227, 227, 227])
|
||||
res_shape = graph.node['node_3']['shape']
|
||||
res_value = graph.node['node_3']['value']
|
||||
exp_value = np.array([6., -0.98168436, -0.86466472, -0.63212056])
|
||||
for i, value in enumerate(exp_shape):
|
||||
self.assertEqual(res_shape[i], value)
|
||||
for i, value in enumerate(exp_value):
|
||||
self.assertAlmostEqual(res_value[i], value)
|
||||
47
model-optimizer/mo/ops/clamp_test.py
Normal file
47
model-optimizer/mo/ops/clamp_test.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.front.common.partial_infer.elemental import copy_shape_infer
|
||||
from mo.ops.clamp import Clamp
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
|
||||
class TestClampOp(unittest.TestCase):
|
||||
nodes_attributes = {
|
||||
'node_1': {
|
||||
'shape': np.array([227, 227, 227, 227])
|
||||
},
|
||||
'clamp_node': {
|
||||
},
|
||||
'node_3': {
|
||||
'kind': 'data'
|
||||
}
|
||||
}
|
||||
|
||||
def test_clamp_op(self):
|
||||
graph = build_graph(self.nodes_attributes,
|
||||
[
|
||||
('node_1', 'clamp_node'),
|
||||
('clamp_node', 'node_3')
|
||||
])
|
||||
clamp_node = Clamp(graph, self.nodes_attributes['clamp_node']).add_node()
|
||||
self.assertEqual(clamp_node.type, 'Clamp')
|
||||
self.assertEqual(clamp_node.op, 'Clamp')
|
||||
self.assertEqual(clamp_node.infer, copy_shape_infer)
|
||||
47
model-optimizer/mo/ops/concat_test.py
Normal file
47
model-optimizer/mo/ops/concat_test.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.front.common.partial_infer.concat import concat_infer
|
||||
from mo.ops.concat import Concat
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
|
||||
class TestConcatOp(unittest.TestCase):
|
||||
nodes_attributes = {
|
||||
'node_1': {
|
||||
'shape': np.array([227, 227, 227, 227])
|
||||
},
|
||||
'concat_node': {
|
||||
},
|
||||
'node_3': {
|
||||
'kind': 'data'
|
||||
}
|
||||
}
|
||||
|
||||
def test_concat_op(self):
|
||||
graph = build_graph(self.nodes_attributes,
|
||||
[
|
||||
('node_1', 'concat_node'),
|
||||
('concat_node', 'node_3')
|
||||
])
|
||||
concat_node = Concat(graph, self.nodes_attributes['concat_node']).add_node()
|
||||
self.assertEqual(concat_node.type, 'Concat')
|
||||
self.assertEqual(concat_node.op, 'Concat')
|
||||
self.assertEqual(concat_node.infer, concat_infer)
|
||||
371
model-optimizer/mo/ops/convolution_test.py
Normal file
371
model-optimizer/mo/ops/convolution_test.py
Normal file
@@ -0,0 +1,371 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.front.common.partial_infer.utils import int64_array
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.convolution import Convolution
|
||||
from mo.utils.unittest.extractors import FakeValue
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
nodes_attributes = {'conv_input': {'value': None, 'kind': 'data'},
|
||||
'conv_node': {'type': 'Convolution', 'kind': 'op'},
|
||||
'conv_weights': {'value': FakeValue(None), 'kind': 'data'},
|
||||
'conv_output': {'value': None, 'kind': 'data'}
|
||||
}
|
||||
|
||||
|
||||
class TestConvolutionPartialInfer(unittest.TestCase):
|
||||
def test_caffe_conv2d_infer(self):
|
||||
graph = build_graph(nodes_attributes,
|
||||
[('conv_input', 'conv_node'),
|
||||
('conv_weights', 'conv_node'),
|
||||
('conv_node', 'conv_output')],
|
||||
{'conv_output': {'is_output': True, 'shape': None},
|
||||
'conv_input': {'shape': np.array([1, 3, 227, 227])},
|
||||
'conv_weights': {'shape': np.array([64, 3, 3, 3]),
|
||||
'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
|
||||
'conv_node': {'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
|
||||
'conv_pad': np.array([[0, 0], [0, 0], [0, 0], [0, 0]]),
|
||||
'dilation': np.array([1, 1, 1, 1]), 'bias_addable': True, 'bias_term': False,
|
||||
'output_spatial_shape': None, 'output_shape': None,
|
||||
'stride': np.array([1, 1, 1, 1]), 'group': 1,
|
||||
'kernel_spatial_idx': np.array([2, 3]),
|
||||
'input_feature_channel': 1,
|
||||
'output_feature_channel': 0,
|
||||
'output': 64, 'kernel_spatial': np.array([3, 3]),
|
||||
'spatial_dims': np.array([2, 3]), 'channel_dims': np.array([1]),
|
||||
'batch_dims': np.array([0])}
|
||||
})
|
||||
|
||||
conv_node = Node(graph, 'conv_node')
|
||||
Convolution.infer(conv_node)
|
||||
exp_shape = np.array([1, 64, 225, 225])
|
||||
res_shape = graph.node['conv_output']['shape']
|
||||
for i in range(0, len(exp_shape)):
|
||||
self.assertEqual(exp_shape[i], res_shape[i])
|
||||
|
||||
def test_caffe_conv2d_infer_no_shape(self):
|
||||
graph = build_graph(nodes_attributes,
|
||||
[('conv_input', 'conv_node'),
|
||||
('conv_weights', 'conv_node'),
|
||||
('conv_node', 'conv_output')],
|
||||
{'conv_output': {'is_output': True, 'shape': None},
|
||||
'conv_input': {'shape': None},
|
||||
'conv_weights': {'shape': None,
|
||||
'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
|
||||
'conv_node': {'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
|
||||
'conv_pad': np.array([[0, 0], [0, 0], [0, 0], [0, 0]]),
|
||||
'dilation': np.array([1, 1, 1, 1]), 'bias_addable': True, 'bias_term': False,
|
||||
'output_spatial_shape': None, 'output_shape': None,
|
||||
'stride': np.array([1, 1, 1, 1]), 'group': 1,
|
||||
'output': 64, 'kernel_spatial': np.array([3, 3]),
|
||||
'spatial_dims': np.array([2, 3]), 'channel_dims': np.array([1]),
|
||||
'batch_dims': np.array([0])}
|
||||
})
|
||||
|
||||
conv_node = Node(graph, 'conv_node')
|
||||
Convolution.infer(conv_node)
|
||||
res_shape = graph.node['conv_output']['shape']
|
||||
self.assertIsNone(res_shape)
|
||||
|
||||
def test_deconv_infer_ideal(self):
|
||||
graph = build_graph(nodes_attributes,
|
||||
[('conv_input', 'conv_node'),
|
||||
('conv_weights', 'conv_node'),
|
||||
('conv_node', 'conv_output')],
|
||||
{'conv_output': {'is_output': True, 'shape': None},
|
||||
'conv_input': {'shape': np.array([1, 21, 16, 16])},
|
||||
'conv_weights': {'shape': np.array([1, 21, 4, 4]),
|
||||
'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
|
||||
'conv_node': {#'spatial_dims': np.array([2, 3]), 'batch_dims': np.array([0]),
|
||||
'channel_dims': np.array([1]), 'bias_addable': True, 'bias_term': False,
|
||||
'batch_dims': np.array([0]),
|
||||
'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
|
||||
'kernel_spatial': np.array([4, 4]), 'output_spatial_shape': None,
|
||||
'kernel_spatial_idx': np.array([2, 3]),
|
||||
'input_feature_channel': 1,
|
||||
'output_feature_channel': 0,
|
||||
'output_padding': np.array([0, 0, 1, 1]),
|
||||
'type': 'Deconvolution', 'output': 21, 'dilation': np.array([1, 1, 1, 1]),
|
||||
'group': 1, 'stride': np.array([1, 1, 2, 2]), 'output_shape': None}
|
||||
})
|
||||
|
||||
deconv_node = Node(graph, 'conv_node')
|
||||
|
||||
Convolution.infer(deconv_node)
|
||||
res_shape = deconv_node['output_shape']
|
||||
exp_shape = np.array([1, 21, 35, 35])
|
||||
|
||||
for i in range(0, len(exp_shape)):
|
||||
self.assertEqual(exp_shape[i], res_shape[i])
|
||||
|
||||
# Check that after double infer shape and pad attrs do not changes
|
||||
Convolution.infer(deconv_node)
|
||||
|
||||
for i in range(0, len(exp_shape)):
|
||||
self.assertEqual(exp_shape[i], res_shape[i])
|
||||
|
||||
|
||||
def test_deconv_infer_no_shape(self):
|
||||
graph = build_graph(nodes_attributes,
|
||||
[('conv_input', 'conv_node'),
|
||||
('conv_weights', 'conv_node'),
|
||||
('conv_node', 'conv_output')],
|
||||
{'conv_output': {'is_output': True, 'shape': None},
|
||||
'conv_input': {'shape': None},
|
||||
'conv_weights': {'shape': np.array([1, 21, 16, 16]),
|
||||
'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
|
||||
'conv_node': {'spatial_dims': np.array([2, 3]), 'batch_dims': np.array([0]),
|
||||
'channel_dims': np.array([1]),
|
||||
'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
|
||||
'kernel_spatial': np.array([4, 4]), 'output_spatial_shape': None,
|
||||
'kernel_spatial_idx': np.array([2, 3]),
|
||||
'input_feature_channel': 1,
|
||||
'output_feature_channel': 0,
|
||||
'type': 'Deconvolution', 'output': 21, 'dilation': np.array([1, 1, 1, 1]),
|
||||
'group': 1, 'stride': np.array([1, 1, 2, 2]), 'output_shape': None}
|
||||
})
|
||||
|
||||
deconv_node = Node(graph, 'conv_node')
|
||||
Convolution.infer(deconv_node)
|
||||
res_shape = deconv_node['output_shape']
|
||||
self.assertIsNone(res_shape)
|
||||
|
||||
def test_conv_infer_set_default_attrs_nchw(self):
|
||||
graph = build_graph(nodes_attributes,
|
||||
[
|
||||
('conv_input', 'conv_node'),
|
||||
('conv_weights', 'conv_node'),
|
||||
('conv_node', 'conv_output')
|
||||
],
|
||||
{
|
||||
'conv_output': {
|
||||
'is_output': True,
|
||||
'shape': None
|
||||
},
|
||||
'conv_input': {
|
||||
'shape': int64_array([1, 3, 224, 224])
|
||||
},
|
||||
'conv_weights': {
|
||||
'shape': int64_array([3, 64, 7, 7]),
|
||||
'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']
|
||||
},
|
||||
'conv_node': {
|
||||
'type': 'Convolution',
|
||||
'bias_term': None,
|
||||
'stride': None,
|
||||
'dilation': None,
|
||||
|
||||
'batch_dims': int64_array([0]),
|
||||
'channel_dims': int64_array([1]),
|
||||
|
||||
'output_spatial_shape': None,
|
||||
|
||||
'input_feature_channel': 0,
|
||||
'output_feature_channel': 1,
|
||||
|
||||
'group': 1,
|
||||
'output_shape': None,
|
||||
'layout': 'NCHW'
|
||||
}
|
||||
})
|
||||
|
||||
conv_node = Node(graph, 'conv_node')
|
||||
conv_output = Node(graph, 'conv_output')
|
||||
|
||||
Convolution.infer(conv_node)
|
||||
|
||||
# Check bias_term attribute
|
||||
self.assertTrue(conv_node.has_valid('bias_term'))
|
||||
self.assertTrue(not conv_node.bias_term)
|
||||
# Check kernel_spatial_idx attr detection
|
||||
self.assertTrue(conv_node.has_valid('kernel_spatial_idx'))
|
||||
self.assertTrue(np.array_equal(int64_array([2, 3]), conv_node.kernel_spatial_idx))
|
||||
# Check spatial_dims attr detection
|
||||
self.assertTrue(conv_node.has_valid('spatial_dims'))
|
||||
self.assertTrue(np.array_equal(int64_array([2, 3]), conv_node.spatial_dims))
|
||||
# Check kernel_spatial attr detection
|
||||
self.assertTrue(conv_node.has_valid('kernel_spatial'))
|
||||
self.assertTrue(np.array_equal(int64_array([7, 7]), conv_node.kernel_spatial))
|
||||
# Check output attribute
|
||||
self.assertTrue(conv_node.has_valid('output'))
|
||||
self.assertEqual(64, conv_node.output)
|
||||
# Check dilation value. Should be set to default
|
||||
self.assertTrue(conv_node.has_valid('dilation'))
|
||||
self.assertTrue(np.array_equal(int64_array([1, 1, 1, 1]), conv_node.dilation))
|
||||
# Check stride value. Should be set to default
|
||||
self.assertTrue(conv_node.has_valid('stride'))
|
||||
self.assertTrue(np.array_equal(int64_array([1, 1, 1, 1]), conv_node.stride))
|
||||
# Check pad value. Should be set to default
|
||||
self.assertTrue(conv_node.has_valid('pad'))
|
||||
self.assertTrue(np.array_equal(int64_array([[0, 0], [0, 0], [0, 0], [0, 0]]), conv_node.pad))
|
||||
# Check pad_spatial_shape
|
||||
self.assertTrue(conv_node.has_valid('pad_spatial_shape'))
|
||||
self.assertTrue(np.array_equal(int64_array([[0, 0], [0, 0]]), conv_node.pad_spatial_shape))
|
||||
# Check resulting output shape
|
||||
self.assertTrue(np.array_equal(int64_array([1, 64, 218, 218]), conv_output.shape))
|
||||
|
||||
def test_conv_infer_set_default_attrs_nhwc(self):
|
||||
graph = build_graph(nodes_attributes,
|
||||
[
|
||||
('conv_input', 'conv_node'),
|
||||
('conv_weights', 'conv_node'),
|
||||
('conv_node', 'conv_output')
|
||||
],
|
||||
{
|
||||
'conv_output': {
|
||||
'is_output': True,
|
||||
'shape': None
|
||||
},
|
||||
'conv_input': {
|
||||
'shape': int64_array([1, 224, 224, 3])
|
||||
},
|
||||
'conv_weights': {
|
||||
'shape': int64_array([3, 64, 7, 7]),
|
||||
'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']
|
||||
},
|
||||
'conv_node': {
|
||||
'type': 'Convolution',
|
||||
'bias_term': None,
|
||||
'stride': None,
|
||||
'dilation': None,
|
||||
|
||||
'batch_dims': int64_array([0]),
|
||||
'channel_dims': int64_array([3]),
|
||||
|
||||
'output_spatial_shape': None,
|
||||
|
||||
'input_feature_channel': 0,
|
||||
'output_feature_channel': 1,
|
||||
|
||||
'group': 1,
|
||||
'output_shape': None,
|
||||
'layout': 'NHWC'
|
||||
}
|
||||
})
|
||||
|
||||
conv_node = Node(graph, 'conv_node')
|
||||
conv_output = Node(graph, 'conv_output')
|
||||
|
||||
Convolution.infer(conv_node)
|
||||
|
||||
# Check bias_term attribute
|
||||
self.assertTrue(conv_node.has_valid('bias_term'))
|
||||
self.assertTrue(not conv_node.bias_term)
|
||||
# Check kernel_spatial_idx attr detection
|
||||
self.assertTrue(conv_node.has_valid('kernel_spatial_idx'))
|
||||
self.assertTrue(np.array_equal(int64_array([2, 3]), conv_node.kernel_spatial_idx))
|
||||
# Check spatial_dims attr detection
|
||||
self.assertTrue(conv_node.has_valid('spatial_dims'))
|
||||
self.assertTrue(np.array_equal(int64_array([1, 2]), conv_node.spatial_dims))
|
||||
# Check kernel_spatial attr detection
|
||||
self.assertTrue(conv_node.has_valid('kernel_spatial'))
|
||||
self.assertTrue(np.array_equal(int64_array([7, 7]), conv_node.kernel_spatial))
|
||||
# Check output attribute
|
||||
self.assertTrue(conv_node.has_valid('output'))
|
||||
self.assertEqual(64, conv_node.output)
|
||||
# Check dilation value. Should be set to default
|
||||
self.assertTrue(conv_node.has_valid('dilation'))
|
||||
self.assertTrue(np.array_equal(int64_array([1, 1, 1, 1]), conv_node.dilation))
|
||||
# Check stride value. Should be set to default
|
||||
self.assertTrue(conv_node.has_valid('stride'))
|
||||
self.assertTrue(np.array_equal(int64_array([1, 1, 1, 1]), conv_node.stride))
|
||||
# Check pad value. Should be set to default
|
||||
self.assertTrue(conv_node.has_valid('pad'))
|
||||
self.assertTrue(np.array_equal(int64_array([[0, 0], [0, 0], [0, 0], [0, 0]]), conv_node.pad))
|
||||
# Check pad_spatial_shape
|
||||
self.assertTrue(conv_node.has_valid('pad_spatial_shape'))
|
||||
self.assertTrue(np.array_equal(int64_array([[0, 0], [0, 0]]), conv_node.pad_spatial_shape))
|
||||
# Check resulting output shape
|
||||
self.assertTrue(np.array_equal(int64_array([1, 218, 218, 64]), conv_output.shape))
|
||||
|
||||
def test_conv_infer_3D_convolution(self):
|
||||
graph = build_graph(nodes_attributes,
|
||||
[
|
||||
('conv_input', 'conv_node'),
|
||||
('conv_weights', 'conv_node'),
|
||||
('conv_node', 'conv_output')
|
||||
],
|
||||
{
|
||||
'conv_output': {
|
||||
'is_output': True,
|
||||
'shape': None
|
||||
},
|
||||
'conv_input': {
|
||||
'shape': int64_array([1, 3, 16, 224, 224])
|
||||
},
|
||||
'conv_weights': {
|
||||
'shape': int64_array([3, 64, 1, 7, 7]),
|
||||
'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']
|
||||
},
|
||||
'conv_node': {
|
||||
'type': 'Convolution',
|
||||
'bias_term': None,
|
||||
'stride': None,
|
||||
'dilation': None,
|
||||
|
||||
'batch_dims': int64_array([0]),
|
||||
'channel_dims': int64_array([1]),
|
||||
|
||||
'output_spatial_shape': None,
|
||||
|
||||
'input_feature_channel': 0,
|
||||
'output_feature_channel': 1,
|
||||
|
||||
'group': 1,
|
||||
'output_shape': None,
|
||||
'layout': 'NCHW'
|
||||
}
|
||||
})
|
||||
|
||||
conv_node = Node(graph, 'conv_node')
|
||||
conv_output = Node(graph, 'conv_output')
|
||||
|
||||
Convolution.infer(conv_node)
|
||||
|
||||
# Check bias_term attribute
|
||||
self.assertTrue(conv_node.has_valid('bias_term'))
|
||||
self.assertTrue(not conv_node.bias_term)
|
||||
# Check kernel_spatial_idx attr detection
|
||||
self.assertTrue(conv_node.has_valid('kernel_spatial_idx'))
|
||||
self.assertTrue(np.array_equal(int64_array([2, 3, 4]), conv_node.kernel_spatial_idx))
|
||||
# Check spatial_dims attr detection
|
||||
self.assertTrue(conv_node.has_valid('spatial_dims'))
|
||||
self.assertTrue(np.array_equal(int64_array([2, 3, 4]), conv_node.spatial_dims))
|
||||
# Check kernel_spatial attr detection
|
||||
self.assertTrue(conv_node.has_valid('kernel_spatial'))
|
||||
self.assertTrue(np.array_equal(int64_array([1, 7, 7]), conv_node.kernel_spatial))
|
||||
# Check output attribute
|
||||
self.assertTrue(conv_node.has_valid('output'))
|
||||
self.assertEqual(64, conv_node.output)
|
||||
# Check dilation value. Should be set to default
|
||||
self.assertTrue(conv_node.has_valid('dilation'))
|
||||
self.assertTrue(np.array_equal(int64_array([1, 1, 1, 1, 1]), conv_node.dilation))
|
||||
# Check stride value. Should be set to default
|
||||
self.assertTrue(conv_node.has_valid('stride'))
|
||||
self.assertTrue(np.array_equal(int64_array([1, 1, 1, 1, 1]), conv_node.stride))
|
||||
# Check pad value. Should be set to default
|
||||
self.assertTrue(conv_node.has_valid('pad'))
|
||||
self.assertTrue(np.array_equal(int64_array([[0, 0], [0, 0], [0, 0], [0, 0], [0, 0]]), conv_node.pad))
|
||||
# Check pad_spatial_shape
|
||||
self.assertTrue(conv_node.has_valid('pad_spatial_shape'))
|
||||
self.assertTrue(np.array_equal(int64_array([[0, 0], [0, 0], [0, 0]]), conv_node.pad_spatial_shape))
|
||||
# Check resulting output shape
|
||||
self.assertTrue(np.array_equal(int64_array([1, 64, 16, 218, 218]), conv_output.shape))
|
||||
187
model-optimizer/mo/ops/crop_test.py
Normal file
187
model-optimizer/mo/ops/crop_test.py
Normal file
@@ -0,0 +1,187 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.front.common.partial_infer.utils import int64_array
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.crop import Crop
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
|
||||
class TestCropPartialInfer(unittest.TestCase):
|
||||
@staticmethod
|
||||
def _create_graph_type1():
|
||||
nodes_attributes = {'crop_input': {'shape': None, 'value': None, 'kind': 'data'},
|
||||
'crop_node': {'type': 'Crop', 'kind': 'op'},
|
||||
'crop_output': {'shape': None, 'value': None, 'kind': 'data'}
|
||||
}
|
||||
return build_graph(nodes_attributes,
|
||||
[
|
||||
('crop_input', 'crop_node'), ('crop_node', 'crop_output')
|
||||
],
|
||||
{
|
||||
'crop_input': {'shape': int64_array([1, 3, 224, 224])},
|
||||
'crop_node': {'axis': int64_array([2, 3]),
|
||||
'crop_begin': int64_array([10, 15]),
|
||||
'crop_end': int64_array([10, 15])
|
||||
},
|
||||
})
|
||||
|
||||
@staticmethod
|
||||
def _create_graph_type2():
|
||||
nodes_attributes = {'crop_input': {'shape': None, 'value': None, 'kind': 'data'},
|
||||
'crop_node': {'type': 'Crop', 'kind': 'op'},
|
||||
'crop_output': {'shape': None, 'value': None, 'kind': 'data'}
|
||||
}
|
||||
return build_graph(nodes_attributes,
|
||||
[
|
||||
('crop_input', 'crop_node'), ('crop_node', 'crop_output')
|
||||
],
|
||||
{
|
||||
'crop_input': {'shape': int64_array([1, 3, 224, 224])},
|
||||
'crop_node': {'axis': int64_array([2, 3]), 'dim': int64_array([100, 150])},
|
||||
})
|
||||
|
||||
@staticmethod
|
||||
def _create_graph_type3():
|
||||
nodes_attributes = {'crop_input': {'shape': None, 'value': None, 'kind': 'data'},
|
||||
'crop_input2': {'shape': None, 'value': None, 'kind': 'data'},
|
||||
'crop_node': {'type': 'Crop', 'kind': 'op'},
|
||||
'crop_output': {'shape': None, 'value': None, 'kind': 'data'}
|
||||
}
|
||||
return build_graph(nodes_attributes,
|
||||
[
|
||||
('crop_input', 'crop_node'), ('crop_input2', 'crop_node'), ('crop_node', 'crop_output')
|
||||
],
|
||||
{
|
||||
'crop_input': {'shape': int64_array([1, 3, 224, 224])},
|
||||
'crop_input2': {'shape': int64_array([1, 3, 100, 150])},
|
||||
'crop_node': {'axis': 2, 'offset': int64_array([10, 15])},
|
||||
})
|
||||
|
||||
def test_crop_type1_infer(self):
|
||||
graph = self._create_graph_type1()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
Crop.infer(crop_node)
|
||||
|
||||
exp_shape = int64_array([1, 3, 204, 194])
|
||||
res_shape = graph.node['crop_output']['shape']
|
||||
|
||||
self.assertTrue(np.array_equal(exp_shape, res_shape),
|
||||
'shapes do not match expected: {} and given: {}'.format(exp_shape, res_shape))
|
||||
|
||||
def test_crop_type1_infer_neg1(self):
|
||||
graph = self._create_graph_type1()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
crop_node['axis'] = None
|
||||
|
||||
Crop.infer(crop_node)
|
||||
self.assertIsNone(crop_node.out_node().shape)
|
||||
|
||||
def test_crop_type1_infer_neg2(self):
|
||||
graph = self._create_graph_type1()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
crop_node['crop_begin'] = int64_array([1, 2, 3])
|
||||
|
||||
Crop.infer(crop_node)
|
||||
self.assertIsNone(crop_node.out_node().shape)
|
||||
|
||||
def test_crop_type2_infer(self):
|
||||
graph = self._create_graph_type2()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
Crop.infer(crop_node)
|
||||
|
||||
exp_shape = int64_array([1, 3, 100, 150])
|
||||
res_shape = graph.node['crop_output']['shape']
|
||||
|
||||
self.assertTrue(np.array_equal(exp_shape, res_shape),
|
||||
'shapes do not match expected: {} and given: {}'.format(exp_shape, res_shape))
|
||||
|
||||
def test_crop_type2_infer_neg1(self):
|
||||
graph = self._create_graph_type2()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
crop_node['dim'] = int64_array([1, 2, 3])
|
||||
|
||||
Crop.infer(crop_node)
|
||||
self.assertIsNone(crop_node.out_node().shape)
|
||||
|
||||
def test_crop_type2_infer_neg2(self):
|
||||
graph = self._create_graph_type2()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
crop_node['dim'] = None
|
||||
crop_node['crop_begin'] = None
|
||||
|
||||
Crop.infer(crop_node)
|
||||
self.assertIsNone(crop_node.out_node().shape)
|
||||
|
||||
def test_crop_type3_infer(self):
|
||||
graph = self._create_graph_type3()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
Crop.infer(crop_node)
|
||||
|
||||
exp_shape = int64_array([1, 3, 100, 150])
|
||||
res_shape = graph.node['crop_output']['shape']
|
||||
|
||||
self.assertTrue(np.array_equal(exp_shape, res_shape),
|
||||
'shapes do not match expected: {} and given: {}'.format(exp_shape, res_shape))
|
||||
|
||||
def test_crop_type3_infer_neg1(self):
|
||||
graph = self._create_graph_type3()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
crop_input2 = Node(graph, 'crop_input2')
|
||||
crop_input2.shape = None
|
||||
|
||||
Crop.infer(crop_node)
|
||||
self.assertIsNone(crop_node.out_node().shape)
|
||||
|
||||
def test_crop_type3_infer_neg2(self):
|
||||
graph = self._create_graph_type3()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
crop_node['axis'] = None
|
||||
|
||||
Crop.infer(crop_node)
|
||||
self.assertIsNone(crop_node.out_node().shape)
|
||||
|
||||
def test_crop_type3_infer_neg3(self):
|
||||
graph = self._create_graph_type3()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
crop_node['offset'] = None
|
||||
|
||||
Crop.infer(crop_node)
|
||||
self.assertIsNone(crop_node.out_node().shape)
|
||||
|
||||
def test_crop_type3_infer_neg4(self):
|
||||
graph = self._create_graph_type3()
|
||||
|
||||
crop_node = Node(graph, 'crop_node')
|
||||
crop_input2 = Node(graph, 'crop_input2')
|
||||
crop_input2.shape = int64_array([1, 4, 423, 563])
|
||||
|
||||
Crop.infer(crop_node)
|
||||
self.assertIsNone(crop_node.out_node().shape)
|
||||
65
model-optimizer/mo/ops/flatten_onnx_test.py
Normal file
65
model-optimizer/mo/ops/flatten_onnx_test.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
from generator import generator, generate
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.flatten_onnx import FlattenONNX
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
|
||||
@generator
|
||||
class TestFlattenONNXOp(unittest.TestCase):
|
||||
# There are tests for InnerProduct.infer in mo/front/common/partial_infer/inner_product_test.py
|
||||
nodes_attributes = {
|
||||
'data_1': {
|
||||
'kind': 'data',
|
||||
'shape': np.array([1, 3, 224, 224])
|
||||
},
|
||||
'flatten': {
|
||||
'type': 'Reshape',
|
||||
'axis': None,
|
||||
'kind': 'op',
|
||||
},
|
||||
'data_2': {
|
||||
'kind': 'data',
|
||||
'shape': None,
|
||||
}
|
||||
}
|
||||
|
||||
def _create_graph_with_flatten(self, axis):
|
||||
graph = build_graph(self.nodes_attributes,
|
||||
[('data_1', 'flatten'),
|
||||
('flatten', 'data_2')],
|
||||
{'flatten': {'axis': axis}})
|
||||
return graph
|
||||
|
||||
@generate(*[(0, [1, 3 * 224 * 224]),
|
||||
(1, [1, 3 * 224 * 224]),
|
||||
(2, [3, 224 * 224]),
|
||||
(3, [3 * 224, 224]),
|
||||
(4, [3 * 224 * 224, 1]),
|
||||
])
|
||||
def test_flatten_infer_1(self, axis, ref):
|
||||
graph = self._create_graph_with_flatten(axis)
|
||||
flatten_node = Node(graph, 'flatten')
|
||||
|
||||
FlattenONNX.infer(flatten_node)
|
||||
|
||||
self.assertTrue(np.array_equal(flatten_node.out_node().shape, np.array(ref)))
|
||||
62
model-optimizer/mo/ops/flatten_test.py
Normal file
62
model-optimizer/mo/ops/flatten_test.py
Normal file
@@ -0,0 +1,62 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.flatten import Flatten
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
# Minimal graph topology shared by the Flatten inference tests below:
# an input data node, the Flatten op, and an output data node.
nodes_attributes = {'node_1': {'value': None, 'kind': 'data'},
                    'flatten_1': {'type': 'Flatten', 'value': None, 'kind': 'op'},
                    'node_2': {'value': None, 'kind': 'data'}
                    }
|
||||
|
||||
|
||||
class TestFlattenPartialInfer(unittest.TestCase):
    """Tests for Flatten.infer: a known input shape and a missing one."""

    def test_flatten_infer(self):
        # Flattening [1, 3, 256, 256] at axis=1 keeps the batch dimension and
        # collapses the remaining dimensions into one.
        graph = build_graph(nodes_attributes,
                            [('node_1', 'flatten_1'),
                             ('flatten_1', 'node_2')],
                            {'node_2': {'is_output': True, 'shape': np.array([1, 3 * 256 * 256])},
                             'node_1': {'shape': np.array([1, 3, 256, 256])},
                             'flatten_1': {'axis': 1, 'dim': []}
                             })

        flatten_node = Node(graph, 'flatten_1')

        Flatten.infer(flatten_node)

        exp_shape = np.array([1, 3 * 256 * 256])
        res_shape = graph.node['node_2']['shape']
        for idx in range(len(exp_shape)):
            self.assertEqual(exp_shape[idx], res_shape[idx])

    def test_flatten_infer_no_shape(self):
        # With no input shape available, no output shape can be inferred.
        graph = build_graph(nodes_attributes,
                            [('node_1', 'flatten_1'),
                             ('flatten_1', 'node_2')],
                            {'node_2': {'is_output': True, 'shape': None},
                             'node_1': {'shape': None},
                             'flatten_1': {'axis': 1}
                             })

        flatten_node = Node(graph, 'flatten_1')

        Flatten.infer(flatten_node)

        self.assertIsNone(graph.node['node_2']['shape'])
|
||||
48
model-optimizer/mo/ops/inner_product_test.py
Normal file
48
model-optimizer/mo/ops/inner_product_test.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.front.common.partial_infer.inner_product import caffe_inner_product
|
||||
from mo.ops.inner_product import InnerProduct
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
|
||||
class TestInnerProductOp(unittest.TestCase):
    """Tests for the InnerProduct op class registration.

    Shape inference itself is covered in
    mo/front/common/partial_infer/inner_product_test.py.
    """

    nodes_attributes = {
        'node_1': {
            'shape': np.array([227, 5, 2, 1])
        },
        'fc_node': {
        },
        'node_3': {
            'kind': 'data'
        }
    }

    def test_inner_product_op(self):
        # Renamed from the copy-pasted 'test_concat_op': this verifies that
        # InnerProduct registers as a 'FullyConnected' node with the Caffe
        # inner-product infer function attached.
        graph = build_graph(self.nodes_attributes,
                            [
                                ('node_1', 'fc_node'),
                                ('fc_node', 'node_3')
                            ])
        fc_node = InnerProduct(graph, self.nodes_attributes['fc_node']).add_node()
        self.assertEqual(fc_node.type, 'FullyConnected')
        self.assertEqual(fc_node.op, 'FullyConnected')
        self.assertEqual(fc_node.infer, caffe_inner_product)
|
||||
95
model-optimizer/mo/ops/pad_test.py
Normal file
95
model-optimizer/mo/ops/pad_test.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.pad import Pad
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
|
||||
class TestPadONNXOp(unittest.TestCase):
    """Tests for Pad.infer with pads given either as a node attribute or as a
    second data input (the two supported, mutually exclusive flavors)."""

    node_attrs = {
        'data_in': {
            'kind': 'data',
            'shape': np.array([1, 3, 100, 200])
        },
        # optional input for one of the two flavors of pad op
        'data_pads': {
            'kind': 'data',
            'value': np.array([[0, 0], [0, 0], [1, 3], [2, 4]], dtype=np.int64),
            # NOTE(review): declared shape [2, 4] does not match the 4x2
            # 'value' above — confirm whether infer relies on this shape.
            'shape': np.array([2, 4], dtype=np.int64)
        },
        'pad': {
            'op': 'Pad',
            'kind': 'op',
            'pads': None,
        },
        'data_out': {
            'kind': 'data',
            'shape': None,
        }
    }

    edge_attrs = [
        ('data_in', 'pad'),
        ('pad', 'data_out')
    ]

    def test_one_input(self):
        # Pads supplied via the node attribute only.
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs,
            {'pad': {'pads': np.array([[0, 0], [0, 0], [1, 3], [2, 4]], dtype=np.int64)}},
            nodes_with_edges_only=True,
        )
        pad_node = Node(graph, 'pad')
        Pad.infer(pad_node)
        self.assertTrue(np.array_equal(Node(graph, 'data_out').shape, np.array([1, 3, 100 + 1 + 3, 200 + 2 + 4])))

    def test_two_inputs(self):
        # Pads supplied via the second data input only.
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs + [('data_pads', 'pad')],
            nodes_with_edges_only=True,
        )
        pad_node = Node(graph, 'pad')
        Pad.infer(pad_node)
        self.assertTrue(np.array_equal(Node(graph, 'data_out').shape, np.array([1, 3, 100 + 1 + 3, 200 + 2 + 4])))

    def test_one_input_and_no_pads(self):
        # Neither flavor supplies pads: inference must fail with an assertion.
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs,
            nodes_with_edges_only=True,
        )
        pad_node = Node(graph, 'pad')
        with self.assertRaisesRegex(AssertionError, ".*pads attribute is missing.*"):
            Pad.infer(pad_node)

    def test_two_inputs_and_pads(self):
        # Both flavors supply pads at once: inference must reject the extra input.
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs + [('data_pads', 'pad')],
            {'pad': {'pads': np.array([[0, 0], [0, 0], [1, 3], [2, 4]], dtype=np.int64)}},
            nodes_with_edges_only=True,
        )
        pad_node = Node(graph, 'pad')
        with self.assertRaisesRegex(AssertionError, ".*unexpected additional input argument.*"):
            Pad.infer(pad_node)
|
||||
96
model-optimizer/mo/ops/permute_test.py
Normal file
96
model-optimizer/mo/ops/permute_test.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
from generator import generator, generate
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.permute import Permute
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
|
||||
@generator
class TestPermuteOp(unittest.TestCase):
    """Tests for Permute.infer with explicit orders and 'reverse_order' mode."""

    nodes_attributes = {
        'data_1': {
            'kind': 'data',
            'shape': np.array([1, 3, 224, 224])
        },
        'transpose': {
            'type': 'Permute',
            'order': None,
            'reverse_order': False,
            'kind': 'op',
        },
        'data_2': {
            'kind': 'data',
            'shape': None,
        }
    }

    def _create_graph_with_transpose(self, order):
        # Minimal data -> op -> data graph with the requested permutation order.
        graph = build_graph(self.nodes_attributes,
                            [('data_1', 'transpose'),
                             ('transpose', 'data_2')],
                            {'transpose': {'order': order}})
        return graph

    # Exhaustively check every permutation of a 4-D shape.
    @generate(*[list(order) for order in list(itertools.permutations(np.arange(4)))])
    def test_transpose_infer_1(self, order):
        graph = self._create_graph_with_transpose(order)
        transpose_node = Node(graph, 'transpose')

        Permute.infer(transpose_node)

        ref = [transpose_node.in_node().shape[i] for i in order]
        self.assertTrue(np.array_equal(transpose_node.out_node().shape, np.array(ref)))

    def test_transpose_infer_2(self):
        # reverse_order=True with no explicit order reverses the input shape.
        order = None
        graph = self._create_graph_with_transpose(order)
        transpose_node = Node(graph, 'transpose')
        transpose_node['reverse_order'] = True

        Permute.infer(transpose_node)

        ref = np.array([x for x in reversed(transpose_node.in_node().shape)])
        self.assertTrue(np.array_equal(transpose_node.out_node().shape, ref),
                        "Shapes are not the same: {} and {}".format(transpose_node.out_node().shape, ref))

    def test_transpose_infer_neg_1(self):
        # Supplying both an explicit order and reverse_order=True is invalid:
        # no output shape must be inferred. (Dead 'ref = None' removed;
        # assertIsNone used instead of assertTrue(x is None).)
        order = np.array([0, 1, 2, 3])
        graph = self._create_graph_with_transpose(order)
        transpose_node = Node(graph, 'transpose')
        transpose_node['reverse_order'] = True

        Permute.infer(transpose_node)

        self.assertIsNone(transpose_node.out_node().shape, "Output shape should be None")

    def test_transpose_infer_neg_2(self):
        # Neither an order nor reverse_order: no output shape must be inferred.
        order = None
        graph = self._create_graph_with_transpose(order)
        transpose_node = Node(graph, 'transpose')
        transpose_node['reverse_order'] = False

        Permute.infer(transpose_node)

        self.assertIsNone(transpose_node.out_node().shape, "Output shape should be None")
|
||||
122
model-optimizer/mo/ops/pooling_test.py
Normal file
122
model-optimizer/mo/ops/pooling_test.py
Normal file
@@ -0,0 +1,122 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.pooling import Pooling
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
# Minimal graph topology shared by the Pooling inference tests below:
# an input data node, the Pooling op, and an output data node.
nodes_attributes = {'node_1': {'value': None, 'kind': 'data'},
                    'pool': {'type': 'Pooling', 'value': None, 'kind': 'op'},
                    'node_2': {'value': None, 'kind': 'data'},
                    }
|
||||
|
||||
|
||||
class TestPoolingPartialInfer(unittest.TestCase):
    """Tests for Pooling.infer across strides, paddings, and conventions."""

    def test_pooling_infer(self):
        # 256x256 spatial input, 3x3 kernel, stride 2, pad 3 on each side,
        # pooling_convention 'full' -> expected spatial output 131x131.
        graph = build_graph(nodes_attributes,
                            [('node_1', 'pool'),
                             ('pool', 'node_2')],
                            {'node_2': {'is_output': True, 'shape': None},
                             'node_1': {'shape': np.array([1, 3, 256, 256])},
                             'pool': {'window': np.array([1, 1, 1, 1]), 'stride': np.array([1, 1, 2, 2]),
                                      'pad': np.array([[0, 0], [0, 0], [3, 3], [3, 3]]),
                                      'pad_spatial_shape': np.array([[3, 3], [3, 3]]),
                                      'pool_method': 'avg', 'exclude_pad': 'false', 'global_pool': 0,
                                      'output_spatial_shape': None, 'output_shape': None,
                                      'kernel_spatial': np.array([3, 3]), 'spatial_dims': np.array([2, 3]),
                                      'channel_dims': np.array([1]), 'batch_dims': np.array([0]),
                                      'pooling_convention': 'full'}
                             })

        pool_node = Node(graph, 'pool')

        Pooling.infer(pool_node)
        exp_shape = np.array([1, 3, 131, 131])
        res_shape = graph.node['node_2']['shape']
        for i in range(0, len(exp_shape)):
            self.assertEqual(exp_shape[i], res_shape[i])

    def test_pooling_infer_decrement_input_spatial(self):
        # Larger stride (3) with pad 1 on a 224x224 input -> 75x75 output.
        graph = build_graph(nodes_attributes,
                            [('node_1', 'pool'),
                             ('pool', 'node_2')],
                            {'node_2': {'is_output': True, 'shape': None},
                             'node_1': {'shape': np.array([1, 3, 224, 224])},
                             'pool': {'window': np.array([1, 1, 1, 1]), 'stride': np.array([1, 1, 3, 3]),
                                      'pad': np.array([[0, 0], [0, 0], [3, 3], [3, 3]]),
                                      'pad_spatial_shape': np.array([[1, 1], [1, 1]]),
                                      'pool_method': 'avg', 'exclude_pad': 'false', 'global_pool': 0,
                                      'output_spatial_shape': None, 'output_shape': None,
                                      'kernel_spatial': np.array([3, 3]), 'spatial_dims': np.array([2, 3]),
                                      'channel_dims': np.array([1]), 'batch_dims': np.array([0]),
                                      'pooling_convention': 'full'}
                             })

        pool_node = Node(graph, 'pool')

        Pooling.infer(pool_node)
        exp_shape = np.array([1, 3, 75, 75])
        res_shape = graph.node['node_2']['shape']
        for i in range(0, len(exp_shape)):
            self.assertEqual(exp_shape[i], res_shape[i])

    def test_pooling_infer_no_convention(self):
        # Same setup as test_pooling_infer but without 'pooling_convention';
        # the expected output differs (130 vs 131), exercising the default
        # rounding behavior of the infer function.
        graph = build_graph(nodes_attributes,
                            [('node_1', 'pool'),
                             ('pool', 'node_2')],
                            {'node_2': {'is_output': True, 'shape': None},
                             'node_1': {'shape': np.array([1, 3, 256, 256])},
                             'pool': {'window': np.array([1, 1, 1, 1]), 'stride': np.array([1, 1, 2, 2]),
                                      'pad': np.array([[0, 0], [0, 0], [3, 3], [3, 3]]),
                                      'pad_spatial_shape': np.array([[3, 3], [3, 3]]),
                                      'pool_method': 'avg', 'exclude_pad': 'false', 'global_pool': 0,
                                      'output_spatial_shape': None, 'output_shape': None,
                                      'kernel_spatial': np.array([3, 3]), 'spatial_dims': np.array([2, 3]),
                                      'channel_dims': np.array([1]), 'batch_dims': np.array([0])}
                             })

        pool_node = Node(graph, 'pool')

        Pooling.infer(pool_node)
        exp_shape = np.array([1, 3, 130, 130])
        res_shape = graph.node['node_2']['shape']
        for i in range(0, len(exp_shape)):
            self.assertEqual(exp_shape[i], res_shape[i])

    def test_pooling_infer_no_shape(self):
        # With no input shape available, no output shape can be inferred.
        graph = build_graph(nodes_attributes,
                            [('node_1', 'pool'),
                             ('pool', 'node_2')],
                            {'node_2': {'is_output': True, 'shape': None},
                             'node_1': {'shape': None},
                             'pool': {'window': np.array([1, 1, 1, 1]), 'stride': np.array([1, 1, 2, 2]),
                                      'pad': np.array([[0, 0], [0, 0], [3, 3], [3, 3]]),
                                      'pad_spatial_shape': np.array([[3, 3], [3, 3]]),
                                      'pool_method': 'avg', 'exclude_pad': 'false',
                                      'output_spatial_shape': None, 'output_shape': None,
                                      'kernel_spatial': np.array([3, 3]), 'spatial_dims': np.array([2, 3]),
                                      'channel_dims': np.array([1]), 'batch_dims': np.array([0]),
                                      'pooling_convention': 'full'}
                             })

        pool_node = Node(graph, 'pool')
        Pooling.infer(pool_node)
        res_shape = graph.node['node_2']['shape']
        self.assertIsNone(res_shape)
|
||||
100
model-optimizer/mo/ops/power_test.py
Normal file
100
model-optimizer/mo/ops/power_test.py
Normal file
@@ -0,0 +1,100 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.power import Power
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
|
||||
class TestPowerOp(unittest.TestCase):
    """Tests for Power.infer with a 'power' attribute or a scalar second input."""

    @staticmethod
    def create_graph(single_input=True):
        # Graph: input1 -> power -> power_data, optionally with a scalar
        # second input ('input2') feeding the power op.
        nodes_attributes = {
            'input1': {
                'kind': 'data',
                'shape': np.array([1, 3, 224, 224]),
                'value': None,
            },
            'input2': {
                'kind': 'data',
                'shape': np.array([]),
                'value': np.array(1.0),
            },
            'power': {
                'kind': 'op',
                'shape': np.array([1, 3, 224, 224]),
            },
            'power_data': {
                'kind': 'data',
                'shape': None,
            },
        }
        edges = [('input1', 'power'), ('power', 'power_data')]
        if not single_input:
            edges.insert(1, ('input2', 'power'))
        return build_graph(nodes_attributes, edges)

    def test_power_single_input_infer1(self):
        # With a scalar 'power' attribute the output shape matches the input.
        graph = self.create_graph(single_input=True)
        graph.graph['layout'] = 'NCHW'
        power_node = Node(graph, 'power')
        power_node['power'] = 1.0

        Power.infer(power_node)

        self.assertTrue(np.array_equal(power_node.out_node().shape, power_node.in_node(0).shape))

    def test_power_two_input_infer1(self):
        # A scalar second input also yields the input shape unchanged.
        graph = self.create_graph(single_input=False)
        graph.graph['layout'] = 'NCHW'
        power_node = Node(graph, 'power')

        Power.infer(power_node)

        self.assertTrue(np.array_equal(power_node.out_node().shape, power_node.in_node(0).shape))

    def test_power_two_input_infer2(self):
        # A non-scalar exponent: no output shape is inferred.
        graph = self.create_graph(single_input=False)
        power_node = Node(graph, 'power')
        exponent = Node(graph, 'input2')
        exponent.value = np.ones((1, 2, 3))

        Power.infer(power_node)

        self.assertIsNone(power_node.out_node().shape)

    def test_power_two_input_infer3(self):
        # An undefined exponent value: no output shape is inferred.
        graph = self.create_graph(single_input=False)
        power_node = Node(graph, 'power')
        exponent = Node(graph, 'input2')
        exponent.value = None

        Power.infer(power_node)

        self.assertIsNone(power_node.out_node().shape)
|
||||
117
model-optimizer/mo/ops/slice_test.py
Normal file
117
model-optimizer/mo/ops/slice_test.py
Normal file
@@ -0,0 +1,117 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
from generator import generator
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.slice import Slice
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
# Graph topology shared by the Slice tests: the main data input ('data_1'),
# two constant inputs ('begin', 'size'), the Slice op, and the output.
nodes_attributes = {
    'data_1': {
        'kind': 'data',
        'shape': None,
        'value': None,
    },
    'begin': {
        'kind': 'data',
        'shape': None,
        'value': None,
    },
    'size': {
        'kind': 'data',
        'shape': None,
        'value': None,
    },
    'slice': {
        'op': 'Slice',
        'axis': None,
        'start': None,
        'end': None,
        'kind': 'op',
    },
    'data_2': {
        'kind': 'data',
        'shape': None,
        'value': None,
    }
}
|
||||
|
||||
|
||||
@generator
class TestSliceOp(unittest.TestCase):
    """Tests for Slice.infer: constant folding, shape-only inference, and the
    size == -1 ("take the rest of the dimension") convention."""

    def test_slice_infer_constant(self):
        # Testing constant path case: the input value is known, so the output
        # value is folded and the 'slices' attribute records the slice objects.
        graph = build_graph(nodes_attributes,
                            [('data_1', 'slice'),
                             ('begin', 'slice'),
                             ('size', 'slice'),
                             ('slice', 'data_2')],
                            {'data_1': {'shape': np.array([4]), 'value': np.array([1, 3, 224, 224])},
                             'slice': {'start': np.array([1]), 'end': np.array([2])},
                             'size': {'value': np.array([1])},
                             'begin': {'value': np.array([1])}})

        slice_node = Node(graph, 'slice')
        Slice.infer(slice_node)

        self.assertTrue(np.array_equal(slice_node.out_node().value, np.array([3])))
        self.assertTrue(np.array_equal(slice_node.out_node().shape, np.array([1])))
        self.assertTrue(np.array_equal(slice_node['slices'], np.array([slice(1, 2, 1)])))

    def test_slice_infer_non_constant(self):
        # Non-constant path (input value is None): only the shape is inferred.
        # assertIsNone replaces the original assertTrue(np.array_equal(value, None)).
        graph = build_graph(nodes_attributes,
                            [('data_1', 'slice'),
                             ('begin', 'slice'),
                             ('size', 'slice'),
                             ('slice', 'data_2')],
                            {'data_1': {'shape': np.array([4, 5, 6])},
                             'slice': {'start': np.array([1, 2]),
                                       'end': np.array([4, 3])},
                             'size': {'value': np.array([3, 1])},
                             'begin': {'value': np.array([1, 2])}})

        slice_node = Node(graph, 'slice')

        Slice.infer(slice_node)
        self.assertIsNone(slice_node.out_node().value)
        self.assertTrue(np.array_equal(slice_node.out_node().shape, np.array([3, 1, 6])))
        self.assertTrue(np.array_equal(slice_node['slices'], np.array([slice(1, 4, 1), slice(2, 3, 1), slice(0, 6, 1)])))

    def test_slice_infer_multiply_params(self):
        # Test case when size[i] == -1 (that means all
        # remaining elements in dimension i are included in the slice).
        graph = build_graph(nodes_attributes,
                            [('data_1', 'slice'),
                             ('begin', 'slice'),
                             ('size', 'slice'),
                             ('slice', 'data_2')],
                            {'data_1': {'shape': np.array([4, 5, 6])},
                             'slice': {'start': np.array([1, 2]),
                                       'end': np.array([4, 1])},
                             'size': {'value': np.array([3, -1])},
                             'begin': {'value': np.array([1, 2])}})

        slice_node = Node(graph, 'slice')

        Slice.infer(slice_node)
        self.assertIsNone(slice_node.out_node().value)
        self.assertTrue(np.array_equal(slice_node.out_node().shape, np.array([3, 3, 6])))
        self.assertTrue(np.array_equal(slice_node['slices'], np.array([slice(1, 4, 1), slice(2, 5, 1), slice(0, 6, 1)])))
|
||||
196
model-optimizer/mo/ops/tile_test.py
Normal file
196
model-optimizer/mo/ops/tile_test.py
Normal file
@@ -0,0 +1,196 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.tile import Tile
|
||||
from mo.utils.unittest.graph import build_graph
|
||||
|
||||
# Graph topology shared by the Tile tests: the data input (default shape
# [10, 20, 30, 40]), a constant 'tile_values' input, the Tile op, and the output.
nodes_attributes = {'data': {'value': None, 'shape': np.array([10, 20, 30, 40]), 'kind': 'data'},
                    'tile_values': {'value': None, 'shape': np.array([4]), 'kind': 'data'},
                    'tile': {'type': 'Tile', 'kind': 'op'},
                    'tile_out': {'value': None, 'shape': None, 'kind': 'data'},
                    }
|
||||
|
||||
|
||||
class TestTileInfer(unittest.TestCase):
    """Tests for Tile.infer: single- and multi-axis tiling, constant folding,
    the one-input (axis/tiles attribute) flavor, and error cases."""

    def test_tile_infer_correct(self):
        # Exactly one tile value != 1: axis/tiles attributes are set.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile_values': {'value': np.array([7, 1, 1, 1])}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertTrue(np.all(np.array([70, 20, 30, 40]) == graph.node['tile_out']['shape']))
        self.assertEqual(tile_node.axis, 0)
        self.assertEqual(tile_node.tiles, 7)

    def test_tile_infer_correct_2(self):
        # Same as above but tiling along axis 1.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile_values': {'value': np.array([1, 7, 1, 1])}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertTrue(np.all(np.array([10, 140, 30, 40]) == graph.node['tile_out']['shape']))
        self.assertEqual(tile_node.axis, 1)
        self.assertEqual(tile_node.tiles, 7)

    def test_tile_infer_correct_2d_tensor(self):
        # Works on a rank-2 tensor as well.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'data': {'shape': np.array([3, 7])},
                             'tile_values': {'value': np.array([5, 1])}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertTrue(np.all(np.array([15, 7]) == graph.node['tile_out']['shape']))
        self.assertEqual(tile_node.axis, 0)
        self.assertEqual(tile_node.tiles, 5)

    def test_tile_infer_all_ones(self):
        # All-ones tile values: shape unchanged, axis defaults to 0, tiles to 1.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile_values': {'value': np.array([1, 1, 1, 1])}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertTrue(np.all(np.array([10, 20, 30, 40]) == graph.node['tile_out']['shape']))
        self.assertEqual(tile_node.axis, 0)
        self.assertEqual(tile_node.tiles, 1)

    def test_tile_infer_two_non_one(self):
        # Multiple tile indices: shape is still inferred but 'type' is reset
        # to None and no axis/tiles attributes are set.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile_values': {'value': np.array([2, 1, 1, 2])}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertIsNone(graph.node['tile']['type'])
        self.assertTrue(np.all(np.array([20, 20, 30, 80]) == graph.node['tile_out']['shape']))
        self.assertFalse(tile_node.has_and_set('axis'))
        self.assertFalse(tile_node.has_and_set('tiles'))

    def test_tile_infer_three_non_one(self):
        # Three non-unit tile values: same multi-axis fallback as above.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile_values': {'value': np.array([2, 1, 5, 2])}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertIsNone(graph.node['tile']['type'])
        self.assertTrue(np.all(np.array([20, 20, 150, 80]) == graph.node['tile_out']['shape']))

    def test_tile_infer_none_input_shape(self):
        # Unknown input shape: no output shape can be inferred.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'data': {'shape': None},
                             'tile_values': {'value': np.array([1, 7, 1, 1])}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertIsNone(graph.node['tile_out']['shape'])

    def test_tile_infer_values_test(self):
        # Constant input: the output value is folded via np.tile.
        input_data = np.arange(-30, 60, 0.25).reshape([2, 4, 3, -1])
        tile_values = np.array([3, 1, 1, 1])
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'data': {'shape': input_data.shape, 'value': input_data},
                             'tile_values': {'value': tile_values}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertTrue(np.all(np.tile(input_data, tile_values) == graph.node['tile_out']['value']))
        self.assertEqual(tile_node.axis, 0)
        self.assertEqual(tile_node.tiles, 3)

    def test_tile_infer_values_const_propagation(self):
        """
        Test for constant propagation even if tile with multiple tile indices is not supported
        """
        input_data = np.arange(-30, 60, 0.25).reshape([2, 4, 3, -1])
        tile_values = np.array([4, 3, 2, 5])
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'data': {'shape': input_data.shape, 'value': input_data},
                             'tile_values': {'value': tile_values}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertTrue(np.all(np.tile(input_data, tile_values) == graph.node['tile_out']['value']))
        self.assertIsNone(tile_node.type)

    def test_tile_infer_undefined_tile_values(self):
        # Unknown tile values: no output shape can be inferred.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile_values': {'value': None}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertIsNone(graph.node['tile_out']['shape'])

    def test_tile_infer_shapes_mismatch(self):
        # Tile values rank (3) does not match input rank (4): inference fails.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile_values', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile_values': {'value': np.array([1, 2, 1]), 'shape': np.array([3])}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertIsNone(graph.node['tile_out']['shape'])

    def test_tile_infer_one_input_correct(self):
        # One-input flavor: axis and tiles come from node attributes.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile': {'axis': 1, 'tiles': 7}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertTrue(np.all(np.array([10, 140, 30, 40]) == graph.node['tile_out']['shape']))
        self.assertEqual(tile_node.axis, 1)
        self.assertEqual(tile_node.tiles, 7)

    def test_tile_infer_one_input_correct_missing_axis(self):
        # One-input flavor without 'axis': inference fails.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile': {'tiles': 7}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertIsNone(graph.node['tile_out']['shape'])

    def test_tile_infer_one_input_correct_missing_tiles(self):
        # One-input flavor without 'tiles': inference fails.
        graph = build_graph(nodes_attributes,
                            [('data', 'tile'),
                             ('tile', 'tile_out')],
                            {'tile': {'axis': 1}})
        tile_node = Node(graph, 'tile')
        Tile.infer(tile_node)
        self.assertIsNone(graph.node['tile_out']['shape'])
|
||||
67
model-optimizer/mo/ops/unsqueeze_test.py
Normal file
67
model-optimizer/mo/ops/unsqueeze_test.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""
|
||||
Copyright (c) 2018 Intel Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
|
||||
import numpy as np
|
||||
from generator import generator
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.ops.unsqueeze import Unsqueeze
|
||||
from mo.utils.unittest.graph import build_graph, compare_graphs
|
||||
|
||||
|
||||
@generator
class TestUnsqueezeOp(unittest.TestCase):
    """Shape-inference tests for the Unsqueeze operation."""

    # Minimal data -> op -> data graph template; shapes/values filled per test.
    nodes_attributes = {
        'data_1': {'kind': 'data', 'shape': None, 'value': None},
        'unsq': {'op': 'Unsqueeze', 'kind': 'op', 'unsqueeze_dims': None},
        'data_2': {'kind': 'data', 'shape': None, 'value': None},
    }

    def test_unsqueeze_infer(self):
        """Unsqueeze must insert size-1 dimensions at the requested positions."""
        edges = [('data_1', 'unsq'), ('unsq', 'data_2')]

        graph = build_graph(self.nodes_attributes, edges,
                            {'data_1': {'shape': np.array([1, 3, 64, 64])},
                             'unsq': {'unsqueeze_dims': np.array([0, 4])}})

        # Reference: dims 0 and 4 of the result become 1.
        graph_ref = build_graph(self.nodes_attributes, edges,
                                {'data_1': {'shape': np.array([1, 3, 64, 64])},
                                 'unsq': {'unsqueeze_dims': np.array([0, 4])},
                                 'data_2': {'shape': np.array([1, 1, 3, 64, 1, 64])}})

        Unsqueeze.infer(Node(graph, 'unsq'))

        (flag, resp) = compare_graphs(graph, graph_ref, 'data_2')
        self.assertTrue(flag, resp)
|
||||
Reference in New Issue
Block a user