[MO][Opset13] Multinomial-13 support in MO IR Reader (#20446)

* Add Multinomial-13 to MO

* Add Multinomial tests for MO IR reader

* Move convert_type check

* Imports clean up

* Update package BOM file

* Avoid file collisions in tests
This commit is contained in:
Katarzyna Mitrus
2023-10-17 17:50:42 +02:00
committed by GitHub
parent f78e206ecb
commit 33a97740b2
3 changed files with 162 additions and 27 deletions

View File

@@ -928,6 +928,7 @@ openvino/tools/mo/ops/MatMul.py
openvino/tools/mo/ops/memory.py
openvino/tools/mo/ops/memoryoffset.py
openvino/tools/mo/ops/merge.py
openvino/tools/mo/ops/multinomial.py
openvino/tools/mo/ops/mvn.py
openvino/tools/mo/ops/mxfft.py
openvino/tools/mo/ops/mxrepeat.py
@@ -1106,4 +1107,4 @@ openvino/tools/mo/utils/tensorboard_util.py
openvino/tools/mo/utils/type_utils.py
openvino/tools/mo/utils/unsupported_ops.py
openvino/tools/mo/utils/utils.py
openvino/tools/mo/utils/version.py
openvino/tools/mo/utils/version.py

View File

@@ -0,0 +1,69 @@
# Copyright (C) 2018-2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import numpy as np
from openvino.tools.mo.front.common.partial_infer.utils import dynamic_dimension
from openvino.tools.mo.front.extractor import bool_to_str
from openvino.tools.mo.graph.graph import Graph, Node
from openvino.tools.mo.ops.op import Op
class Multinomial(Op):
    """MO (Model Optimizer) representation of the opset13 Multinomial op.

    Input 0 holds per-batch class probabilities (optionally log-probabilities,
    see ``log_probs``); input 1 holds the number of samples to draw. The single
    output contains the drawn class indices.
    """
    op = 'Multinomial'
    enabled = False

    def __init__(self, graph: Graph, attrs: dict):
        mandatory_props = {
            'type': self.op,
            'op': self.op,
            'version': 'opset13',
            'infer': self.infer,
            'in_ports_count': 2,
            'out_ports_count': 1,
            'type_infer': self.type_infer,
            # Attribute defaults below follow the opset13 spec and may be
            # overridden by `attrs`.
            'with_replacement': False,
            'log_probs': False,
            'global_seed': 0,
            'op_seed': 0,
            'convert_type': np.int64,
        }
        super().__init__(graph, mandatory_props, attrs)

    def backend_attrs(self):
        # Attributes emitted into the IR; boolean attributes are rendered as
        # the strings 'true'/'false' via bool_to_str.
        return ['convert_type',
                ('with_replacement',
                 lambda node: bool_to_str(node, 'with_replacement')),
                ('log_probs', lambda node: bool_to_str(node, 'log_probs')),
                'global_seed',
                'op_seed']

    def supported_attrs(self):
        return ['convert_type',
                'with_replacement',
                'log_probs',
                'global_seed',
                'op_seed']

    @staticmethod
    def type_infer(node: Node):
        # The output element type is dictated by 'convert_type'; anything
        # other than the string 'i32' falls back to int64.
        assert node.has_valid('convert_type')
        dst_type = np.int32 if node['convert_type'] == 'i32' else np.int64
        node.out_port(0).set_data_type(dst_type)

    @staticmethod
    def infer(node: Node):
        # Output shape is [batch, num_samples]; the sample dimension becomes
        # dynamic when num_samples is not a known constant.
        probs_shape = node.in_node(0).shape
        output_shape = []
        if probs_shape is not None and probs_shape.size == 2:
            output_shape.append(probs_shape[0])
        num_samples = node.in_port(1).data.get_value()
        output_shape.append(
            num_samples if num_samples is not None else dynamic_dimension)
        node.out_port(0).data.set_shape(output_shape)

View File

@@ -10,7 +10,7 @@ import openvino.runtime.opset13 as opset13
import openvino.runtime.opset12 as opset12
import openvino.runtime.opset11 as opset11
import openvino.runtime.opset10 as opset10
from openvino.runtime import Model, serialize, Core, PartialShape, Dimension
from openvino.runtime import Model, serialize, Core, PartialShape, Dimension, Type
from openvino.tools.mo.utils.ir_reader.restore_graph import restore_graph_from_ir, save_restored_graph
from openvino.tools.mo.utils.logger import init_logger
@@ -23,16 +23,22 @@ class TestOps(unittest.TestCase):
@staticmethod
def check_graph_can_save(model, name):
with tempfile.TemporaryDirectory() as tmp:
model_xml = Path(tmp) / (name + '.xml')
model_bin = Path(tmp) / (name + '.bin')
tmp_path = Path(tmp)
model_xml = tmp_path / (name + '.xml')
model_bin = tmp_path / (name + '.bin')
serialize(model, model_xml, model_bin)
graph, _ = restore_graph_from_ir(model_xml, model_bin)
save_restored_graph(graph, tmp, {}, name)
save_restored_graph(graph, tmp, {}, name + '_restored')
# restore 2 times to validate that after save graph doesn't lose attributes etc.
graph, _ = restore_graph_from_ir(model_xml, model_bin)
restored_model_xml = tmp_path / (name + '_restored.xml')
restored_model_bin = tmp_path / (name + '_restored.bin')
graph, _ = restore_graph_from_ir(
restored_model_xml, restored_model_bin)
core = Core()
core.set_property({"ENABLE_MMAP": False})
# check that re-saved model can be read in runtime
Core().read_model(model_xml)
return graph
model = core.read_model(restored_model_xml)
return graph, model
def test_topk_11(self):
data_shape = [6, 12, 10, 24]
@@ -43,7 +49,7 @@ class TestOps(unittest.TestCase):
topk = opset11.topk(data_parameter, k_val, axis,
"max", "value", stable=True, name="TopK_11")
model = Model(topk, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'topk_model')
graph, _ = TestOps.check_graph_can_save(model, 'topk_model')
topk_node = graph.get_op_nodes(op="TopK")[0]
self.assertEqual(topk_node["version"], "opset11")
self.assertTrue(topk_node["stable"])
@@ -56,7 +62,7 @@ class TestOps(unittest.TestCase):
interpolate = opset11.interpolate(data_parameter, np.int32(
[20, 48]), "nearest", "sizes", axes=np.int32([2, 3]), name="Interpolate_11")
model = Model(interpolate, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'interpolate_model')
graph, _ = TestOps.check_graph_can_save(model, 'interpolate_model')
interpolate_node = graph.get_op_nodes(op="Interpolate")[0]
self.assertEqual(interpolate_node["version"], "opset11")
self.assertTrue("force_precision_in_ports" in interpolate_node)
@@ -69,7 +75,7 @@ class TestOps(unittest.TestCase):
interpolate = opset11.interpolate(data_parameter, np.float32(
[2., 2.]), "nearest", "scales", axes=np.int32([2, 3]), name="Interpolate_11")
model = Model(interpolate, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'interpolate_model')
graph, _ = TestOps.check_graph_can_save(model, 'interpolate_model')
interpolate_node = graph.get_op_nodes(op="Interpolate")[0]
self.assertEqual(interpolate_node["version"], "opset11")
self.assertTrue("force_precision_in_ports" not in interpolate_node)
@@ -81,7 +87,7 @@ class TestOps(unittest.TestCase):
interpolate = opset11.interpolate(data_parameter, np.int32(
[6, 12, 20, 48]), "nearest", "sizes", name="Interpolate_11")
model = Model(interpolate, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'interpolate_model')
graph, _ = TestOps.check_graph_can_save(model, 'interpolate_model')
interpolate_node = graph.get_op_nodes(op="Interpolate")[0]
self.assertEqual(interpolate_node["version"], "opset11")
self.assertTrue("force_precision_in_ports" in interpolate_node)
@@ -94,7 +100,7 @@ class TestOps(unittest.TestCase):
interpolate = opset10.interpolate(data_parameter, np.int32([20, 48]), np.float32(
[2, 2]), "nearest", "sizes", axes=np.int32([2, 3]), name="Interpolate_4")
model = Model(interpolate, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'interpolate4_model')
graph, _ = TestOps.check_graph_can_save(model, 'interpolate4_model')
interpolate_node = graph.get_op_nodes(op="Interpolate")[0]
self.assertEqual(interpolate_node["version"], "opset4")
@@ -105,7 +111,7 @@ class TestOps(unittest.TestCase):
unique = opset10.unique(data_parameter, axis=np.int32(
[2]), sorted=True, name="Unique_10")
model = Model(unique, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'unique_model')
graph, _ = TestOps.check_graph_can_save(model, 'unique_model')
unique_node = graph.get_op_nodes(op="Unique")[0]
self.assertEqual(unique_node["version"], "opset10")
self.assertListEqual(unique_node.out_port(
@@ -118,7 +124,7 @@ class TestOps(unittest.TestCase):
data_shape, name="Data", dtype=np.float32)
is_finite = opset10.is_finite(data_parameter, name="Is_finite_10")
model = Model(is_finite, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'is_finite_model')
graph, _ = TestOps.check_graph_can_save(model, 'is_finite_model')
is_finite_node = graph.get_op_nodes(op="IsFinite")[0]
self.assertEqual(is_finite_node["version"], "opset10")
@@ -128,7 +134,7 @@ class TestOps(unittest.TestCase):
data_shape, name="Data", dtype=np.float32)
is_inf = opset10.is_inf(data_parameter, name="Is_inf_10")
model = Model(is_inf, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'is_inf_model')
graph, _ = TestOps.check_graph_can_save(model, 'is_inf_model')
is_inf_node = graph.get_op_nodes(op="IsInf")[0]
self.assertEqual(is_inf_node["version"], "opset10")
@@ -138,7 +144,7 @@ class TestOps(unittest.TestCase):
data_shape, name="Data", dtype=np.float32)
is_nan = opset10.is_nan(data_parameter, name="Is_nan_10")
model = Model(is_nan, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'is_nan_model')
graph, _ = TestOps.check_graph_can_save(model, 'is_nan_model')
is_nan_node = graph.get_op_nodes(op="IsNaN")[0]
self.assertEqual(is_nan_node["version"], "opset10")
@@ -177,7 +183,7 @@ class TestOps(unittest.TestCase):
out2 = if_node.set_output(then_body_res_2, else_body_res_2)
model = Model([out1, out2], [parameter_x, parameter_y])
graph = TestOps.check_graph_can_save(model, 'if_model')
graph, _ = TestOps.check_graph_can_save(model, 'if_model')
if_node = graph.get_op_nodes(op="If")[0]
self.assertEqual(if_node["version"], "opset8")
_, layer_info, _ = if_node['IE'][0]
@@ -192,7 +198,7 @@ class TestOps(unittest.TestCase):
strided_slice = opset11.strided_slice(data_parameter, np.int32([1, 2, 3, 4]), np.int32(
[3, 6, 9, 12]), np.int32([1, 1, 1, 1]), begin_mask=[], end_mask=[], name="StridedSlice_10")
model = Model(strided_slice, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'strided_slice_model')
graph, _ = TestOps.check_graph_can_save(model, 'strided_slice_model')
strided_slice_node = graph.get_op_nodes(op="StridedSlice")[0]
self.assertEqual(strided_slice_node["version"], "opset1")
@@ -206,7 +212,7 @@ class TestOps(unittest.TestCase):
mul = opset11.multiply(scatter, np.int64([1, 2]))
reshape = opset11.reshape(data_parameter, mul, True)
model = Model(reshape, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'scatter_dynamic_model')
graph, _ = TestOps.check_graph_can_save(model, 'scatter_dynamic_model')
scatter_update_node = graph.get_op_nodes(op="ScatterUpdate")[0]
self.assertListEqual(scatter_update_node.out_port(0).data.get_value().tolist(), [0, None])
@@ -214,7 +220,7 @@ class TestOps(unittest.TestCase):
data_parameter = opset12.parameter([6, 12, 10, 24], name="Data", dtype=np.float32)
pad = opset12.pad(data_parameter, np.int64([0, 0, -1, -2]), np.int64([0, 0, -3, -4]), "constant")
model = Model(pad, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'pad_model')
graph, _ = TestOps.check_graph_can_save(model, 'pad_model')
pad_node = graph.get_op_nodes(op="Pad")[0]
self.assertEqual(pad_node["version"], "opset12")
self.assertListEqual(pad_node.in_port(1).data.get_value().tolist(), [0, 0, -1, -2])
@@ -225,7 +231,7 @@ class TestOps(unittest.TestCase):
data_parameter = opset12.parameter([10], name="Data", dtype=np.float32)
scatter = opset12.scatter_elements_update(data_parameter, np.int32([5, 0, 7, 5]), np.float32([5., 6., 1.5, -5.]), np.int32(0), "sum", False)
model = Model(scatter, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'scatter_model')
graph, _ = TestOps.check_graph_can_save(model, 'scatter_model')
scatter_node = graph.get_op_nodes(op="ScatterElementsUpdate")[0]
self.assertListEqual(scatter_node.out_port(0).data.get_shape().tolist(), [10])
self.assertEqual(scatter_node["version"], "opset12")
@@ -240,7 +246,7 @@ class TestOps(unittest.TestCase):
epsilon = 1e-6
node = opset12.group_normalization(data_parameter, scale, bias, num_groups, epsilon)
model = Model(node, [data_parameter])
graph = TestOps.check_graph_can_save(model, 'group_norm_model')
graph, _ = TestOps.check_graph_can_save(model, 'group_norm_model')
gn_node = graph.get_op_nodes(op="GroupNormalization")[0]
self.assertListEqual(gn_node.out_port(0).data.get_shape().tolist(), [1, 3, 3, 3])
self.assertEqual(gn_node["version"], "opset12")
@@ -253,7 +259,7 @@ class TestOps(unittest.TestCase):
op = opset13.bitwise_and(a, b)
model = Model(op, [a, b])
graph = TestOps.check_graph_can_save(model, "bitwise_and_model")
graph, _ = TestOps.check_graph_can_save(model, "bitwise_and_model")
op_node = graph.get_op_nodes(op="BitwiseAnd")[0]
self.assertListEqual(op_node.out_port(0).data.get_shape().tolist(), [4, 2])
self.assertEqual(op_node["version"], "opset13")
@@ -265,7 +271,7 @@ class TestOps(unittest.TestCase):
op = opset13.bitwise_or(a, b)
model = Model(op, [a, b])
graph = TestOps.check_graph_can_save(model, "bitwise_or_model")
graph, _ = TestOps.check_graph_can_save(model, "bitwise_or_model")
op_node = graph.get_op_nodes(op="BitwiseOr")[0]
self.assertListEqual(op_node.out_port(0).data.get_shape().tolist(), [4, 2])
self.assertEqual(op_node["version"], "opset13")
@@ -277,7 +283,7 @@ class TestOps(unittest.TestCase):
op = opset13.bitwise_xor(a, b)
model = Model(op, [a, b])
graph = TestOps.check_graph_can_save(model, "bitwise_xor_model")
graph, _ = TestOps.check_graph_can_save(model, "bitwise_xor_model")
op_node = graph.get_op_nodes(op="BitwiseXor")[0]
self.assertListEqual(op_node.out_port(0).data.get_shape().tolist(), [4, 2])
self.assertEqual(op_node["version"], "opset13")
@@ -288,7 +294,66 @@ class TestOps(unittest.TestCase):
op = opset13.bitwise_not(a)
model = Model(op, [a])
graph = TestOps.check_graph_can_save(model, "bitwise_not_model")
graph, _ = TestOps.check_graph_can_save(model, "bitwise_not_model")
op_node = graph.get_op_nodes(op="BitwiseNot")[0]
self.assertListEqual(op_node.out_port(0).data.get_shape().tolist(), [4, 2])
self.assertEqual(op_node["version"], "opset13")
def test_multinomial_13_param_inputs(self):
data_shape = [2, 8]
probs = opset13.parameter(
data_shape, name="probs", dtype=np.float32)
num_samples = opset13.parameter(
[1], name="num_samples", dtype=np.int32)
op = opset13.multinomial(probs, num_samples,
convert_type="i32",
with_replacement=True,
log_probs=True,
global_seed=456,
op_seed=213)
model = Model(op, [probs, num_samples])
graph, loaded_model = TestOps.check_graph_can_save(
model, 'multinomial_param_model')
graph_node = graph.get_op_nodes(op="Multinomial")[0]
self.assertEqual(graph_node["version"], "opset13")
self.assertListEqual(graph_node.out_port(
0).data.get_shape().tolist(), [2, None])
self.assertEqual(graph_node["convert_type"], "i32")
self.assertTrue(graph_node["with_replacement"])
self.assertTrue(graph_node["log_probs"])
self.assertEqual(graph_node["global_seed"], 456)
self.assertEqual(graph_node["op_seed"], 213)
self.assertEqual(loaded_model.get_output_element_type(0), Type.i32)
self.assertEqual(loaded_model.get_output_partial_shape(
0), PartialShape([2, -1]))
def test_multinomial_13_const_inputs(self):
probs = opset13.constant(
[[0.4, 0.5, 0.1], [0.3, 0.2, 0.5]], name="probs", dtype=np.float32)
num_samples = opset13.constant(
[3], name="num_samples", dtype=np.int64)
op = opset13.multinomial(probs, num_samples,
convert_type="i64",
with_replacement=False,
log_probs=False)
model = Model(op, [])
graph, loaded_model = TestOps.check_graph_can_save(
model, 'multinomial_const_model')
graph_node = graph.get_op_nodes(op="Multinomial")[0]
self.assertEqual(graph_node["version"], "opset13")
self.assertListEqual(graph_node.out_port(
0).data.get_shape().tolist(), [2, 3])
self.assertEqual(graph_node["convert_type"], "i64")
self.assertFalse(graph_node["with_replacement"])
self.assertFalse(graph_node["log_probs"])
self.assertEqual(graph_node["global_seed"], 0)
self.assertEqual(graph_node["op_seed"], 0)
self.assertEqual(loaded_model.get_output_element_type(0), Type.i64)
self.assertEqual(loaded_model.get_output_partial_shape(
0), PartialShape([2, 3]))