mo.convert() method. (#11347)

* convert() method added.

* Moved conversion to convert() method.

* Fixed commits.

* Output dir fix.

* Added objects support for extensions param.

* Added support for transformations_config extension objects.

* Input to str unit tests.

* Added tests, added comments.

* Updated BOM.

* Removed commented code.

* Fixed extension passing.

* Small corrections.

* Fixed for python 3.6.

* Small fix.

* Moved dir creating to ov.serialize(), removed mo.serialize(), small fixes.

* Small fix.

* Small correction.

* Removed copying of params, moved convert implementation to separate module.

* Import fixes.

* Moved hiding of exceptions to main().

* Updated comment.

* Fixed unit tests.

* Comment changed.

* Fixed dir creating.

* Tests fixed.

* Small fixes.

* Test fix.

* Added meta data generation, removed printing of execution time for silent mode.

* Import fix.

* Conflict fix.

* Fixed error.

* Fix for custom config.

* Added version, data_type params to help.

* Added mo.convert() full-functional tests.

* Small corrections.

* Comment correction.

* Moved convert to openvino package, moved LayoutMap and InputCutInfo to openvino.convert.

* Added help param.

* Wrong change removed.

* Small fix.

* Removed unnecessary comments.

* Removed .xml extension check from append_ir_info.

* Added missed file.

* Fixed error.

* Fix for bool value in InputCutInfo.

* Moved InputCutInfo, LayoutMap to openvino.tools.mo.

* Moved InputCutInfo, LayoutMap to openvino.tools.mo.

* Moved check and read_model to emit_ir.

* Small correction.

* Added comment.

* Added unit_tests with convert().

* Small corrections.

* Removed convert alias from openvino.

* Fixed conflicting unit tests.

* Removed unnecessary warnings.

* Params check fix.

* Small correction.

* Added paths checks.

* Added negative tests for to_str methods, fixed errors.

* Added tuples support in input parameter.

* Moved reminders to update OV and use API 2.0 to main().

* Returned .mapping file generating.

* Added positional input_model param.

* Added test for unnamed input_model.

* Optimize imports.

* Added more informative error for brackets syntax in --input.

* Conflict fix.

* Conflict fix.
This commit is contained in:
Anastasia Popova
2022-09-23 15:29:00 +02:00
committed by GitHub
parent 5fff132611
commit 940844e71f
33 changed files with 2326 additions and 791 deletions

View File

@@ -18,6 +18,7 @@
#include "ngraph/opsets/opset1.hpp"
#include "openvino/op/util/framework_node.hpp"
#include "openvino/pass/constant_folding.hpp"
#include "openvino/util/file_util.hpp"
#include "pugixml.hpp"
#include "transformations/hash.hpp"
#include "transformations/rt_info/primitives_priority_attribute.hpp"
@@ -1022,6 +1023,10 @@ bool pass::Serialize::run_on_model(const std::shared_ptr<ngraph::Function>& f_or
if (m_xmlFile && m_binFile) {
serializeFunc(*m_xmlFile, *m_binFile, f, m_version, m_custom_opsets);
} else {
auto xmlDir = ov::util::get_directory(m_xmlPath);
if (xmlDir != m_xmlPath)
ov::util::create_directory_recursive(xmlDir);
std::ofstream bin_file(m_binPath, std::ios::out | std::ios::binary);
NGRAPH_CHECK(bin_file, "Can't open bin file: \"" + m_binPath + "\"");

View File

@@ -0,0 +1,51 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from pathlib import Path
from openvino.runtime import serialize
from openvino.tools.mo import convert
from openvino.tools.mo.utils.ir_engine.ir_engine import IREngine
from common.utils.common_utils import generate_ir
class CommonMOConvertTest:
    """Shared helpers for MO Python-API conversion tests.

    Provides two comparison strategies: Python-API IR vs. command-line IR
    (`_test`) and Python-API IR vs. a hand-built reference graph
    (`_test_by_ref_graph`).
    """

    @staticmethod
    def _read_ir(temp_dir, name):
        # Load a serialized IR pair (<name>.xml / <name>.bin) for comparison.
        return IREngine(Path(temp_dir, name + '.xml'), Path(temp_dir, name + '.bin'))

    @staticmethod
    def generate_ir_python_api(**kwargs):
        # 'output_dir' is consumed here; every remaining kwarg (including
        # 'model_name') is forwarded to convert().
        out_dir = kwargs.pop('output_dir')
        name = kwargs['model_name']
        ov_model = convert(**kwargs)
        serialize(ov_model, str(Path(out_dir, name + '.xml')))

    def _test(self, temp_dir, test_params, ref_params):
        """
        Generates two IRs using MO Python API and using cmd tool.
        Then two IRs are compared.
        """
        test_params.update({"model_name": 'model_test', "output_dir": temp_dir})
        ref_params.update({"model_name": 'model_ref', "output_dir": temp_dir})

        self.generate_ir_python_api(**test_params)

        exit_code, stderr = generate_ir(**ref_params)
        assert not exit_code, (
            "Reference IR generation failed with {} exit code: {}".format(exit_code, stderr))

        ir_test = self._read_ir(temp_dir, 'model_test')
        ir_ref = self._read_ir(temp_dir, 'model_ref')
        flag, resp = ir_test.compare(ir_ref)
        assert flag, '\n'.join(resp)

    def _test_by_ref_graph(self, temp_dir, test_params, ref_graph):
        """
        Generates IR using MO Python API, reads it and compares with reference graph.
        """
        test_params.update({"model_name": 'model_test', "output_dir": temp_dir})

        self.generate_ir_python_api(**test_params)

        ir_test = self._read_ir(temp_dir, 'model_test')
        flag, resp = ir_test.compare(ref_graph)
        assert flag, '\n'.join(resp)

View File

@@ -0,0 +1,13 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import inspect
from common.layer_test_class import get_params
def pytest_generate_tests(metafunc):
    """Parametrize every test with the argument names/values from get_params()."""
    # Parameter names are taken from get_params()'s own signature so the
    # fixture list stays in sync with the generator automatically.
    attr_names = [name for name in inspect.signature(get_params).parameters]
    metafunc.parametrize(attr_names, get_params(), scope="function")

View File

@@ -0,0 +1,206 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import numpy as np
import pytest
from openvino.tools.mo.convert import InputCutInfo, LayoutMap
from common.mo_convert_test_class import CommonMOConvertTest
from common.tf_layer_test_class import save_to_pb
from openvino.runtime import Model, Layout, PartialShape, Shape, layout_helpers, Type, Dimension
class TestComplexParams(CommonMOConvertTest):
    """Checks that mo.convert() accepts Python objects as parameter values
    (PartialShape, Dimension, Layout, Type, InputCutInfo, LayoutMap, tuples,
    dicts, lists) and produces the same IR as the command-line tool given the
    equivalent string arguments ('params_ref')."""

    def create_tf_model(self, tmp_dir):
        """Build a TF graph with three inputs (Input1..Input3) and three
        Sigmoid_{i} outputs, save it to .pb and return the path."""
        #
        # Create Tensorflow model with multiple inputs/outputs
        #
        import tensorflow as tf

        tf.compat.v1.reset_default_graph()

        with tf.compat.v1.Session() as sess:
            inp1 = tf.compat.v1.placeholder(tf.float32, [1, 3, 2, 2], 'Input1')
            inp2 = tf.compat.v1.placeholder(tf.float32, [1, 3, 2, 2], 'Input2')
            inp3 = tf.compat.v1.placeholder(tf.float32, [1, 3, 2, 2], 'Input3')

            relu1 = tf.nn.relu(inp1, name='Relu1')
            relu2 = tf.nn.relu(inp2, name='Relu2')
            relu3 = tf.nn.relu(inp3, name='Relu3')

            concat = tf.concat([relu1, relu2, relu3], axis=0, name='Concat')

            outputs = tf.split(concat, 3)
            outputs_list = []
            for i, output in enumerate(outputs):
                outputs_list.append(tf.nn.sigmoid(output, name='Sigmoid_{}'.format(i)))

            tf.compat.v1.global_variables_initializer()
            tf_net = sess.graph_def

        # save model to .pb and return path to the model
        return save_to_pb(tf_net, tmp_dir)

    def create_tf_model_single_input_output(self, tmp_dir):
        """Build a TF graph Input -> Relu -> Sigmoid, save it to .pb and
        return the path."""
        #
        # Create Tensorflow model with single input/output
        #
        import tensorflow as tf

        tf.compat.v1.reset_default_graph()

        with tf.compat.v1.Session() as sess:
            inp = tf.compat.v1.placeholder(tf.float32, [1, 3, 2, 2], 'Input')

            relu = tf.nn.relu(inp, name='Relu')
            output = tf.nn.sigmoid(relu, name='Sigmoid')

            tf.compat.v1.global_variables_initializer()
            tf_net = sess.graph_def

        # save model to .pb and return path to the model
        return save_to_pb(tf_net, tmp_dir)

    def create_tf_param_res_model(self, tmp_dir):
        #
        # Create Tensorflow model with following pattern:
        # Input ---\
        #           Add --> Identity
        # Input1 ---/
        #
        # This graph is needed for transform test. Input and Identity are replaced with ReadValue and Assign ops.
        import tensorflow as tf

        tf.compat.v1.reset_default_graph()

        with tf.compat.v1.Session() as sess:
            inp = tf.compat.v1.placeholder(tf.float32, [1, 3, 2, 2], 'Input')
            inp1 = tf.compat.v1.placeholder(tf.float32, [1, 3, 2, 2], 'Input1')
            sum1 = tf.add(inp, inp1, "Add1")
            result = tf.identity(sum1, name='Identity')

            tf.compat.v1.global_variables_initializer()
            tf_net = sess.graph_def

        # save model to .pb and return path to the model
        return save_to_pb(tf_net, tmp_dir)

    # Object-valued params vs. equivalent CLI strings for the 3-input model.
    # NOTE: `test_data` is rebound before each test below; each
    # @pytest.mark.parametrize captures the value current at decoration time.
    test_data = [
        {'params_test': {'input_shape': [PartialShape([2, 3, 4]),
                                         [2, 3, 4],
                                         [Dimension(2), Dimension(3), Dimension(4)]],
                         'input': ['Input1', 'Input2', 'Relu3']},
         'params_ref': {'input_shape': "[2,3,4],[2,3,4],[2,3,4]", 'input': 'Input1,Input2,Relu3'}},
        {'params_test': {'input_shape': [PartialShape([Dimension(), Dimension(1, 3), Dimension(4, -1), Dimension(-1, 5)]),
                                         [Dimension(), Dimension(1, 3), 4, Dimension(-1, 5)],
                                         [Dimension(), 3, Dimension(4, -1), Dimension(-1, 5)]],
                         'input': ['Input1', 'Input2', 'Relu3']},
         'params_ref': {'input_shape': "[?,1..3,4..,..5],[?,1..3,4,..5],[?,3,4..,..5]", 'input': 'Input1,Input2,Relu3'}},
        {'params_test': {'input': [InputCutInfo("Relu1", Shape([3, 2]), Type(np.int32), None),
                                   InputCutInfo("Relu2", PartialShape([Dimension(3, 10), Dimension(2, -1)]), np.int32, None),
                                   InputCutInfo("Relu3", [3, 2], Type(np.int32), [1, 2, 3, 4, 5, 6])]},
         'params_ref': {'input': "Relu1[3 2]{i32},Relu2[3..10 2..]{i32},Relu3[3 2]{i32}->[1 2 3 4 5 6]"}},
        # Tuples may list name/shape/type in any order.
        {'params_test': {'input': [("Relu1", Shape([3, 2]), Type(np.int32)),
                                   (np.int32, "Relu2", PartialShape([Dimension(3, 10), Dimension(2, -1)])),
                                   ([3, 2], "Relu3", Type(np.int32))]},
         'params_ref': {'input': "Relu1[3 2]{i32},Relu2[3..10 2..]{i32},Relu3[3 2]{i32}"}},
        {'params_test': {'output': ["Sigmoid_0", "Sigmoid_2"]},
         'params_ref': {'output': "Sigmoid_0,Sigmoid_2"}},
        {'params_test': {'mean_values': {'Input1': [0.5, 1.3, 0.67], 'Input2': [4.2, 6.7, 3.15], 'Input3': [0.757, 4.6, 7.3]}},
         'params_ref': {'mean_values': "Input1[0.5,1.3,0.67],Input2[4.2,6.7,3.15],Input3[0.757,4.6,7.3]"}},
        {'params_test': {
            'mean_values': [[0.5, 1.3, 0.67], [4.2, 6.7, 3.15], [0.757, 4.6, 7.3]]},
         'params_ref': {'mean_values': "[0.5,1.3,0.67],[4.2,6.7,3.15],[0.757,4.6,7.3]"}},
        {'params_test': {'scale_values': {'Input1': [0.5, 1.3, 0.67], 'Input2': [4.2, 6.7, 3.15], 'Input3': [0.757, 4.6, 7.3]}},
         'params_ref': {'scale_values': "Input1[0.5,1.3,0.67],Input2[4.2,6.7,3.15],Input3[0.757,4.6,7.3]"}},
        {'params_test': {
            'scale_values': [[0.5, 1.3, 0.67], [4.2, 6.7, 3.15], [0.757, 4.6, 7.3]]},
         'params_ref': {'scale_values': "[0.5,1.3,0.67],[4.2,6.7,3.15],[0.757,4.6,7.3]"}},
        {'params_test': {
            'source_layout': {'Input1': Layout("nchw"), 'Input2': "nchw", 'Input3': "nc??"}},
         'params_ref': {'source_layout': "Input1(nchw),Input2(nchw),Input3(nc??)"}},
        {'params_test': {
            'target_layout': {'Input1': Layout("nhwc"), 'Input2': "nhwc", 'Input3': "n??c"}},
         'params_ref': {'target_layout': "Input1(nhwc),Input2(nhwc),Input3(n??c)"}},
        {'params_test': {
            'layout': {'Input1': LayoutMap(source_layout=Layout("nchw"), target_layout="nhwc"),
                       'Input2': LayoutMap(source_layout="nc??", target_layout=Layout("n??c")),
                       'Input3': LayoutMap(source_layout="abcd", target_layout="acdb")}},
         'params_ref': {'layout': "Input1(nchw->nhwc),Input2(nc??->n??c),Input3(abcd->acdb)"}},
    ]

    @pytest.mark.parametrize("params", test_data)
    @pytest.mark.nightly
    def test_mo_convert_tf_model(self, params, ie_device, precision, ir_version,
                                 temp_dir, use_new_frontend, use_old_api):
        tf_net_path = self.create_tf_model(temp_dir)

        test_params = params['params_test']
        ref_params = params['params_ref']
        test_params.update({'input_model': tf_net_path})
        ref_params.update({'input_model': tf_net_path})
        self._test(temp_dir, test_params, ref_params)

    # Scalar (non-list) object params for the single-input model.
    test_data = [
        {'params_test': {'input_shape': PartialShape([2, 3, 4])},
         'params_ref': {'input_shape': "[2,3,4]"}},
        {'params_test': {'input_shape': [Dimension(), Dimension(1, 3), 4, Dimension(-1, 5)]},
         'params_ref': {'input_shape': "[?,1..3,4,..5]"}},
        {'params_test': {'input': InputCutInfo("Relu", [3, 2], Type(np.int32), [1, 2, 3, 4, 5, 6])},
         'params_ref': {'input': "Relu[3 2]{i32}->[1 2 3 4 5 6]"}},
        {'params_test': {'input': ("Relu", [3, 2], Type(np.int32))},
         'params_ref': {'input': "Relu[3 2]{i32}"}},
        {'params_test': {'input': ("Relu", Type(np.int32))},
         'params_ref': {'input': "Relu{i32}"}},
        {'params_test': {'input': ("Relu", [3, 2])},
         'params_ref': {'input': "Relu[3 2]"}},
        {'params_test': {'input': ("Relu")},
         'params_ref': {'input': "Relu"}},
        {'params_test': {'mean_values': [0.5, 1.3, 0.67]},
         'params_ref': {'mean_values': "[0.5,1.3,0.67]"}},
        {'params_test': {'scale_values': [0.5, 1.3, 0.67]},
         'params_ref': {'scale_values': "[0.5,1.3,0.67]"}},
        {'params_test': {'source_layout': Layout("nchw")},
         'params_ref': {'source_layout': "nchw"}},
        {'params_test': {'target_layout': Layout("nchw")},
         'params_ref': {'target_layout': "nchw"}},
        {'params_test': {'layout': LayoutMap(source_layout=Layout("nchw"), target_layout="nhwc")},
         'params_ref': {'layout': "nchw->nhwc"}},
        {'params_test': {'layout': Layout("nchw")},
         'params_ref': {'layout': "nchw"}}
    ]

    @pytest.mark.parametrize("params", test_data)
    @pytest.mark.nightly
    @pytest.mark.precommit
    def test_mo_convert_tf_model_single_input_output(self, params, ie_device, precision, ir_version,
                                                     temp_dir, use_new_frontend, use_old_api):
        tf_net_path = self.create_tf_model_single_input_output(temp_dir)

        test_params = params['params_test']
        ref_params = params['params_ref']
        test_params.update({'input_model': tf_net_path})
        ref_params.update({'input_model': tf_net_path})
        self._test(temp_dir, test_params, ref_params)

    # 'transform' given as a (name, args-dict) tuple vs. the CLI string form.
    test_data = [
        {
            'params_test': {'transform': ('MakeStateful', {'param_res_names': {'Input:0': 'Identity:0'}})},
            'params_ref': {'transform': "MakeStateful[param_res_names={\'Input:0\':\'Identity:0\'}]"}}
    ]

    @pytest.mark.parametrize("params", test_data)
    @pytest.mark.nightly
    def test_mo_convert_transform(self, params, ie_device, precision, ir_version,
                                  temp_dir, use_new_frontend, use_old_api):
        tf_net_path = self.create_tf_param_res_model(temp_dir)

        test_params = params['params_test']
        ref_params = params['params_ref']
        test_params.update({'input_model': tf_net_path})
        ref_params.update({'input_model': tf_net_path})
        self._test(temp_dir, test_params, ref_params)

View File

@@ -0,0 +1,136 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import pytest
from common.mo_convert_test_class import CommonMOConvertTest
from common.onnx_layer_test_class import save_to_onnx
from unit_tests.utils.graph import build_graph
class TestExtensions(CommonMOConvertTest):
    """Checks that mo.convert() applies user-provided frontend
    ConversionExtension objects: a single extension, and a list of two."""

    def create_onnx_model(self, tmp_dir):
        """Build a LeakyRelu -> Elu ONNX model, save it to .onnx and return
        the path."""
        #
        # Create ONNX model
        #
        import onnx
        from onnx import helper
        from onnx import TensorProto

        shape = [2, 3, 4]

        input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape)
        output = helper.make_tensor_value_info('output', TensorProto.FLOAT, shape)

        node_def = onnx.helper.make_node(
            'LeakyRelu',
            inputs=['input'],
            outputs=['LeakyRelu_data'],
            alpha=0.1
        )
        node_def2 = onnx.helper.make_node(
            'Elu',
            inputs=['LeakyRelu_data'],
            outputs=['output'],
            alpha=0.1
        )

        # Create the graph (GraphProto)
        graph_def = helper.make_graph(
            [node_def, node_def2],
            'test_model',
            [input],
            [output],
        )

        # Create the model (ModelProto)
        onnx_net = helper.make_model(graph_def, producer_name='test_model')
        # save model to .onnx and return path to the model
        return save_to_onnx(onnx_net, tmp_dir)

    # NOTE: the helpers below take no `self` on purpose - they are invoked at
    # class-body evaluation time (while building `test_data`), where they are
    # plain functions in the class namespace.
    def create_custom_extension_leaky_relu_to_relu():
        # replaces LeakyRelu with Relu
        from openvino.frontend import ConversionExtension
        from openvino.frontend import NodeContext
        import openvino.runtime.opset8 as ops

        def custom_converter(node: NodeContext):
            input = node.get_input(0)
            relu = ops.relu(input)
            return [relu.output(0)]

        return ConversionExtension("LeakyRelu", custom_converter)

    def create_custom_extension_elu_to_sigmoid():
        # replaces Elu with Sigmoid
        from openvino.frontend import ConversionExtension
        from openvino.frontend import NodeContext
        import openvino.runtime.opset8 as ops

        def custom_converter(node: NodeContext):
            input = node.get_input(0)
            sigm = ops.sigmoid(input)
            return [sigm.output(0)]

        return ConversionExtension("Elu", custom_converter)

    def create_ref_graph1():
        # Expected graph after LeakyRelu -> ReLU replacement only.
        nodes_attributes = {
            'input': {'kind': 'op', 'type': 'Parameter'},
            'input_data': {'shape': [2, 3, 4], 'kind': 'data'},
            'relu': {'kind': 'op', 'type': 'ReLU'},
            'relu_data': {'shape': [2, 3, 4], 'kind': 'data'},
            'elu': {'kind': 'op', 'type': 'Elu'},
            'elu_data': {'shape': [2, 3, 4], 'kind': 'data'},
            'result': {'kind': 'op', 'type': 'Result'}
        }

        return build_graph(nodes_attributes,
                           [('input', 'input_data'),
                            ('input_data', 'relu'),
                            ('relu', 'relu_data'),
                            ('relu_data', 'elu'),
                            ('elu', 'elu_data'),
                            ('elu_data', 'result'),
                            ])

    def create_ref_graph2():
        # Expected graph after both replacements (LeakyRelu -> ReLU, Elu -> Sigmoid).
        nodes_attributes = {
            'input': {'kind': 'op', 'type': 'Parameter'},
            'input_data': {'shape': [2, 3, 4], 'kind': 'data'},
            'relu': {'kind': 'op', 'type': 'ReLU'},
            'relu_data': {'shape': [2, 3, 4], 'kind': 'data'},
            'sigmoid': {'kind': 'op', 'type': 'Sigmoid'},
            'sigmoid_data': {'shape': [2, 3, 4], 'kind': 'data'},
            'result': {'kind': 'op', 'type': 'Result'}
        }

        return build_graph(nodes_attributes,
                           [('input', 'input_data'),
                            ('input_data', 'relu'),
                            ('relu', 'relu_data'),
                            ('relu_data', 'sigmoid'),
                            ('sigmoid', 'sigmoid_data'),
                            ('sigmoid_data', 'result'),
                            ])

    test_data = [
        {'params_test': {'extensions': create_custom_extension_leaky_relu_to_relu()},
         'ref_graph': create_ref_graph1()},
        {'params_test': {'extensions': [create_custom_extension_leaky_relu_to_relu(),
                                        create_custom_extension_elu_to_sigmoid()]},
         'ref_graph': create_ref_graph2()}
    ]

    @pytest.mark.parametrize("params", test_data)
    @pytest.mark.nightly
    @pytest.mark.precommit
    def test_mo_convert_extensions(self, params, ie_device, precision, ir_version,
                                   temp_dir, use_new_frontend, use_old_api):
        onnx_net_path = self.create_onnx_model(temp_dir)

        test_params = params['params_test']
        test_params.update({'input_model': onnx_net_path})
        self._test_by_ref_graph(temp_dir, test_params, params['ref_graph'])

View File

@@ -81,6 +81,7 @@ openvino/tools/mo/back/TopKNormalizer.py
openvino/tools/mo/back/TransposeDFT.py
openvino/tools/mo/back/TransposeReduceFusing.py
openvino/tools/mo/back/UselessConcatRemoval.py
openvino/tools/mo/convert.py
openvino/tools/mo/front/__init__.py
openvino/tools/mo/front/ArgOpsSqueeze.py
openvino/tools/mo/front/ATenToEmbeddingBag.py

View File

@@ -1,3 +1,4 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from .convert import convert, InputCutInfo, LayoutMap

View File

@@ -7,7 +7,7 @@ from typing import List
from openvino.tools.mo.front.extractor import create_params_with_custom_types
from openvino.tools.mo.utils.cli_parser import parse_transform
from openvino.tools.mo.utils.error import Error
from openvino.runtime import Model
def get_available_transformations():
try:
@@ -49,28 +49,9 @@ def compress_model(func: object):
compress_model_transformation(func)
def apply_offline_transformations(input_model: str, argv: argparse.Namespace):
# This variable is only needed by GenerateMappingFile transformation
# to produce correct mapping
extract_names = argv.framework in ['tf', 'mxnet', 'kaldi']
from openvino.runtime import serialize # pylint: disable=import-error,no-name-in-module
from openvino.offline_transformations import generate_mapping_file # pylint: disable=import-error,no-name-in-module
from openvino.frontend import FrontEndManager # pylint: disable=no-name-in-module,import-error
def apply_offline_transformations(func: Model, argv: argparse.Namespace):
from openvino.tools.mo.back.preprocessing import apply_preprocessing # pylint: disable=no-name-in-module,import-error
fem = FrontEndManager()
# We have to separate fe object lifetime from fem to
# avoid segfault during object destruction. So fe must
# be destructed before fem object explicitly.
def read_model(path_to_xml):
fe = fem.load_by_framework(framework="ir")
function = fe.convert(fe.load(path_to_xml))
return function
func = read_model(input_model + "_tmp.xml")
# Apply preprocessing (mean/scale/reverse_channels/convert_layout/etc)
apply_preprocessing(ov_function=func, argv=argv)
@@ -83,6 +64,4 @@ def apply_offline_transformations(input_model: str, argv: argparse.Namespace):
if "compress_fp16" in argv and argv.compress_fp16:
compress_model(func)
serialize(func, str(input_model + ".xml").encode('utf-8'), (input_model + ".bin").encode('utf-8'))
path_to_mapping = input_model + ".mapping"
generate_mapping_file(func, path_to_mapping.encode('utf-8'), extract_names)
return func

View File

@@ -0,0 +1,26 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from collections import namedtuple
from openvino.tools.mo.convert_impl import _convert
# Public parameter-object types for convert().
# Fix: the typename passed to namedtuple previously was "InputInfo", which did
# not match the bound name InputCutInfo and produced a misleading repr().
InputCutInfo = namedtuple("InputCutInfo", ["name", "shape", "type", "value"])
LayoutMap = namedtuple("LayoutMap", ["source_layout", "target_layout"])


def convert(input_model=None, **args):
    """
    Converts the model from the original framework to an OpenVINO Model.

    Args:
        input_model:
            Tensorflow*: a file with a pre-trained model (binary or text .pb file after freezing).
            Caffe*: a model proto file with model weights

        Run convert(help=True) to list all available parameters.

    Returns:
        openvino.runtime.Model
    """
    # input_model is accepted positionally for convenience; fold it back into
    # the kwargs that the implementation expects.
    args.update({'input_model': input_model})
    return _convert(**args)

View File

@@ -0,0 +1,573 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import argparse
import datetime
import logging as log
import os
import platform
import sys
from collections import OrderedDict
from copy import deepcopy
try:
import openvino_telemetry as tm
except ImportError:
import openvino.tools.mo.utils.telemetry_stub as tm
from openvino.tools.mo.back.SpecialNodesFinalization import RemoveConstOps, CreateConstNodesReplacement, NormalizeTI
from openvino.tools.mo.moc_frontend.check_config import legacy_transformations_config_used, \
new_extensions_used, new_transformations_config_used, input_freezig_used
from openvino.tools.mo.moc_frontend.pipeline import moc_pipeline
from openvino.tools.mo.moc_frontend.serialize import moc_emit_ir
from openvino.tools.mo.graph.graph import Graph
from openvino.tools.mo.middle.pattern_match import for_graph_and_each_sub_graph_recursively
from openvino.tools.mo.pipeline.common import prepare_emit_ir
from openvino.tools.mo.pipeline.unified import unified_pipeline
from openvino.tools.mo.utils import import_extensions
from openvino.tools.mo.utils.cli_parser import check_available_transforms, \
get_advanced_cli_options, get_available_front_ends, get_caffe_cli_options, \
get_common_cli_options, get_freeze_placeholder_values, get_kaldi_cli_options, get_layout_values, \
get_mean_scale_dictionary, get_meta_info, get_mxnet_cli_options, get_onnx_cli_options, \
get_placeholder_shapes, get_tf_cli_options, get_tuple_values, parse_transform, parse_tuple_pairs, \
get_all_cli_parser, mo_convert_params, get_model_name_from_args
from openvino.tools.mo.utils.error import Error
from openvino.tools.mo.utils.find_ie_version import find_ie_version
from openvino.tools.mo.utils.guess_framework import deduce_legacy_frontend_by_namespace
from openvino.tools.mo.utils.logger import init_logger, progress_printer
from openvino.tools.mo.utils.utils import refer_to_faq_msg
from openvino.tools.mo.utils.telemetry_utils import send_params_info, send_framework_info
from openvino.tools.mo.utils.version import get_simplified_mo_version, get_simplified_ie_version
from openvino.tools.mo.utils.versions_checker import check_requirements # pylint: disable=no-name-in-module
from openvino.tools.mo.utils.telemetry_utils import get_tid
from openvino.tools.mo.front.common.partial_infer.utils import mo_array
from openvino.tools.mo.moc_frontend.check_config import legacy_extensions_used
# pylint: disable=no-name-in-module,import-error
from openvino.frontend import FrontEndManager, ProgressReporterExtension, TelemetryExtension, JsonConfigExtension
def load_extensions(argv: argparse.Namespace, is_tf: bool, is_caffe: bool, is_mxnet: bool, is_kaldi: bool,
                    is_onnx: bool):
    """Load legacy-frontend extension directories for the detected framework.

    Picks the framework-specific register_custom_ops module based on the
    boolean flags and hands its front classes to import_extensions.load_dirs.
    """
    extensions = None
    if hasattr(argv, 'extensions') and argv.extensions and argv.extensions != '':
        extensions = argv.extensions

    if is_tf:
        # NOTE: unlike the other frameworks, no telemetry event is sent for TF.
        from openvino.tools.mo.front.tf.register_custom_ops import get_front_classes
    elif is_caffe:
        send_framework_info('caffe')
        from openvino.tools.mo.front.caffe.register_custom_ops import get_front_classes
    elif is_mxnet:
        send_framework_info('mxnet')
        from openvino.tools.mo.front.mxnet.register_custom_ops import get_front_classes
    elif is_kaldi:
        send_framework_info('kaldi')
        from openvino.tools.mo.front.kaldi.register_custom_ops import get_front_classes
    elif is_onnx:
        send_framework_info('onnx')
        from openvino.tools.mo.front.onnx.register_custom_ops import get_front_classes
    else:
        # No legacy framework detected - nothing to load.
        return
    import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
def replace_ext(name: str, old: str, new: str):
    """Return *name* with the file extension *old* replaced by *new*.

    The match is exact and case-sensitive; when the current extension is not
    *old*, None is returned (the caller treats that as "no candidate found").
    """
    stem, current_ext = os.path.splitext(name)
    log.debug("base: {}, ext: {}".format(stem, current_ext))
    if current_ext != old:
        return None
    return stem + new
def print_argv(argv: argparse.Namespace, is_caffe: bool, is_tf: bool, is_mxnet: bool, is_kaldi: bool, is_onnx: bool,
               model_name: str):
    """Print a human-readable summary of the Model Optimizer arguments.

    Renders the common/advanced option groups plus the group for the detected
    framework, one '\\t<description>: \\t<value>' line per option.
    """
    print('Model Optimizer arguments:')
    props = OrderedDict()
    props['common_args'] = get_common_cli_options(model_name)
    props['advanced_args'] = get_advanced_cli_options()
    if is_caffe:
        props['caffe_args'] = get_caffe_cli_options()
    if is_tf:
        props['tf_args'] = get_tf_cli_options()
    if is_mxnet:
        props['mxnet_args'] = get_mxnet_cli_options()
    if is_kaldi:
        props['kaldi_args'] = get_kaldi_cli_options()
    if is_onnx:
        props['onnx_args'] = get_onnx_cli_options()

    # Group key -> printed section header.
    framework_specifics_map = {
        'common_args': 'Common parameters:',
        'advanced_args': 'Advanced parameters:',
        'caffe_args': 'Caffe specific parameters:',
        'tf_args': 'TensorFlow specific parameters:',
        'mxnet_args': 'MXNet specific parameters:',
        'kaldi_args': 'Kaldi specific parameters:',
        'onnx_args': 'ONNX specific parameters:',
    }

    lines = []
    for key in props:
        lines.append(framework_specifics_map[key])
        for (op, desc) in props[key].items():
            # A list-valued description is [label, formatter]; the formatter
            # is applied to the raw argument value before printing.
            if isinstance(desc, list):
                lines.append('\t{}: \t{}'.format(desc[0], desc[1](getattr(argv, op, 'NONE'))))
            else:
                if op == 'k':
                    # 'k' (CustomLayersMapping.xml) is printed as 'Default'
                    # when it still points at the bundled default file.
                    default_path = os.path.join(os.path.dirname(sys.argv[0]),
                                                'openvino/tools/mo/front/caffe/CustomLayersMapping.xml')
                    if getattr(argv, op, 'NONE') == default_path:
                        lines.append('\t{}: \t{}'.format(desc, 'Default'))
                        continue
                lines.append('\t{}: \t{}'.format(desc, getattr(argv, op, 'NONE')))
    print('\n'.join(lines), flush=True)
def arguments_post_parsing(argv: argparse.Namespace):
    """Validate and normalize parsed MO arguments in place.

    Chooses between the legacy and the new (MOC) frontend, validates
    framework/model-path combinations, parses shape/mean/scale/layout strings
    into structured attributes on ``argv``, creates the output directory, and
    loads legacy extensions. Returns the mutated ``argv``.

    Raises:
        Error: on any inconsistent or invalid combination of options.
    """
    use_legacy_frontend = argv.use_legacy_frontend
    use_new_frontend = argv.use_new_frontend

    if use_new_frontend and use_legacy_frontend:
        raise Error('Options --use_new_frontend and --use_legacy_frontend must not be used simultaneously '
                    'in the Model Optimizer command-line')

    moc_front_end, available_moc_front_ends = get_moc_frontends(argv)

    if not moc_front_end and use_new_frontend:
        raise Error('Option --use_new_frontend is specified but the Model Optimizer is unable to find new frontend. '
                    'Please ensure that your environment contains new frontend for the input model format or '
                    'try to convert the model without specifying --use_new_frontend option.')

    # If a MOC frontend was found, all legacy-framework flags are forced off.
    is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx = \
        deduce_legacy_frontend_by_namespace(argv) if not moc_front_end else [False, False, False, False, False]

    is_legacy_frontend = any([is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx])
    if not is_legacy_frontend and use_legacy_frontend:
        raise Error('Option --use_legacy_frontend is specified but Model Optimizer does not have legacy frontend '
                    'for the input model format. Please try to convert the model without specifying --use_legacy_frontend option.')

    # handle a default case, i.e. use_new_frontend and use_legacy_frontend are not specified, when no frontend is found
    if not is_legacy_frontend and not moc_front_end:
        legacy_frameworks = ['tf', 'caffe', 'mxnet', 'kaldi', 'onnx']
        frameworks = list(set(legacy_frameworks + available_moc_front_ends))
        if not argv.framework:
            raise Error('Framework name can not be deduced from the given options: {}={}. '
                        'Please use --framework with one from the list: {}.',
                        '--input_model', argv.input_model, frameworks)
        elif argv.framework not in frameworks:
            raise Error('Framework {} is not a valid target. Please use --framework with one from the list: {}. ' +
                        refer_to_faq_msg(15), argv.framework, frameworks)

    if is_legacy_frontend:
        # New-style extensions/configs are only supported on the MOC path.
        if new_extensions_used(argv):
            raise Error('New kind of extensions used on legacy path')
        if new_transformations_config_used(argv):
            raise Error('New kind of transformations configuration used on legacy path')

    # Per-framework checks that some form of input model path was provided.
    if is_tf and not argv.input_model and not argv.saved_model_dir and not argv.input_meta_graph:
        raise Error('Path to input model or saved model dir is required: use --input_model, --saved_model_dir or '
                    '--input_meta_graph')
    elif is_mxnet and not argv.input_model and not argv.input_symbol and not argv.pretrained_model_name:
        raise Error('Path to input model or input symbol or pretrained_model_name is required: use --input_model or '
                    '--input_symbol or --pretrained_model_name')
    elif is_caffe and not argv.input_model and not argv.input_proto:
        raise Error('Path to input model or input proto is required: use --input_model or --input_proto')
    elif (is_kaldi or is_onnx) and not argv.input_model:
        raise Error('Path to input model is required: use --input_model.')

    log.debug(str(argv))
    log.debug("Model Optimizer started")
    log.debug('Output model name would be {}{{.xml, .bin}}'.format(argv.model_name))

    # if --input_proto is not provided, try to retrieve another one
    # by suffix substitution from model file name
    if is_caffe and not argv.input_proto:
        argv.input_proto = replace_ext(argv.input_model, '.caffemodel', '.prototxt')

        if not argv.input_proto:
            raise Error("Cannot find prototxt file: for Caffe please specify --input_proto - a " +
                        "protobuf file that stores topology and --input_model that stores " +
                        "pretrained weights. " +
                        refer_to_faq_msg(20))
        log.info('Deduced name for prototxt: {}'.format(argv.input_proto))

    if not argv.silent:
        print_argv(argv, is_caffe, is_tf, is_mxnet, is_kaldi, is_onnx, argv.model_name)

    # This try-except is additional reinsurance that the IE
    # dependency search does not break the MO pipeline
    def raise_ie_not_found():
        raise Error("Could not find the Inference Engine or nGraph Python API.\n"
                    "Consider building the Inference Engine and nGraph Python APIs from sources or "
                    "try to install OpenVINO (TM) Toolkit using \"install_prerequisites.{}\"".format(
                        "bat" if sys.platform == "windows" else "sh"))

    try:
        if not find_ie_version(silent=argv.silent):
            raise_ie_not_found()
    except Exception as e:
        log.error(e)
        raise_ie_not_found()

    # FP16/half output is produced by converting in FP32 and compressing the result.
    if 'data_type' in argv and argv.data_type in ['FP16', 'half']:
        argv.data_type = 'FP32'
        argv.compress_fp16 = True
    else:
        argv.compress_fp16 = False

    # This is just to check that transform key is valid and transformations are available
    check_available_transforms(parse_transform(argv.transform))

    # For C++ frontends there are no specific Python installation requirements, check only generic ones
    if moc_front_end:
        ret_code = check_requirements(silent=argv.silent)
    else:
        ret_code = check_requirements(framework=argv.framework, silent=argv.silent)
    if ret_code:
        raise Error('check_requirements exited with return code {}'.format(ret_code))

    # Deprecated alias for --transformations_config.
    if is_tf and argv.tensorflow_use_custom_operations_config is not None:
        argv.transformations_config = argv.tensorflow_use_custom_operations_config

    if is_caffe and argv.mean_file and argv.mean_values:
        raise Error('Both --mean_file and mean_values are specified. Specify either mean file or mean values. ' +
                    refer_to_faq_msg(17))
    elif is_caffe and argv.mean_file and argv.mean_file_offsets:
        values = get_tuple_values(argv.mean_file_offsets, t=int, num_exp_values=2)
        mean_file_offsets = mo_array([int(x) for x in values[0].split(',')])
        if not all([offset >= 0 for offset in mean_file_offsets]):
            raise Error("Negative value specified for --mean_file_offsets option. "
                        "Please specify positive integer values in format '(x,y)'. " +
                        refer_to_faq_msg(18))
        argv.mean_file_offsets = mean_file_offsets

    if argv.scale and argv.scale_values:
        raise Error(
            'Both --scale and --scale_values are defined. Specify either scale factor or scale values per input ' +
            'channels. ' + refer_to_faq_msg(19))

    if argv.scale and argv.scale < 1.0:
        log.error("The scale value is less than 1.0. This is most probably an issue because the scale value specifies "
                  "floating point value which all input values will be *divided*.", extra={'is_warning': True})

    if argv.input_model and (is_tf and argv.saved_model_dir):
        raise Error('Both --input_model and --saved_model_dir are defined. '
                    'Specify either input model or saved model directory.')
    if is_tf:
        if argv.saved_model_tags is not None:
            if ' ' in argv.saved_model_tags:
                raise Error('Incorrect saved model tag was provided. Specify --saved_model_tags with no spaces in it')
            argv.saved_model_tags = argv.saved_model_tags.split(',')

    # Parse the comma/bracket CLI strings into structured attributes on argv.
    argv.output = argv.output.split(',') if argv.output else None

    inputs_list, argv.placeholder_shapes, argv.placeholder_data_types = get_placeholder_shapes(
        argv.input, argv.input_shape, argv.batch)
    argv.inputs_list = inputs_list

    mean_values = parse_tuple_pairs(argv.mean_values)
    scale_values = parse_tuple_pairs(argv.scale_values)
    mean_scale = get_mean_scale_dictionary(mean_values, scale_values, argv.input)
    argv.mean_scale_values = mean_scale
    argv.layout_values = get_layout_values(argv.layout, argv.source_layout, argv.target_layout)

    # Ensure the output directory exists and is writable before conversion starts.
    if not os.path.exists(argv.output_dir):
        try:
            os.makedirs(argv.output_dir)
        except PermissionError as e:
            raise Error("Failed to create directory {}. Permission denied! " +
                        refer_to_faq_msg(22),
                        argv.output_dir) from e
    else:
        if not os.access(argv.output_dir, os.W_OK):
            raise Error("Output directory {} is not writable for current user. " +
                        refer_to_faq_msg(22), argv.output_dir)

    log.debug("Placeholder shapes : {}".format(argv.placeholder_shapes))

    argv.freeze_placeholder_with_value, argv.input = get_freeze_placeholder_values(argv.input,
                                                                                  argv.freeze_placeholder_with_value)
    load_extensions(argv, is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx)

    return argv
def check_fallback(argv: argparse.Namespace):
    """Return the list of reasons why conversion must fall back to the legacy frontend.

    An empty list means no fallback is required. Reasons are only evaluated when a
    legacy path exists for the deduced framework and the user has not forced the
    new frontend with --use_new_frontend.

    :param argv: parsed Model Optimizer arguments.
    :return: list of applicable fallback reason names (possibly empty).
    """
    # Map reason name -> predicate deciding whether that reason applies to argv.
    fallback_checks = {
        'extensions': legacy_extensions_used,
        'transformations_config': legacy_transformations_config_used,
    }
    # Some frontends such as PDPD have no legacy path, so there is nothing to fall back to.
    if not any(deduce_legacy_frontend_by_namespace(argv)):
        return []
    # There is no possibility for fallback if a user strictly wants to use the new frontend.
    if argv.use_new_frontend:
        return []
    # NOTE: the original returned a dict on the early paths and a list here;
    # both are consumed via len() and ", ".join(), so an empty list is fully
    # compatible and keeps the return type consistent.
    return [reason for reason, is_applicable in fallback_checks.items() if is_applicable(argv)]
def get_default_frontends():
    """Return the mapping of framework name to its default frontend kind.

    Values are either 'new' or 'legacy'.
    """
    return {
        'onnx': 'new',
        'tf': 'legacy',
    }
def get_moc_frontends(argv: argparse.Namespace):
    """Resolve the MOC (new) frontend for the given arguments.

    :param argv: parsed Model Optimizer arguments; argv.framework may be
        filled in as a side effect when it is deduced from the model file.
    :return: pair (moc_front_end, available_moc_front_ends) where the first
        item is None whenever the new frontend cannot or should not be used.
    """
    front_end_manager = argv.feManager

    # Without a FrontEndManager, or when the user forces the legacy path,
    # the MOC frontend is not applicable at all.
    if not front_end_manager or argv.use_legacy_frontend:
        return None, []

    available = get_available_front_ends(front_end_manager)

    if not argv.framework and argv.input_model:
        # Framework was not given explicitly: deduce it from the model file.
        front_end = front_end_manager.load_by_model(argv.input_model)
        if not front_end:
            return None, available
        argv.framework = front_end.get_name()
    elif argv.framework in available:
        front_end = front_end_manager.load_by_framework(argv.framework)
    else:
        return None, []

    # Disable MOC frontend if its default is 'legacy' and no user override was given.
    if get_default_frontends().get(front_end.get_name()) == 'legacy' and not argv.use_new_frontend:
        return None, available

    # This check is a workaround to skip the IR frontend.
    if front_end.get_name() not in available:
        return None, available

    return front_end, available
def prepare_ir(argv: argparse.Namespace):
    """Run model conversion up to (but excluding) IR emission.

    :param argv: parsed Model Optimizer arguments.
    :return: pair (graph, ngraph_function) — exactly one of them is not None.
        The new (MOC) frontend produces an nGraph function; the legacy
        pipeline produces an MO graph.
    :raises Error: when legacy-style extensions or transformation configs are
        combined with the new frontend.
    """
    argv = arguments_post_parsing(argv)
    t = tm.Telemetry()
    graph = None
    ngraph_function = None
    moc_front_end, available_moc_front_ends = get_moc_frontends(argv)
    if moc_front_end:
        fallback_reasons = check_fallback(argv)
        if len(fallback_reasons) == 0:
            t.send_event("mo", "conversion_method", moc_front_end.get_name() + "_frontend")
            moc_front_end.add_extension(TelemetryExtension("mo", t.send_event, t.send_error, t.send_stack_trace))
            moc_front_end.add_extension(ProgressReporterExtension(progress_printer(argv)))
            # Legacy-style configs/extensions cannot be applied by the new frontend.
            # FIX: the two error messages below were swapped relative to the
            # conditions being checked.
            if legacy_transformations_config_used(argv):
                raise Error('Legacy transformations configuration is not supported for the new frontend')
            if legacy_extensions_used(argv):
                raise Error('Legacy extensions are not supported for the new frontend')
            if new_transformations_config_used(argv):
                moc_front_end.add_extension(JsonConfigExtension(argv.transformations_config))
            if new_extensions_used(argv):
                for extension in argv.extensions:
                    moc_front_end.add_extension(extension)
            ngraph_function = moc_pipeline(argv, moc_front_end)
            return graph, ngraph_function
        else:  # apply fallback
            reasons_message = ", ".join(fallback_reasons)
            load_extensions(argv, *list(deduce_legacy_frontend_by_namespace(argv)))
            t.send_event("mo", "fallback_reason", reasons_message)
            log.warning("The IR preparation was executed by the legacy MO path. "
                        "This is a fallback scenario applicable only for some specific cases. "
                        f"The detailed reason why fallback was executed: not supported {reasons_message} were used. "
                        "You can specify --use_new_frontend flag to force using the Frontend MO path to avoid additional checks. " +
                        refer_to_faq_msg(105))

    t.send_event("mo", "conversion_method", "mo_legacy")
    graph = unified_pipeline(argv)
    return graph, ngraph_function
def emit_ir(graph: Graph, argv: argparse.Namespace):
    """Serialize the MO graph to a temporary IR, reload it as an nGraph function,
    apply offline transformations and return the resulting function.

    The temporary *_tmp.{xml,bin,mapping} files are removed afterwards.

    :param graph: the MO graph produced by the legacy pipeline.
    :param argv: parsed Model Optimizer arguments.
    :return: the nGraph function read back from the temporary IR.
    :raises Error: when the offline transformations step fails.
    """
    # We have to separate fe object lifetime from fem to
    # avoid segfault during object destruction. So fe must
    # be destructed before fem object explicitly.
    def read_model(path_to_xml: str):
        fe = fem.load_by_framework(framework="ir")
        function = fe.convert(fe.load(path_to_xml))
        return function

    NormalizeTI().find_and_replace_pattern(graph)
    for_graph_and_each_sub_graph_recursively(graph, RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(graph, CreateConstNodesReplacement().find_and_replace_pattern)

    # NOTE(review): feManager is removed from argv before meta info generation /
    # offline transformations — presumably it must not leak into downstream
    # consumers of argv; confirm against get_meta_info().
    if 'feManager' in argv:
        del argv.feManager

    mean_data = deepcopy(graph.graph['mf']) if 'mf' in graph.graph else None
    input_names = deepcopy(graph.graph['input_names']) if 'input_names' in graph.graph else []

    # Emit the IR into a temporary path first; it is read back below.
    prepare_emit_ir(graph=graph,
                    data_type=graph.graph['cmd_params'].data_type,
                    output_dir=argv.output_dir,
                    output_model_name=argv.model_name,
                    mean_data=mean_data,
                    input_names=input_names,
                    meta_info=get_meta_info(argv),
                    use_temporary_path=True)

    # This graph cleanup is required to avoid double memory consumption
    graph.clear()

    output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
    orig_model_name = os.path.normpath(os.path.join(output_dir, argv.model_name))

    fem = FrontEndManager()
    func = read_model(orig_model_name + "_tmp.xml")

    return_code = "not executed"
    # The tensorflow_custom_operations_config_update mode skips the
    # transformations step entirely.
    if not(argv.framework == 'tf' and argv.tensorflow_custom_operations_config_update):
        try:
            from openvino.tools.mo.back.offline_transformations import apply_offline_transformations
            func = apply_offline_transformations(func, argv)
            if "compress_fp16" in argv and argv.compress_fp16:
                # restore data_type cmd parameter
                argv.data_type = 'FP16'
            return_code = 0
        except Exception as e:
            return_code = "failed"
            log.error(e)
        # Report the offline transformations outcome via telemetry before
        # deciding whether to raise.
        message = str(dict({
            "platform": platform.system(),
            "mo_version": get_simplified_mo_version(),
            "ie_version": get_simplified_ie_version(env=os.environ),
            "python_version": sys.version,
            "return_code": return_code
        }))
        t = tm.Telemetry()
        t.send_event('mo', 'offline_transformations_status', message)

        if return_code != 0:
            raise Error("offline transformations step has failed.")

        for suf in [".xml", ".bin", ".mapping"]:
            # remove existing files
            path_to_file = orig_model_name + "_tmp" + suf
            if os.path.exists(path_to_file):
                os.remove(path_to_file)

    return func
def driver(argv: argparse.Namespace):
    """Run the full conversion pipeline and return the resulting nGraph function.

    Returns None when IR emission produced nothing; otherwise prints timing and
    memory statistics (unless --silent) before returning the function.
    """
    init_logger(argv.log_level.upper(), argv.silent)
    started_at = datetime.datetime.now()

    graph, ngraph_function = prepare_ir(argv)
    result = emit_ir(graph, argv) if graph is not None else moc_emit_ir(ngraph_function, argv)

    if result is None:
        return None

    if not argv.silent:
        total_seconds = (datetime.datetime.now() - started_at).total_seconds()
        print('[ SUCCESS ] Total execution time: {:.2f} seconds. '.format(total_seconds))
        try:
            import resource
            mem_usage = round(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
            if sys.platform == 'darwin':
                # NOTE(review): the extra division suggests macOS reports
                # ru_maxrss in bytes rather than kilobytes — confirm.
                mem_usage = round(mem_usage / 1024)
            print('[ SUCCESS ] Memory consumed: {} MB. '.format(mem_usage))
        except ImportError:
            # The 'resource' module is unavailable on some platforms (e.g. Windows).
            pass

    return result
def args_dict_to_list(cli_parser, **kwargs):
    """Convert keyword parameters into an argv-style list for the CLI parser.

    Only string values that differ from the parser's default are forwarded as
    '--key value' pairs. None values, values equal to the default, and
    non-string objects are skipped — non-string objects are applied to the
    namespace later without going through the parser's string validation.

    :param cli_parser: argparse.ArgumentParser providing per-key defaults.
    :param kwargs: mo.convert() keyword parameters.
    :return: flat list of CLI tokens.
    """
    cli_args = []
    for key, value in kwargs.items():
        # Skip unset values and values equal to the parser default.
        if value is None or cli_parser.get_default(key) == value:
            continue
        # Only strings are routed through the parser; skip parser checking
        # for non-str objects. (The original also special-cased bool here,
        # but a value cannot be both str and bool — the check was dead code.)
        if not isinstance(value, str):
            continue
        cli_args.append('--{}'.format(key))
        cli_args.append(value)
    return cli_args
def pack_params_to_args_namespace(**kwargs):
    """Pack mo.convert() keyword arguments into an argparse.Namespace.

    String-valued parameters are validated by routing them through the CLI
    parser; every other non-None value is assigned to the namespace directly.

    :raises Error: when a parameter name is neither a CLI argument nor a
        known mo.convert() parameter.
    """
    fe_manager = FrontEndManager()
    cli_parser = get_all_cli_parser(fe_manager)
    argv = cli_parser.parse_args(args_dict_to_list(cli_parser, **kwargs))

    for param_name, param_value in kwargs.items():
        is_known = param_name in argv or param_name in mo_convert_params
        if not is_known:
            raise Error("Unrecognized argument: {}".format(param_name))
        if param_value is not None:
            setattr(argv, param_name, param_value)

    send_params_info(argv, cli_parser)
    return argv
def params_to_string(**kwargs):
    """Convert known mo.convert() parameter values to their string form.

    Parameters that have a registered 'to_string' converter in
    mo_convert_params are replaced by the converted value; everything else
    is passed through untouched.
    """
    converted = dict(kwargs)
    for name, value in kwargs.items():
        if name in mo_convert_params:
            descriptor = mo_convert_params[name]
            if descriptor.to_string is not None:
                converted[name] = descriptor.to_string(value)
    return converted
def show_mo_convert_help():
    """Print the description of every parameter accepted by mo.convert()."""
    print('MO convert parameters:')
    for name, param_data in mo_convert_params.items():
        description = param_data.description.format(param_data.possible_types_python_api)
        print("{}: {}".format(name, description))
def _convert(**args):
    """Implementation of mo.convert(): convert a model and return an nGraph function.

    :param args: mo.convert() keyword parameters; 'help=True' prints the
        parameter reference and returns None without converting.
    :return: the converted nGraph function, or None when only help was requested.
    :raises Exception: re-raises whatever the conversion pipeline raised after
        flushing telemetry.
    """
    if 'help' in args and args['help']:
        show_mo_convert_help()
        return None

    telemetry = tm.Telemetry(tid=get_tid(), app_name='Model Optimizer', app_version=get_simplified_mo_version())
    telemetry.start_session('mo')
    telemetry.send_event('mo', 'version', get_simplified_mo_version())

    # Normalize API objects (InputCutInfo, LayoutMap, ...) to their string
    # form and validate/pack everything into an argparse.Namespace.
    args = params_to_string(**args)
    argv = pack_params_to_args_namespace(**args)
    if argv.model_name is None:
        argv.model_name = get_model_name_from_args(argv)
    try:
        # Initialize logger with 'ERROR' as default level to be able to form nice messages
        # before arg parser deliver log_level requested by user
        init_logger('ERROR', False)
        argv.feManager = FrontEndManager()
        ngraph_function = driver(argv)
        telemetry.send_event('mo', 'conversion_result', 'success')
        telemetry.end_session('mo')
        telemetry.force_shutdown(1.0)
        return ngraph_function
    except Exception:
        telemetry.send_event('mo', 'conversion_result', 'fail')
        telemetry.end_session('mo')
        telemetry.force_shutdown(1.0)
        # FIX: bare 'raise' keeps the original traceback intact;
        # 'raise e' would rewrite it to start at this frame.
        raise

View File

@@ -2,545 +2,53 @@
# SPDX-License-Identifier: Apache-2.0
import argparse
import datetime
import logging as log
import os
import platform
import sys
import traceback
from collections import OrderedDict
from copy import deepcopy
import logging as log
try:
import openvino_telemetry as tm
except ImportError:
import openvino.tools.mo.utils.telemetry_stub as tm
from openvino.tools.mo.back.SpecialNodesFinalization import RemoveConstOps, CreateConstNodesReplacement, NormalizeTI
from openvino.tools.mo.back.ie_ir_ver_2.emitter import append_ir_info
from openvino.tools.mo.moc_frontend.check_config import legacy_extensions_used, legacy_transformations_config_used, \
new_extensions_used, new_transformations_config_used, input_freezig_used
from openvino.tools.mo.moc_frontend.pipeline import moc_pipeline
from openvino.tools.mo.moc_frontend.serialize import moc_emit_ir
from openvino.tools.mo.graph.graph import Graph
from openvino.tools.mo.middle.pattern_match import for_graph_and_each_sub_graph_recursively
from openvino.tools.mo.pipeline.common import prepare_emit_ir, get_ir_version
from openvino.tools.mo.pipeline.unified import unified_pipeline
from openvino.tools.mo.utils import import_extensions
from openvino.tools.mo.utils.cli_parser import check_available_transforms, \
get_advanced_cli_options, get_available_front_ends, get_caffe_cli_options, \
get_common_cli_options, get_freeze_placeholder_values, get_kaldi_cli_options, get_layout_values, \
get_mean_scale_dictionary, get_meta_info, get_model_name, get_mxnet_cli_options, get_onnx_cli_options, \
get_placeholder_shapes, get_tf_cli_options, get_tuple_values, parse_transform, parse_tuple_pairs
from openvino.tools.mo.convert import convert
from openvino.tools.mo.pipeline.common import get_ir_version
from openvino.tools.mo.utils.cli_parser import get_model_name_from_args, get_meta_info
from openvino.tools.mo.utils.logger import init_logger
from openvino.tools.mo.utils.error import Error, FrameworkError
from openvino.tools.mo.utils.find_ie_version import find_ie_version
import traceback
from openvino.tools.mo.utils.get_ov_update_message import get_ov_update_message, get_ov_api20_message
from openvino.tools.mo.utils.guess_framework import deduce_legacy_frontend_by_namespace
from openvino.tools.mo.utils.logger import init_logger, progress_printer
from openvino.tools.mo.utils.model_analysis import AnalysisResults
from openvino.tools.mo.utils.utils import refer_to_faq_msg
from openvino.tools.mo.utils.telemetry_utils import send_params_info, send_framework_info
from openvino.tools.mo.utils.version import get_simplified_mo_version, get_simplified_ie_version
from openvino.tools.mo.utils.versions_checker import check_requirements # pylint: disable=no-name-in-module
from openvino.tools.mo.utils.telemetry_utils import get_tid
from openvino.tools.mo.front.common.partial_infer.utils import mo_array
from openvino.tools.mo.back.ie_ir_ver_2.emitter import append_ir_info
# pylint: disable=no-name-in-module,import-error
from openvino.frontend import FrontEndManager, ProgressReporterExtension, TelemetryExtension, JsonConfigExtension
from openvino.frontend import FrontEndManager
from openvino.offline_transformations import generate_mapping_file
from openvino.runtime import serialize
def replace_ext(name: str, old: str, new: str):
base, ext = os.path.splitext(name)
log.debug("base: {}, ext: {}".format(base, ext))
if ext == old:
return base + new
def main(cli_parser: argparse.ArgumentParser, framework=None):
argv = cli_parser.parse_args()
argv.model_name = get_model_name_from_args(argv)
argv = vars(argv)
# Initialize logger with 'ERROR' as default level to be able to form nice messages
# before arg parser deliver log_level requested by user
init_logger('ERROR', False)
def print_argv(argv: argparse.Namespace, is_caffe: bool, is_tf: bool, is_mxnet: bool, is_kaldi: bool, is_onnx: bool,
model_name: str):
print('Model Optimizer arguments:')
props = OrderedDict()
props['common_args'] = get_common_cli_options(model_name)
props['advanced_args'] = get_advanced_cli_options()
if is_caffe:
props['caffe_args'] = get_caffe_cli_options()
if is_tf:
props['tf_args'] = get_tf_cli_options()
if is_mxnet:
props['mxnet_args'] = get_mxnet_cli_options()
if is_kaldi:
props['kaldi_args'] = get_kaldi_cli_options()
if is_onnx:
props['onnx_args'] = get_onnx_cli_options()
if framework is not None:
argv['framework'] = framework
framework_specifics_map = {
'common_args': 'Common parameters:',
'advanced_args': 'Advanced parameters:',
'caffe_args': 'Caffe specific parameters:',
'tf_args': 'TensorFlow specific parameters:',
'mxnet_args': 'MXNet specific parameters:',
'kaldi_args': 'Kaldi specific parameters:',
'onnx_args': 'ONNX specific parameters:',
}
lines = []
for key in props:
lines.append(framework_specifics_map[key])
for (op, desc) in props[key].items():
if isinstance(desc, list):
lines.append('\t{}: \t{}'.format(desc[0], desc[1](getattr(argv, op, 'NONE'))))
else:
if op == 'k':
default_path = os.path.join(os.path.dirname(sys.argv[0]),
'openvino/tools/mo/front/caffe/CustomLayersMapping.xml')
if getattr(argv, op, 'NONE') == default_path:
lines.append('\t{}: \t{}'.format(desc, 'Default'))
continue
lines.append('\t{}: \t{}'.format(desc, getattr(argv, op, 'NONE')))
print('\n'.join(lines), flush=True)
def get_default_frontends():
# Set which frontend to use by default, values should be 'new' or 'legacy'
default_frontends = {
'onnx': 'new',
'tf': 'legacy'
}
return default_frontends
def get_moc_frontends(argv: argparse.Namespace):
fem = argv.feManager
# Read user flags:
use_legacy_frontend = argv.use_legacy_frontend
use_new_frontend = argv.use_new_frontend
if not fem or use_legacy_frontend:
return None, []
available_moc_front_ends = get_available_front_ends(fem)
if not argv.framework and argv.input_model:
moc_front_end = fem.load_by_model(argv.input_model)
if not moc_front_end:
return None, available_moc_front_ends
argv.framework = moc_front_end.get_name()
elif argv.framework in available_moc_front_ends:
moc_front_end = fem.load_by_framework(argv.framework)
else:
return None, []
default_frontends = get_default_frontends()
# Disable MOC frontend if default is set to legacy and no user override
if default_frontends.get(moc_front_end.get_name()) == 'legacy' and not use_new_frontend:
return None, available_moc_front_ends
# This check as a workaround to skip IR frontend
if not moc_front_end.get_name() in available_moc_front_ends:
return None, available_moc_front_ends
return moc_front_end, available_moc_front_ends
def arguments_post_parsing(argv: argparse.Namespace):
use_legacy_frontend = argv.use_legacy_frontend
use_new_frontend = argv.use_new_frontend
if use_new_frontend and use_legacy_frontend:
raise Error('Options --use_new_frontend and --use_legacy_frontend must not be used simultaneously '
'in the Model Optimizer command-line')
moc_front_end, available_moc_front_ends = get_moc_frontends(argv)
if not moc_front_end and use_new_frontend:
raise Error('Option --use_new_frontend is specified but the Model Optimizer is unable to find new frontend. '
'Please ensure that your environment contains new frontend for the input model format or '
'try to convert the model without specifying --use_new_frontend option.')
is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx = \
deduce_legacy_frontend_by_namespace(argv) if not moc_front_end else [False, False, False, False, False]
is_legacy_frontend = any([is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx])
if not is_legacy_frontend and use_legacy_frontend:
raise Error('Option --use_legacy_frontend is specified but Model Optimizer does not have legacy frontend '
'for the input model format. Please try to convert the model without specifying --use_legacy_frontend option.')
# handle a default case, i.e. use_new_frontend and use_legacy_frontend are not specified, when no frontend is found
if not is_legacy_frontend and not moc_front_end:
legacy_frameworks = ['tf', 'caffe', 'mxnet', 'kaldi', 'onnx']
frameworks = list(set(legacy_frameworks + available_moc_front_ends))
if not argv.framework:
raise Error('Framework name can not be deduced from the given options: {}={}. '
'Please use --framework with one from the list: {}.',
'--input_model', argv.input_model, frameworks)
elif argv.framework not in frameworks:
raise Error('Framework {} is not a valid target. Please use --framework with one from the list: {}. ' +
refer_to_faq_msg(15), argv.framework, frameworks)
if is_legacy_frontend:
if new_extensions_used(argv):
raise Error('New kind of extensions used on legacy path')
if new_transformations_config_used(argv):
raise Error('New kind of transformations configuration used on legacy path')
if is_tf and not argv.input_model and not argv.saved_model_dir and not argv.input_meta_graph:
raise Error('Path to input model or saved model dir is required: use --input_model, --saved_model_dir or '
'--input_meta_graph')
elif is_mxnet and not argv.input_model and not argv.input_symbol and not argv.pretrained_model_name:
raise Error('Path to input model or input symbol or pretrained_model_name is required: use --input_model or '
'--input_symbol or --pretrained_model_name')
elif is_caffe and not argv.input_model and not argv.input_proto:
raise Error('Path to input model or input proto is required: use --input_model or --input_proto')
elif (is_kaldi or is_onnx) and not argv.input_model:
raise Error('Path to input model is required: use --input_model.')
log.debug(str(argv))
log.debug("Model Optimizer started")
model_name = "<UNKNOWN_NAME>"
if argv.model_name:
model_name = argv.model_name
elif argv.input_model:
model_name = get_model_name(argv.input_model)
elif is_tf and argv.saved_model_dir:
model_name = "saved_model"
elif is_tf and argv.input_meta_graph:
model_name = get_model_name(argv.input_meta_graph)
elif is_mxnet and argv.input_symbol:
model_name = get_model_name(argv.input_symbol)
argv.model_name = model_name
log.debug('Output model name would be {}{{.xml, .bin}}'.format(argv.model_name))
# if --input_proto is not provided, try to retrieve another one
# by suffix substitution from model file name
if is_caffe and not argv.input_proto:
argv.input_proto = replace_ext(argv.input_model, '.caffemodel', '.prototxt')
if not argv.input_proto:
raise Error("Cannot find prototxt file: for Caffe please specify --input_proto - a " +
"protobuf file that stores topology and --input_model that stores " +
"pretrained weights. " +
refer_to_faq_msg(20))
log.info('Deduced name for prototxt: {}'.format(argv.input_proto))
if not argv.silent:
print_argv(argv, is_caffe, is_tf, is_mxnet, is_kaldi, is_onnx, argv.model_name)
# This try-except is additional reinsurance that the IE
# dependency search does not break the MO pipeline
def raise_ie_not_found():
raise Error("Could not find the Inference Engine or nGraph Python API.\n"
"Consider building the Inference Engine and nGraph Python APIs from sources or "
"try to install OpenVINO (TM) Toolkit using \"install_prerequisites.{}\"".format(
"bat" if sys.platform == "windows" else "sh"))
try:
if not find_ie_version(silent=argv.silent):
raise_ie_not_found()
except Exception as e:
log.error(e)
raise_ie_not_found()
if 'data_type' in argv and argv.data_type in ['FP16', 'half']:
argv.data_type = 'FP32'
argv.compress_fp16 = True
else:
argv.compress_fp16 = False
# This is just to check that transform key is valid and transformations are available
check_available_transforms(parse_transform(argv.transform))
# For C++ frontends there are no specific Python installation requirements, check only generic ones
if moc_front_end:
ret_code = check_requirements()
else:
ret_code = check_requirements(framework=argv.framework)
if ret_code:
raise Error('check_requirements exited with return code {}'.format(ret_code))
if is_tf and argv.tensorflow_use_custom_operations_config is not None:
argv.transformations_config = argv.tensorflow_use_custom_operations_config
if is_caffe and argv.mean_file and argv.mean_values:
raise Error('Both --mean_file and mean_values are specified. Specify either mean file or mean values. ' +
refer_to_faq_msg(17))
elif is_caffe and argv.mean_file and argv.mean_file_offsets:
values = get_tuple_values(argv.mean_file_offsets, t=int, num_exp_values=2)
mean_file_offsets = mo_array([int(x) for x in values[0].split(',')])
if not all([offset >= 0 for offset in mean_file_offsets]):
raise Error("Negative value specified for --mean_file_offsets option. "
"Please specify positive integer values in format '(x,y)'. " +
refer_to_faq_msg(18))
argv.mean_file_offsets = mean_file_offsets
if argv.scale and argv.scale_values:
raise Error(
'Both --scale and --scale_values are defined. Specify either scale factor or scale values per input ' +
'channels. ' + refer_to_faq_msg(19))
if argv.scale and argv.scale < 1.0:
log.error("The scale value is less than 1.0. This is most probably an issue because the scale value specifies "
"floating point value which all input values will be *divided*.", extra={'is_warning': True})
if argv.input_model and (is_tf and argv.saved_model_dir):
raise Error('Both --input_model and --saved_model_dir are defined. '
'Specify either input model or saved model directory.')
if is_tf:
if argv.saved_model_tags is not None:
if ' ' in argv.saved_model_tags:
raise Error('Incorrect saved model tag was provided. Specify --saved_model_tags with no spaces in it')
argv.saved_model_tags = argv.saved_model_tags.split(',')
argv.output = argv.output.split(',') if argv.output else None
inputs_list, argv.placeholder_shapes, argv.placeholder_data_types = get_placeholder_shapes(
argv.input, argv.input_shape, argv.batch)
argv.inputs_list = inputs_list
mean_values = parse_tuple_pairs(argv.mean_values)
scale_values = parse_tuple_pairs(argv.scale_values)
mean_scale = get_mean_scale_dictionary(mean_values, scale_values, argv.input)
argv.mean_scale_values = mean_scale
argv.layout_values = get_layout_values(argv.layout, argv.source_layout, argv.target_layout)
if not os.path.exists(argv.output_dir):
try:
os.makedirs(argv.output_dir)
except PermissionError as e:
raise Error("Failed to create directory {}. Permission denied! " +
refer_to_faq_msg(22),
argv.output_dir) from e
else:
if not os.access(argv.output_dir, os.W_OK):
raise Error("Output directory {} is not writable for current user. " +
refer_to_faq_msg(22), argv.output_dir)
log.debug("Placeholder shapes : {}".format(argv.placeholder_shapes))
argv.freeze_placeholder_with_value, argv.input = get_freeze_placeholder_values(argv.input,
argv.freeze_placeholder_with_value)
load_extensions(argv, is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx)
return argv
def load_extensions(argv: argparse.Namespace, is_tf: bool, is_caffe: bool, is_mxnet: bool, is_kaldi: bool,
is_onnx: bool):
extensions = None
if hasattr(argv, 'extensions') and argv.extensions and argv.extensions != '':
extensions = argv.extensions.split(',')
if is_tf:
from openvino.tools.mo.front.tf.register_custom_ops import get_front_classes
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
elif is_caffe:
send_framework_info('caffe')
from openvino.tools.mo.front.caffe.register_custom_ops import get_front_classes
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
elif is_mxnet:
send_framework_info('mxnet')
from openvino.tools.mo.front.mxnet.register_custom_ops import get_front_classes
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
elif is_kaldi:
send_framework_info('kaldi')
from openvino.tools.mo.front.kaldi.register_custom_ops import get_front_classes
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
elif is_onnx:
send_framework_info('onnx')
from openvino.tools.mo.front.onnx.register_custom_ops import get_front_classes
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
def check_fallback(argv: argparse.Namespace):
fallback_reasons = {}
# Some frontend such as PDPD does not have legacy path so it has no reasons to fallback
if not any(deduce_legacy_frontend_by_namespace(argv)):
return fallback_reasons
# There is no possibility for fallback if a user strictly wants to use new frontend
if argv.use_new_frontend:
return fallback_reasons
fallback_reasons['extensions'] = legacy_extensions_used
fallback_reasons['transformations_config'] = legacy_transformations_config_used
reasons = [reason for reason, is_applicable in fallback_reasons.items() if is_applicable(argv)]
return reasons
def prepare_ir(argv: argparse.Namespace):
argv = arguments_post_parsing(argv)
t = tm.Telemetry()
graph = None
ngraph_function = None
moc_front_end, available_moc_front_ends = get_moc_frontends(argv)
if moc_front_end:
fallback_reasons = check_fallback(argv)
if len(fallback_reasons) == 0:
t.send_event("mo", "conversion_method", moc_front_end.get_name() + "_frontend")
moc_front_end.add_extension(TelemetryExtension("mo", t.send_event, t.send_error, t.send_stack_trace))
moc_front_end.add_extension(ProgressReporterExtension(progress_printer(argv)))
if legacy_transformations_config_used(argv):
raise Error('Legacy extensions are not supported for the new frontend')
if legacy_extensions_used(argv):
raise Error('Legacy transformations configuration is not supported for the new frontend')
if new_transformations_config_used(argv):
moc_front_end.add_extension(JsonConfigExtension(argv.transformations_config))
if new_extensions_used(argv):
for extension in argv.extensions.split(','):
moc_front_end.add_extension(extension)
ngraph_function = moc_pipeline(argv, moc_front_end)
return graph, ngraph_function
else: # apply fallback
reasons_message = ", ".join(fallback_reasons)
load_extensions(argv, *list(deduce_legacy_frontend_by_namespace(argv)))
t.send_event("mo", "fallback_reason", reasons_message)
log.warning("The IR preparation was executed by the legacy MO path. "
"This is a fallback scenario applicable only for some specific cases. "
f"The detailed reason why fallback was executed: not supported {reasons_message} were used. "
"You can specify --use_new_frontend flag to force using the Frontend MO path to avoid additional checks. " +
refer_to_faq_msg(105))
t.send_event("mo", "conversion_method", "mo_legacy")
graph = unified_pipeline(argv)
return graph, ngraph_function
def emit_ir(graph: Graph, argv: argparse.Namespace):
NormalizeTI().find_and_replace_pattern(graph)
for_graph_and_each_sub_graph_recursively(graph, RemoveConstOps().find_and_replace_pattern)
for_graph_and_each_sub_graph_recursively(graph, CreateConstNodesReplacement().find_and_replace_pattern)
if 'feManager' in argv:
del argv.feManager
mean_data = deepcopy(graph.graph['mf']) if 'mf' in graph.graph else None
input_names = deepcopy(graph.graph['input_names']) if 'input_names' in graph.graph else []
prepare_emit_ir(graph=graph,
data_type=graph.graph['cmd_params'].data_type,
output_dir=argv.output_dir,
output_model_name=argv.model_name,
mean_data=mean_data,
input_names=input_names,
meta_info=get_meta_info(argv),
use_temporary_path=True)
# This graph cleanup is required to avoid double memory consumption
graph.clear()
if not (argv.framework == 'tf' and argv.tensorflow_custom_operations_config_update):
output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
orig_model_name = os.path.normpath(os.path.join(output_dir, argv.model_name))
return_code = "not executed"
try:
from openvino.tools.mo.back.offline_transformations import apply_offline_transformations
apply_offline_transformations(orig_model_name, argv)
if "compress_fp16" in argv and argv.compress_fp16:
# restore data_type cmd parameter
argv.data_type = 'FP16'
return_code = 0
except Exception as e:
return_code = "failed"
log.error(e)
message = str(dict({
"platform": platform.system(),
"mo_version": get_simplified_mo_version(),
"ie_version": get_simplified_ie_version(env=os.environ),
"python_version": sys.version,
"return_code": return_code
}))
t = tm.Telemetry()
t.send_event('mo', 'offline_transformations_status', message)
if return_code != 0:
raise Error("offline transformations step has failed.")
for suf in [".xml", ".bin", ".mapping"]:
# remove existing files
path_to_file = orig_model_name + "_tmp" + suf
if os.path.exists(path_to_file):
os.remove(path_to_file)
# add meta information to IR
append_ir_info(file=orig_model_name,
meta_info=get_meta_info(argv),
mean_data=mean_data,
input_names=input_names,
legacy_path=True)
print('[ SUCCESS ] Generated IR version {} model.'.format(get_ir_version(argv)))
print('[ SUCCESS ] XML file: {}.xml'.format(orig_model_name))
print('[ SUCCESS ] BIN file: {}.bin'.format(orig_model_name))
return 0
def driver(argv: argparse.Namespace):
init_logger(argv.log_level.upper(), argv.silent)
start_time = datetime.datetime.now()
graph, ngraph_function = prepare_ir(argv)
if graph is not None:
ret_res = emit_ir(graph, argv)
else:
ret_res = moc_emit_ir(ngraph_function, argv)
if ret_res != 0:
return ret_res
elapsed_time = datetime.datetime.now() - start_time
print('[ SUCCESS ] Total execution time: {:.2f} seconds. '.format(elapsed_time.total_seconds()))
try:
import resource
mem_usage = round(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
if sys.platform == 'darwin':
mem_usage = round(mem_usage / 1024)
print('[ SUCCESS ] Memory consumed: {} MB. '.format(mem_usage))
except ImportError:
pass
return ret_res
def main(cli_parser: argparse.ArgumentParser, fem: FrontEndManager, framework: str):
telemetry = tm.Telemetry(tid=get_tid(), app_name='Model Optimizer', app_version=get_simplified_mo_version())
telemetry.start_session('mo')
telemetry.send_event('mo', 'version', get_simplified_mo_version())
try:
# Initialize logger with 'ERROR' as default level to be able to form nice messages
# before arg parser deliver log_level requested by user
init_logger('ERROR', False)
argv = cli_parser.parse_args()
send_params_info(argv, cli_parser)
if framework:
argv.framework = framework
argv.feManager = fem
ov_update_message = None
ov_api20_message = None
if not hasattr(argv, 'silent') or not argv.silent:
ov_update_message = get_ov_update_message()
ov_api20_message = get_ov_api20_message()
ret_code = driver(argv)
if ov_update_message:
ngraph_function = convert(**argv)
ov_update_message = get_ov_update_message()
ov_api20_message = get_ov_api20_message()
if ov_update_message is not None:
print(ov_update_message)
if ov_api20_message and ret_code == 0:
if ov_api20_message is not None and ngraph_function is not None:
print(ov_api20_message)
telemetry.send_event('mo', 'conversion_result', 'success')
telemetry.end_session('mo')
telemetry.force_shutdown(1.0)
return ret_code
except (FileNotFoundError, NotADirectoryError) as e:
log.error('File {} was not found'.format(str(e).split('No such file or directory:')[1]))
log.debug(traceback.format_exc())
@@ -564,14 +72,29 @@ def main(cli_parser: argparse.ArgumentParser, fem: FrontEndManager, framework: s
log.error("---------------- END OF BUG REPORT --------------")
log.error("-------------------------------------------------")
telemetry.send_event('mo', 'conversion_result', 'fail')
telemetry.end_session('mo')
telemetry.force_shutdown(1.0)
return 1
if ngraph_function is None:
return 1
output_dir = argv['output_dir'] if argv['output_dir'] != '.' else os.getcwd()
model_path_no_ext = os.path.normpath(os.path.join(output_dir, argv['model_name']))
model_path = model_path_no_ext + '.xml'
serialize(ngraph_function, model_path.encode('utf-8'), model_path.replace('.xml', '.bin').encode('utf-8'))
# add meta information to IR
append_ir_info(file=model_path_no_ext, meta_info=get_meta_info(argv))
# generate .mapping file
path_to_mapping = model_path_no_ext + ".mapping"
extract_names = argv['framework'] in ['tf', 'mxnet', 'kaldi']
generate_mapping_file(ngraph_function, path_to_mapping, extract_names)
print('[ SUCCESS ] Generated IR version {} model.'.format(get_ir_version(argv)))
print('[ SUCCESS ] XML file: {}'.format(model_path))
print('[ SUCCESS ] BIN file: {}'.format(model_path.replace('.xml', '.bin')))
return 0
if __name__ == "__main__":
from openvino.tools.mo.utils.cli_parser import get_all_cli_parser
fe_manager = FrontEndManager()
sys.exit(main(get_all_cli_parser(fe_manager), fe_manager, None))
sys.exit(main(get_all_cli_parser(FrontEndManager()), None))

View File

@@ -7,4 +7,4 @@ from openvino.tools.mo.utils.cli_parser import get_caffe_cli_parser
if __name__ == "__main__":
from openvino.tools.mo.main import main
sys.exit(main(get_caffe_cli_parser(), None, 'caffe'))
sys.exit(main(get_caffe_cli_parser(), 'caffe'))

View File

@@ -7,4 +7,4 @@ from openvino.tools.mo.utils.cli_parser import get_kaldi_cli_parser
if __name__ == "__main__":
from openvino.tools.mo.main import main
sys.exit(main(get_kaldi_cli_parser(), None, 'kaldi'))
sys.exit(main(get_kaldi_cli_parser(), 'kaldi'))

View File

@@ -7,4 +7,4 @@ from openvino.tools.mo.utils.cli_parser import get_mxnet_cli_parser
if __name__ == "__main__":
from openvino.tools.mo.main import main
sys.exit(main(get_mxnet_cli_parser(), None, 'mxnet'))
sys.exit(main(get_mxnet_cli_parser(), 'mxnet'))

View File

@@ -7,6 +7,5 @@ from openvino.tools.mo.utils.cli_parser import get_onnx_cli_parser
if __name__ == "__main__":
from openvino.tools.mo.main import main
from openvino.frontend import FrontEndManager # pylint: disable=no-name-in-module,import-error
sys.exit(main(get_onnx_cli_parser(), FrontEndManager(), 'onnx'))
sys.exit(main(get_onnx_cli_parser(), 'onnx'))

View File

@@ -10,5 +10,4 @@ from openvino.frontend import FrontEndManager # pylint: disable=no-name-in-modu
if __name__ == "__main__":
from openvino.tools.mo.main import main
fem = FrontEndManager()
sys.exit(main(get_all_cli_parser(fem), fem, 'paddle'))
sys.exit(main(get_all_cli_parser(FrontEndManager()), 'paddle'))

View File

@@ -7,4 +7,4 @@ from openvino.tools.mo.utils.cli_parser import get_tf_cli_parser
if __name__ == "__main__":
from openvino.tools.mo.main import main
sys.exit(main(get_tf_cli_parser(), None, 'tf'))
sys.exit(main(get_tf_cli_parser(), 'tf'))

View File

@@ -27,15 +27,4 @@ class PartialInfer(MiddleReplacementPattern):
if not is_fully_defined(param_shape):
parameter_name = parameter.soft_get('name', parameter.id)
dynamic_inputs[parameter_name] = param_shape
if dynamic_inputs:
log.error('The model contains input(s) with partially defined shapes: {}. '
'Starting from the 2022.1 release the Model Optimizer can generate an IR with partially defined '
'input shapes ("-1" dimension in the TensorFlow model or dimension with string value in the ONNX '
'model). Some of the OpenVINO plugins require model input shapes to be static, so you should '
'call "reshape" method in the Inference Engine and specify static input shapes. For optimal '
'performance, it is still recommended to update input shapes with fixed ones using "--input" or '
'"--input_shape" command-line parameters.'
.format(','.join(f'name="{name}" shape="{unmask_shape(param_shape)}"'
for name, param_shape in dynamic_inputs.items())),
extra={'is_warning': True})
partial_infer(graph)

View File

@@ -10,17 +10,38 @@ from openvino.tools.mo.utils.error import Error
def any_extensions_used(argv: argparse.Namespace):
return hasattr(argv, 'extensions') and argv.extensions is not None and len(argv.extensions) > 0 \
and argv.extensions != import_extensions.default_path() # extensions arg has default value
# Checks that extensions are provided.
# Allowed types are string containing path to legacy extension directory
# or path to new extension .so file, or classes inherited from BaseExtension.
if not hasattr(argv, 'extensions') or argv.extensions is None:
return False
if isinstance(argv.extensions, list) and len(argv.extensions) > 0:
has_non_default_path = False
has_non_str_objects = False
for ext in argv.extensions:
if not isinstance(ext, str):
has_non_str_objects = True
continue
if len(ext) == 0 or ext == import_extensions.default_path():
continue
has_non_default_path = True
return has_non_default_path or has_non_str_objects
raise Exception("Expected list of extensions, got {}.".format(type(argv.extensions)))
def legacy_extensions_used(argv: argparse.Namespace):
if any_extensions_used(argv):
extensions = argv.extensions.split(',')
extensions = argv.extensions
legacy_ext_counter = 0
for extension in extensions:
path = Path(extension)
if not path.is_file():
if not isinstance(extension, str):
continue
if extension == import_extensions.default_path():
continue
if not Path(extension).is_file():
legacy_ext_counter += 1
if legacy_ext_counter == len(extensions):
return True # provided only legacy extensions
@@ -33,11 +54,16 @@ def legacy_extensions_used(argv: argparse.Namespace):
def new_extensions_used(argv: argparse.Namespace):
if any_extensions_used(argv):
extensions = argv.extensions.split(',')
extensions = argv.extensions
if not isinstance(extensions, list):
extensions = [extensions]
new_ext_counter = 0
for extension in argv.extensions.split(','):
path = Path(extension)
if path.is_file() and (path.suffix == '.so' or path.suffix == '.dll'):
for extension in extensions:
if isinstance(extension, str):
path = Path(extension)
if path.is_file() and (path.suffix == '.so' or path.suffix == '.dll'):
new_ext_counter += 1
else:
new_ext_counter += 1
if new_ext_counter == len(extensions):
return True # provided only new extensions
@@ -71,9 +97,10 @@ def is_new_json_config(json_file_path: str):
def get_transformations_config_path(argv: argparse.Namespace) -> Path:
if hasattr(argv, 'transformations_config') \
and argv.transformations_config is not None and len(argv.transformations_config):
path = Path(argv.transformations_config)
if path.is_file():
return path
if isinstance(argv.transformations_config, str):
path = Path(argv.transformations_config)
if path.is_file():
return path
return None
@@ -81,6 +108,11 @@ def new_transformations_config_used(argv: argparse.Namespace):
path = get_transformations_config_path(argv)
if path != None:
return is_new_json_config(path)
if hasattr(argv, 'transformations_config') \
and argv.transformations_config is not None and not isinstance(argv.transformations_config, str):
return True
return False

View File

@@ -38,26 +38,5 @@ def moc_emit_ir(ngraph_function: Model, argv: argparse.Namespace):
from openvino.tools.mo.back.offline_transformations import compress_model
compress_model(ngraph_function)
orig_model_name = os.path.normpath(os.path.join(output_dir, argv.model_name))
from openvino.runtime import serialize # pylint: disable=import-error,no-name-in-module
from openvino.offline_transformations import generate_mapping_file # pylint: disable=import-error,no-name-in-module
serialize(ngraph_function, (orig_model_name + ".xml").encode('utf-8'), (orig_model_name + ".bin").encode('utf-8'))
del argv.feManager
path_to_mapping = orig_model_name + ".mapping"
extract_names = argv.framework in ['tf', 'mxnet', 'kaldi']
generate_mapping_file(ngraph_function, path_to_mapping.encode('utf-8'), extract_names)
# add meta information to IR
append_ir_info(file=orig_model_name,
meta_info=get_meta_info(argv),
mean_data=None,
input_names=None,
legacy_path=False)
print('[ SUCCESS ] Generated IR version {} model.'.format(get_ir_version(argv)))
print('[ SUCCESS ] XML file: {}.xml'.format(orig_model_name))
print('[ SUCCESS ] BIN file: {}.bin'.format(orig_model_name))
return 0
return ngraph_function

View File

@@ -7,7 +7,6 @@ import os
from operator import itemgetter
import networkx as nx
from openvino.tools.mo.back.RemoveUselessConvert import RemoveUselessConvert
from openvino.tools.mo.back.ResultRename import ResultRename
from openvino.tools.mo.back.ie_ir_ver_2.emitter import port_renumber, serialize_constants, generate_ie_ir, \
@@ -16,11 +15,9 @@ from openvino.tools.mo.back.op_versioning import OpVersioning
from openvino.tools.mo.graph.graph import Node, Graph
from openvino.tools.mo.middle.passes import tensor_names, convert_data_type
from openvino.tools.mo.middle.passes.convert_data_type import data_type_str_to_np
from openvino.tools.mo.middle.passes.eliminate import shape_inference
from openvino.tools.mo.middle.passes.infer import type_infer
from openvino.tools.mo.middle.pattern_match import for_graph_and_each_sub_graph_recursively
from openvino.tools.mo.ops.Cast import Cast
from openvino.tools.mo.ops.op import Op
from openvino.tools.mo.utils.error import Error

File diff suppressed because it is too large Load Diff

View File

@@ -231,7 +231,7 @@ def get_environment_setup(framework):
return env_setup
def check_requirements(framework=None):
def check_requirements(framework=None, silent=True):
"""
Please do not add parameter type annotations (param:type).
Because we import this file while checking Python version.
@@ -241,6 +241,7 @@ def check_requirements(framework=None):
Logs a warning in case of permissible dissatisfaction
Logs an error in cases of critical dissatisfaction
:param framework: framework name
:param silent: determines if it is required to print warning messages
:return: exit code (0 - execution successful, 1 - error)
"""
framework_suffix = "_{}".format(framework)
@@ -249,9 +250,10 @@ def check_requirements(framework=None):
framework_suffix = ""
elif framework == "tf":
if "tensorflow" in env_setup and env_setup["tensorflow"] < LooseVersion("2.0.0"):
log.error('\t\nSupport of the Model Optimizer tool in TensorFlow 1.x environment is deprecated.'
'It is highly recommended to use TensorFlow 2.x.\n',
extra={'is_warning': True})
if not silent:
log.error('\t\nSupport of the Model Optimizer tool in TensorFlow 1.x environment is deprecated.'
'It is highly recommended to use TensorFlow 2.x.\n',
extra={'is_warning': True})
file_name = "requirements{}.txt".format(framework_suffix)
@@ -282,9 +284,10 @@ def check_requirements(framework=None):
not_satisfied_versions.append((name, 'not installed', ''))
continue
except Exception as e:
log.error('Error happened while importing {} module. It may happen due to unsatisfied requirements of '
'that module. Please run requirements installation script once more.\n'
'Details on module importing failure: {}'.format(name, e))
if not silent:
log.error('Error happened while importing {} module. It may happen due to unsatisfied requirements of '
'that module. Please run requirements installation script once more.\n'
'Details on module importing failure: {}'.format(name, e))
not_satisfied_versions.append((name, 'package error', 'required: {} {}'.format(key, required_version)))
continue
@@ -304,7 +307,9 @@ def check_requirements(framework=None):
for module in not_satisfied_versions:
missed_modules_message += "\t{}: {}, {}\n".format(module[0], module[1], module[2])
if exit_code:
log.error(message.format(missed_modules_message, helper_command))
if not silent:
log.error(message.format(missed_modules_message, helper_command))
else:
log.error(message.format(missed_modules_message, helper_command), extra={'is_warning': True})
if not silent:
log.error(message.format(missed_modules_message, helper_command), extra={'is_warning': True})
return exit_code

View File

@@ -37,6 +37,8 @@ for item in os.listdir(prefix):
if re.match(r'mo(.*)\.py|main(.*)\.py', item):
py_modules.append(prefix.replace('/', '.') + item.split('.')[0])
py_modules.append(prefix.replace('/', '.') + 'subprocess_main')
py_modules.append(prefix.replace('/', '.') + 'convert')
py_modules.append(prefix.replace('/', '.') + 'convert_impl')
py_modules.append(prefix.replace('/', '.') + '__main__')
# Minimal set of dependencies

View File

@@ -0,0 +1,108 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import os
import tempfile
from generator import generator, generate
from openvino.runtime import serialize
from openvino.tools.mo import InputCutInfo, LayoutMap
from openvino.tools.mo.utils.ir_engine.ir_engine import IREngine
from unit_tests.mo.unit_test_with_mocked_telemetry import UnitTestWithMockedTelemetry
from unit_tests.utils.graph import build_graph
from utils import create_onnx_model, save_to_onnx
@generator
class ConvertImportMOTest(UnitTestWithMockedTelemetry):
    """Smoke tests for ``convert`` imported from ``openvino.tools.mo``.

    Each test builds a small ONNX model on disk, runs ``convert()`` on it and
    serializes the returned model to IR, exercising the conversion pipeline
    end to end.
    """
    # Temporary model directories are created next to this test file so that
    # TemporaryDirectory cleanup removes all generated artifacts.
    test_directory = os.path.dirname(os.path.realpath(__file__))

    @generate(*[
        # No extra parameters: plain conversion of the fixture model.
        ({}),
        # 'input' given as an InputCutInfo object (cut at the LeakyRelu output).
        ({'input': InputCutInfo(name='LeakyRelu_out', shape=None, type=None, value=None)}),
        # 'layout' given via the LayoutMap object form.
        ({'layout': {'input': LayoutMap(source_layout='NCHW', target_layout='NHWC')}}),
    ])
    def test_import(self, params):
        # Imported inside the test so an import failure fails this test rather
        # than test collection.
        from openvino.tools.mo import convert

        with tempfile.TemporaryDirectory(dir=self.test_directory) as tmpdir:
            model = create_onnx_model()
            model_path = save_to_onnx(model, tmpdir)
            out_xml = os.path.join(tmpdir, "model.xml")

            ov_model = convert(input_model=model_path, **params)
            serialize(ov_model, out_xml.encode('utf-8'), out_xml.replace('.xml', '.bin').encode('utf-8'))
            # convert() returns an in-memory model; the .xml must come from the
            # explicit serialize() call above.
            assert os.path.exists(out_xml)

    def test_unnamed_input_model(self):
        # Local helper intentionally shadows the module-level create_onnx_model
        # imported from utils: this test needs a different topology
        # (Relu -> Sigmoid instead of LeakyRelu -> Elu).
        def create_onnx_model():
            #
            # Create ONNX model
            #
            import onnx
            from onnx import helper
            from onnx import TensorProto

            shape = [1, 2, 3]
            input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape)
            output = helper.make_tensor_value_info('output', TensorProto.FLOAT, shape)

            node_def = onnx.helper.make_node(
                'Relu',
                inputs=['input'],
                outputs=['Relu_out'],
            )
            node_def2 = onnx.helper.make_node(
                'Sigmoid',
                inputs=['Relu_out'],
                outputs=['output'],
            )

            # Create the graph (GraphProto)
            graph_def = helper.make_graph(
                [node_def, node_def2],
                'test_model',
                [input],
                [output],
            )

            # Create the model (ModelProto)
            onnx_net = helper.make_model(graph_def, producer_name='test_model')
            return onnx_net

        # Reference graph the converted IR is compared against:
        # Parameter -> ReLU -> Sigmoid -> Result.
        nodes_attributes = {
            'input': {'kind': 'op', 'type': 'Parameter'},
            'input_data': {'shape': [1, 2, 3], 'kind': 'data'},

            'relu': {'kind': 'op', 'type': 'ReLU'},
            'relu_data': {'shape': [1, 2, 3], 'kind': 'data'},

            'sigmoid': {'kind': 'op', 'type': 'Sigmoid'},
            'sigmoid_data': {'shape': [1, 2, 3], 'kind': 'data'},

            'result': {'kind': 'op', 'type': 'Result'}
        }
        ref_graph = build_graph(nodes_attributes,
                                [('input', 'input_data'),
                                 ('input_data', 'relu'),
                                 ('relu', 'relu_data'),
                                 ('relu_data', 'sigmoid'),
                                 ('sigmoid', 'sigmoid_data'),
                                 ('sigmoid_data', 'result'),
                                 ])

        from openvino.tools.mo import convert

        with tempfile.TemporaryDirectory(dir=self.test_directory) as tmpdir:
            model = create_onnx_model()
            model_path = save_to_onnx(model, tmpdir)
            out_xml = os.path.join(tmpdir, "model.xml")

            # Positional (unnamed) input_model argument is the case under test.
            ov_model = convert(model_path)
            serialize(ov_model, out_xml.encode('utf-8'), out_xml.replace('.xml', '.bin').encode('utf-8'))

            ir = IREngine(out_xml, out_xml.replace('.xml', '.bin'))
            flag, resp = ir.compare(ref_graph)
            assert flag, '\n'.join(resp)

View File

@@ -0,0 +1,52 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import os
def create_onnx_model():
    """Build a tiny two-node ONNX model (LeakyRelu -> Elu) used as a test fixture.

    Returns an in-memory ``onnx.ModelProto``; nothing is written to disk.
    """
    import onnx
    from onnx import helper
    from onnx import TensorProto

    tensor_shape = [1, 3, 2, 2]

    # One float input and one float output of identical shape.
    graph_input = helper.make_tensor_value_info('input', TensorProto.FLOAT, tensor_shape)
    graph_output = helper.make_tensor_value_info('output', TensorProto.FLOAT, tensor_shape)

    # LeakyRelu feeding Elu, both parameterized with alpha=0.1.
    leaky_relu_node = onnx.helper.make_node(
        'LeakyRelu',
        inputs=['input'],
        outputs=['LeakyRelu_out'],
        alpha=0.1,
    )
    elu_node = onnx.helper.make_node(
        'Elu',
        inputs=['LeakyRelu_out'],
        outputs=['output'],
        alpha=0.1,
    )

    # Assemble the GraphProto and wrap it into a ModelProto.
    graph_proto = helper.make_graph(
        [leaky_relu_node, elu_node],
        'test_model',
        [graph_input],
        [graph_output],
    )
    return helper.make_model(graph_proto, producer_name='test_model')
def save_to_onnx(onnx_model, path_to_saved_onnx_model):
    """Serialize *onnx_model* as ``model.onnx`` inside the given directory.

    :param onnx_model: in-memory ``onnx.ModelProto`` to write out
    :param path_to_saved_onnx_model: directory that receives ``model.onnx``
    :return: full path of the written model file
    """
    import onnx

    full_path = os.path.join(path_to_saved_onnx_model, 'model.onnx')
    onnx.save(onnx_model, full_path)
    # Fail loudly if serialization silently produced no file.
    assert os.path.isfile(full_path), "model.onnx haven't been saved here: {}".format(path_to_saved_onnx_model)
    return full_path

View File

@@ -12,7 +12,7 @@ import json
import argparse
from pathlib import Path
from itertools import chain
from openvino.tools.mo.main import prepare_ir
from openvino.tools.mo.convert_impl import prepare_ir
from openvino.frontend import (
FrontEndManager,
) # pylint: disable=no-name-in-module,import-error
@@ -127,7 +127,7 @@ class TestMoFallback(unittest.TestCase):
def test_conersion_if_extensions_is_used(self):
args = base_args_config()
args.input_model = "test_model.onnx"
args.extensions = get_builtin_extensions_path()
args.extensions = [get_builtin_extensions_path()]
graph, model = prepare_ir(args)

View File

@@ -22,9 +22,9 @@ ngraph_needed = pytest.mark.skipif(not ngraph_available,
class TestMainErrors(unittest.TestCase):
@patch('argparse.ArgumentParser.parse_args', return_value=argparse.Namespace())
@patch('openvino.tools.mo.main.driver', side_effect=FrameworkError('FW ERROR MESSAGE'))
@patch('openvino.tools.mo.convert_impl.driver', side_effect=FrameworkError('FW ERROR MESSAGE'))
@ngraph_needed
def test_FrameworkError(self, mock_argparse, mock_driver):
with self.assertLogs() as logger:
main(argparse.ArgumentParser(), None, 'framework_string')
main(argparse.ArgumentParser())
self.assertEqual(logger.output, ['ERROR:root:FW ERROR MESSAGE'])

View File

@@ -14,3 +14,6 @@ class UnitTestWithMockedTelemetry(unittest.TestCase):
def setUp(self):
tm.Telemetry.__init__ = Mock(return_value=None)
tm.Telemetry.send_event = Mock()
tm.Telemetry.start_session = Mock()
tm.Telemetry.end_session = Mock()
tm.Telemetry.force_shutdown = Mock()

View File

@@ -0,0 +1,227 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import numpy as np
from openvino.runtime import Layout, PartialShape, Dimension, Shape, Type
from openvino.tools.mo import InputCutInfo, LayoutMap
from openvino.tools.mo.utils.cli_parser import input_to_str, mean_scale_value_to_str, \
transform_param_to_str, input_shape_to_str, str_list_to_str, source_target_layout_to_str, layout_param_to_str
from unit_tests.mo.unit_test_with_mocked_telemetry import UnitTestWithMockedTelemetry
class TestConvertingConvertArgumentsToString(UnitTestWithMockedTelemetry):
def test_input_to_str(self):
inp1 = InputCutInfo(name="data:0", shape=None, type=None, value=None)
self.assertTrue(input_to_str(inp1) == "data:0")
inp2 = InputCutInfo("data:0", [1, 3, 100, 100], type=None, value=None)
self.assertTrue(input_to_str(inp2) == "data:0[1 3 100 100]")
inp3 = InputCutInfo("data:0", type=np.int32, value=None, shape=None)
self.assertTrue(input_to_str(inp3) == "data:0{i32}")
inp4 = InputCutInfo("data:0", value=[2, 4, 5], type=None, shape=None)
self.assertTrue(input_to_str(inp4) == "data:0->[2 4 5]")
inp5 = InputCutInfo("data:0", [1, 3, 100, 100], np.uint8, value=None)
self.assertTrue(input_to_str(inp5) == "data:0[1 3 100 100]{u8}")
inp6 = InputCutInfo("data:0", [2, 5, 7], value=[1, 2, 3, 4, 5], type=None)
self.assertTrue(input_to_str(inp6) == "data:0[2 5 7]->[1 2 3 4 5]")
inp7 = InputCutInfo("0:data1", type=np.float64, value=[1.6, 7.2, 5.66], shape=None)
self.assertTrue(input_to_str(inp7) == "0:data1{f64}->[1.6 7.2 5.66]")
inp8 = InputCutInfo("data2", [4, 5, 6], np.int64, [5, 4, 3, 2, 1])
self.assertTrue(input_to_str(inp8) == "data2[4 5 6]{i64}->[5 4 3 2 1]")
inp9 = InputCutInfo("data", [1], np.bool, True)
self.assertTrue(input_to_str(inp9) == "data[1]{boolean}->True")
inp = [inp6, inp7, inp8]
self.assertTrue(input_to_str(inp) == "data:0[2 5 7]->[1 2 3 4 5],"
"0:data1{f64}->[1.6 7.2 5.66],"
"data2[4 5 6]{i64}->[5 4 3 2 1]")
inp = ["data:0[2 5 7]->[1 2 3 4 5]", "0:data1{f64}->[1.6 7.2 5.66]", "data2[4 5 6]{i64}->[5 4 3 2 1]"]
self.assertTrue(input_to_str(inp) == "data:0[2 5 7]->[1 2 3 4 5],"
"0:data1{f64}->[1.6 7.2 5.66],"
"data2[4 5 6]{i64}->[5 4 3 2 1]")
inp9 = InputCutInfo("data1", PartialShape([Dimension(-1), Dimension(2, -1),
Dimension(-1, 10), 100, Dimension(2, 12)]), type=None, value=None)
self.assertTrue(input_to_str(inp9) == "data1[? 2.. ..10 100 2..12]")
inp10 = InputCutInfo("data2", [Dimension(-1), Dimension(2, -1),
Dimension(-1, 10), 100, Dimension(2, 12)], np.uint8, value=None)
self.assertTrue(input_to_str(inp10) == "data2[? 2.. ..10 100 2..12]{u8}")
inp11 = InputCutInfo("data3", Shape([4, 5, 6]), np.int64, [5, 4, 3, 2, 1])
self.assertTrue(input_to_str(inp11) == "data3[4 5 6]{i64}->[5 4 3 2 1]")
inp12 = InputCutInfo("data4", PartialShape.dynamic(), type=None, value=None)
self.assertTrue(input_to_str(inp12) == "data4[...]")
inp = [inp9, inp10, inp11, inp12]
self.assertTrue(input_to_str(inp) == "data1[? 2.. ..10 100 2..12],"
"data2[? 2.. ..10 100 2..12]{u8},"
"data3[4 5 6]{i64}->[5 4 3 2 1],"
"data4[...]")
inp1 = ("data:0")
self.assertTrue(input_to_str(inp1) == "data:0")
inp2 = ([1, 3, 100, 100], "data:0")
self.assertTrue(input_to_str(inp2) == "data:0[1 3 100 100]")
inp3 = ("data:0", np.int32)
self.assertTrue(input_to_str(inp3) == "data:0{i32}")
inp4 = (np.uint8, [1, 3, 100, 100], "data:0")
self.assertTrue(input_to_str(inp4) == "data:0[1 3 100 100]{u8}")
inp = [inp1, inp2, inp3, inp4]
self.assertTrue(input_to_str(inp) == "data:0,"
"data:0[1 3 100 100],"
"data:0{i32},"
"data:0[1 3 100 100]{u8}")
inp5 = ("data1", PartialShape([Dimension(-1), Dimension(2, -1), Dimension(-1, 10), 100, Dimension(2, 12)]))
self.assertTrue(input_to_str(inp5) == "data1[? 2.. ..10 100 2..12]")
inp6 = ("data2", [Dimension(-1), Dimension(2, -1), Dimension(-1, 10), 100, Dimension(2, 12)], np.uint8)
self.assertTrue(input_to_str(inp6) == "data2[? 2.. ..10 100 2..12]{u8}")
inp7 = ("data3", Shape([4, 5, 6]), np.int64)
self.assertTrue(input_to_str(inp7) == "data3[4 5 6]{i64}")
inp8 = ("data4", PartialShape.dynamic())
self.assertTrue(input_to_str(inp8) == "data4[...]")
inp = [inp5, inp6, inp7, inp8]
self.assertTrue(input_to_str(inp) == "data1[? 2.. ..10 100 2..12],"
"data2[? 2.. ..10 100 2..12]{u8},"
"data3[4 5 6]{i64},"
"data4[...]")
self.assertRaises(Exception, input_to_str, **{"input": InputCutInfo(0.5, [1, 2, 3], None, None)})
self.assertRaises(Exception, input_to_str, **{"input": InputCutInfo("name", 0.5, None, None)})
self.assertRaises(Exception, input_to_str, **{"input": InputCutInfo("name", [1, 2, 3], 0.5, None)})
self.assertRaises(Exception, input_to_str, **{"input": InputCutInfo("name", [1, 2, 3], None, np.int)})
self.assertRaises(Exception, input_to_str, **{"input": InputCutInfo("name", [1, 2, 3], None, np.int)})
self.assertRaises(Exception, input_to_str, **{"input": ([2, 3], Shape([1, 2]))})
self.assertRaises(Exception, input_to_str, **{"input": ("name", [np.int, 2, 3])})
self.assertRaises(Exception, input_to_str, **{"input": ("name", "name1", [2, 3])})
self.assertRaises(Exception, input_to_str, **{"input": ("name", [2, 3], Shape([1, 2]))})
self.assertRaises(Exception, input_to_str, **{"input": ("name", np.int, Type(np.float))})
self.assertRaises(Exception, input_to_str, **{"input": Exception})
self.assertRaises(Exception, input_to_str, **{"input": ("name", Exception)})
self.assertRaises(Exception, input_to_str, **{"input": ("name", Dimension(1))})
def test_mean_scale_value_to_str(self):
values = [0.5, 1.3, 0.67]
self.assertTrue(mean_scale_value_to_str(values) == "[0.5,1.3,0.67]")
values = {"input": [0.5, 1.3, 0.67]}
self.assertTrue(mean_scale_value_to_str(values) == "input[0.5,1.3,0.67]")
values = {"input1": [0.5, 1.3, 0.67], "input2": [4.2, 6.7, 3.15], "input3": [0.757, 4.6, 7.3]}
self.assertTrue(mean_scale_value_to_str(values) ==
"input1[0.5,1.3,0.67],input2[4.2,6.7,3.15],input3[0.757,4.6,7.3]")
self.assertRaises(Exception, mean_scale_value_to_str, **{"value": {("a", "b"): [0.5, 1.3, 0.67]}})
self.assertRaises(Exception, mean_scale_value_to_str, **{"value": {"name": Dimension(1)}})
self.assertRaises(Exception, mean_scale_value_to_str, **{"value": Dimension(1)})
def test_transform_param_to_str(self):
transform = 'MakeStateful'
self.assertTrue(transform_param_to_str(transform) == "MakeStateful")
transform1 = ('LowLatency2', {'use_const_initializer': False})
self.assertTrue(transform_param_to_str(transform1) ==
"LowLatency2[use_const_initializer=False]")
transform2 = ('MakeStateful', {'param_res_names': {
'input_name_1': 'output_name_1', 'input_name_2': 'output_name_2'}})
self.assertTrue(transform_param_to_str(transform2) ==
"MakeStateful[param_res_names={\'input_name_1\':\'output_name_1\',"
"\'input_name_2\':\'output_name_2\'}]")
transform = [transform1, transform2]
self.assertTrue(transform_param_to_str(transform) == "LowLatency2[use_const_initializer=False],"
"MakeStateful[param_res_names={"
"\'input_name_1\':\'output_name_1\',"
"\'input_name_2\':\'output_name_2\'}]")
self.assertRaises(Exception, transform_param_to_str, **{"value": ('LowLatency2',
{'use_const_initializer': False},
"param")})
self.assertRaises(Exception, transform_param_to_str, **{"value": (("a", "b"), {})})
self.assertRaises(Exception, transform_param_to_str, **{"value": ('LowLatency2', Dimension(1))})
self.assertRaises(Exception, transform_param_to_str, **{"value": ('LowLatency2',
{('a', 'b'): False})})
self.assertRaises(Exception, transform_param_to_str, **{"value": Dimension(1)})
def test_input_shape_to_str(self):
input_shape1 = [1, 3, 100, 100]
self.assertTrue(input_shape_to_str(input_shape1) == "[1,3,100,100]")
input_shape2 = PartialShape([1, 3, 100, 100])
self.assertTrue(input_shape_to_str(input_shape2) == "[1,3,100,100]")
input_shape3 = PartialShape([Dimension(-1), Dimension(2, -1), Dimension(-1, 10), 100, Dimension(2, 12)])
self.assertTrue(input_shape_to_str(input_shape3) == "[?,2..,..10,100,2..12]")
input_shape4 = PartialShape.dynamic()
self.assertTrue(input_shape_to_str(input_shape4) == "[...]")
input_shape5 = Shape([1, 2, 3, 4])
self.assertTrue(input_shape_to_str(input_shape5) == "[1,2,3,4]")
input_shape6 = [Dimension(-1), Dimension(2, -1), Dimension(-1, 10), 100, Dimension(2, 12)]
self.assertTrue(input_shape_to_str(input_shape6) == "[?,2..,..10,100,2..12]")
input_shape = [input_shape1, input_shape2, input_shape3, input_shape4, input_shape5, input_shape6]
self.assertTrue(input_shape_to_str(input_shape) == "[1,3,100,100],[1,3,100,100],[?,2..,..10,100,2..12],"
"[...],[1,2,3,4],[?,2..,..10,100,2..12]")
self.assertRaises(Exception, input_shape_to_str, **{"input_shape": [np.int, 1]})
self.assertRaises(Exception, input_shape_to_str, **{"input_shape": Dimension(1)})
def test_str_list_to_str(self):
list_str = ["data1", "data2", "data3"]
self.assertTrue(str_list_to_str(list_str) == "data1,data2,data3")
list_str = "data1"
self.assertTrue(str_list_to_str(list_str) == "data1")
self.assertRaises(Exception, str_list_to_str, **{"values": [np.int, 1]})
self.assertRaises(Exception, str_list_to_str, **{"values": Dimension(1)})
def test_source_target_layout_to_str(self):
layout = {"input1": Layout("nhwc"), "input2": Layout("n??"), "input3": "nchw"}
self.assertTrue(source_target_layout_to_str(layout) == "input1([N,H,W,C]),input2([N,?,?]),input3(nchw)")
self.assertRaises(Exception, source_target_layout_to_str, **{"value": {"op": Dimension(1)}})
self.assertRaises(Exception, source_target_layout_to_str, **{"value": {("a", "b"): Layout("nhwc")}})
self.assertRaises(Exception, source_target_layout_to_str, **{"value": Dimension(1)})
def test_layout_param_to_str_to_str(self):
layout = {"input1": Layout("nhwc"), "input2": Layout("n??"), "input3": "nchw"}
self.assertTrue(layout_param_to_str(layout) == "input1([N,H,W,C]),input2([N,?,?]),input3(nchw)")
layout_map1 = LayoutMap(source_layout=Layout("n??"), target_layout=None)
layout_map2 = LayoutMap(source_layout=Layout("nhwc"), target_layout=("nchw"))
layout_map3 = LayoutMap(source_layout="abc", target_layout="cab")
layout = {"input1": layout_map1, "input2": layout_map2, "input3": layout_map3, "input4": Layout("nhwc"),
"input5": "n?"}
self.assertTrue(layout_param_to_str(layout) == "input1([N,?,?]),input2([N,H,W,C]->nchw),"
"input3(abc->cab),input4([N,H,W,C]),input5(n?)")
self.assertRaises(Exception, layout_param_to_str, **{"value": {"op": Dimension(1)}})
self.assertRaises(Exception, layout_param_to_str, **{"value": {("a", "b"): Layout("nhwc")}})
self.assertRaises(Exception, layout_param_to_str, **{"value": Dimension(1)})

View File

@@ -6,7 +6,7 @@ from unittest.mock import patch, Mock
import pytest
from openvino.runtime import Core
from openvino.tools.mo.main import prepare_ir
from openvino.tools.mo.convert_impl import prepare_ir
from openvino.frontend import (
FrontEndManager,
FrontEnd,
@@ -146,7 +146,7 @@ class TestMoFreezePlaceholder(unittest.TestCase):
],
)
def test_freeze_placeholder_with_value_onnx_fe(self, input_freezing_value, use_new_fe, inputs, expected, dtype=None):
with patch("openvino.tools.mo.main.get_default_frontends") as default_fe:
with patch("openvino.tools.mo.convert_impl.get_default_frontends") as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config(use_new_fe=use_new_fe)
args.input_model = "test_model.onnx"
@@ -213,7 +213,7 @@ class TestMoFreezePlaceholder(unittest.TestCase):
],
)
def test_freeze_placeholder_with_value_mul(self, input_freezing_value, use_new_fe, inputs, expected, dtype=None):
with patch("openvino.tools.mo.main.get_default_frontends") as default_fe:
with patch("openvino.tools.mo.convert_impl.get_default_frontends") as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config(use_new_fe=use_new_fe)
args.input_model = "test_model_2.onnx"

View File

@@ -5,7 +5,7 @@ import unittest
from unittest.mock import patch, Mock
import openvino
from openvino.tools.mo.main import prepare_ir
from openvino.tools.mo.convert_impl import prepare_ir
from openvino.tools.mo.utils.error import Error
from openvino.frontend import FrontEndManager, FrontEnd # pylint: disable=no-name-in-module,import-error
from onnx.helper import make_graph, make_model, make_tensor_value_info
@@ -199,15 +199,15 @@ class TestMoFallback(unittest.TestCase):
shutil.rmtree(self.paddle_dir)
@generate(*[('dir_to_extension', None, None, 'mo_legacy', 'extensions'), # fallback
('dir_to_extension', None, True, None, None), # exception
('dir_to_extension', True, None, 'mo_legacy', None),
('', True, None, 'mo_legacy', None),
('', None, True, 'onnx_frontend', None),
@generate(*[(['dir_to_extension'], None, None, 'mo_legacy', 'extensions'), # fallback
(['dir_to_extension'], None, True, None, None), # exception
(['dir_to_extension'], True, None, 'mo_legacy', None),
([''], True, None, 'mo_legacy', None),
([''], None, True, 'onnx_frontend', None),
(None, None, None, 'onnx_frontend', None),
])
def test_fallback_if_extension_specified(self, extension, use_legacy, use_new_fe, conversion_method, fallback_reason):
with patch('openvino.tools.mo.main.get_default_frontends') as default_fe:
with patch('openvino.tools.mo.convert_impl.get_default_frontends') as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config(use_legacy, use_new_fe)
args.extensions = extension
@@ -231,10 +231,10 @@ class TestMoFallback(unittest.TestCase):
(None, True, 'onnx_frontend'),
])
def test_fallback_if_new_extension_specified(self, use_legacy, use_new_fe, conversion_method):
with patch('openvino.tools.mo.main.get_default_frontends') as default_fe:
with patch('openvino.tools.mo.convert_impl.get_default_frontends') as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config(use_legacy, use_new_fe)
args.extensions = 'onnx_fe_ext.so'
args.extensions = ['onnx_fe_ext.so']
args.input_model = "test_model.onnx"
if conversion_method:
@@ -250,10 +250,10 @@ class TestMoFallback(unittest.TestCase):
(None, True, 'onnx_frontend'),
])
def test_fallback_if_two_new_extension_specified(self, use_legacy, use_new_fe, conversion_method):
with patch('openvino.tools.mo.main.get_default_frontends') as default_fe:
with patch('openvino.tools.mo.convert_impl.get_default_frontends') as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config(use_legacy, use_new_fe)
args.extensions = 'onnx_fe_ext.so,onnx_fe_ext_2.so'
args.extensions = ['onnx_fe_ext.so', 'onnx_fe_ext_2.so']
args.input_model = "test_model.onnx"
if conversion_method:
@@ -270,7 +270,7 @@ class TestMoFallback(unittest.TestCase):
(None, None, None, 'onnx_frontend', None),
])
def test_fallback_if_tranformations_config_specified(self, trans_config, use_legacy, use_new_fe, expected_path, fallback_reason):
with patch('openvino.tools.mo.main.get_default_frontends') as default_fe:
with patch('openvino.tools.mo.convert_impl.get_default_frontends') as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config(use_legacy, use_new_fe)
args.input_model = "test_model.onnx"
@@ -291,7 +291,7 @@ class TestMoFallback(unittest.TestCase):
('test_config_3.json', None, None, 'mo_legacy', 'transformations_config'), # 'library' attribute in no transformations
])
def test_fallback_if_new_tranformations_config_specified(self, trans_config, use_legacy, use_new_fe, conversion_method, fallback_reason):
with patch('openvino.tools.mo.main.get_default_frontends') as default_fe:
with patch('openvino.tools.mo.convert_impl.get_default_frontends') as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config(use_legacy, use_new_fe)
args.input_model = "test_model.onnx"
@@ -313,7 +313,7 @@ class TestMoFallback(unittest.TestCase):
def test_exception_if_new_trans_config_on_legacy_path(self):
with patch('openvino.tools.mo.main.get_default_frontends') as default_fe:
with patch('openvino.tools.mo.convert_impl.get_default_frontends') as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config(use_legacy_fe=True)
args.input_model = "test_model.onnx"
@@ -325,7 +325,7 @@ class TestMoFallback(unittest.TestCase):
def test_exeption_if_mixed_types_of_trans_configs(self):
with patch('openvino.tools.mo.main.get_default_frontends') as default_fe:
with patch('openvino.tools.mo.convert_impl.get_default_frontends') as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config()
args.input_model = "test_model.onnx"
@@ -335,13 +335,13 @@ class TestMoFallback(unittest.TestCase):
prepare_ir(args)
@generate(*[('dir_to_extension', 'fake_config.json', None, 'mo_legacy', 'extensions, transformations_config'), # fallback
@generate(*[(['dir_to_extension'], 'fake_config.json', None, 'mo_legacy', 'extensions, transformations_config'), # fallback
(None, 'fake_config.json', None, 'mo_legacy', 'transformations_config'), # fallback
('dir_to_extension', None, None, 'mo_legacy', 'extensions'), # fallback
(['dir_to_extension'], None, None, 'mo_legacy', 'extensions'), # fallback
(None, None, True, 'onnx_frontend', None),
])
def test_fallback_if_both_extension_and_trans_config_specified(self, extension, trans_config, use_new_fe, expected_path, fallback_reason):
with patch('openvino.tools.mo.main.get_default_frontends') as default_fe:
with patch('openvino.tools.mo.convert_impl.get_default_frontends') as default_fe:
default_fe.return_value = get_test_default_frontends()
args = base_args_config(use_new_fe=use_new_fe)
args.extensions = extension
@@ -363,7 +363,7 @@ class TestMoFallback(unittest.TestCase):
(None, None, True, 'onnx_frontend'),
])
def test_fallback_if_legacy_set_as_default(self, trans_config, use_legacy, use_new_fe, expected_path):
with patch('openvino.tools.mo.main.get_default_frontends') as default_fe:
with patch('openvino.tools.mo.convert_impl.get_default_frontends') as default_fe:
default_fe.return_value = {'onnx': 'legacy', 'tf': 'legacy'}
args = base_args_config(use_legacy, use_new_fe)
args.input_model = "test_model.onnx"
@@ -376,7 +376,7 @@ class TestMoFallback(unittest.TestCase):
tm.Telemetry.send_event.assert_any_call('mo', 'fallback_reason')
@generate(*[(None, None, 'test_config_1.json', 'paddle_frontend'),
@generate(*[(None, None, ['test_config_1.json'], 'paddle_frontend'),
(True, None, None, 'paddle_frontend'),
(None, None, None, 'paddle_frontend'),
])
@@ -396,7 +396,7 @@ class TestMoFallback(unittest.TestCase):
def test_exception_if_old_extensions_used_for_pdpd(self):
args = base_args_config()
args.framework = 'paddle'
args.extensions = 'dir_to_extension'
args.extensions = ['dir_to_extension']
args.input_model = 'paddle_dir/relu/relu.pdmodel'
with pytest.raises(Error) as ex: # not called

View File

@@ -9,7 +9,7 @@ import os
from os import environ
import json
import argparse
from openvino.tools.mo.main import prepare_ir
from openvino.tools.mo.convert_impl import prepare_ir
from openvino.frontend import FrontEndManager # pylint: disable=no-name-in-module,import-error
from openvino.tools.mo.moc_frontend.analysis import json_model_analysis_dump