[TF FE] Add info messages in Model Optimizer (#14586)

* [TF FE] Add info messages in Model Optimizer and the preliminary user documentation

Signed-off-by: Kazantsev, Roman <roman.kazantsev@intel.com>

* Add test model with future operation

* Update docs/MO_DG/prepare_model/TensorFlow_Frontend.md

Co-authored-by: Maxim Vafin <maxim.vafin@intel.com>

* Apply code-review feedback: restore the use of dict

* Address the rest of the review feedback

* Remove user documentation for TF FE

* Correct tests to provide required arguments

* Handle argv without a framework argument in the deduce_framework function

Signed-off-by: Kazantsev, Roman <roman.kazantsev@intel.com>
Co-authored-by: Maxim Vafin <maxim.vafin@intel.com>
Roman Kazantsev 2022-12-13 15:01:24 +04:00 committed by GitHub
parent 519cb35983
commit 79e34ccbb3
7 changed files with 178 additions and 9 deletions
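Taken together, the changes below make Model Optimizer print one of two informational messages for TensorFlow models, depending on which frontend is used and whether conversion succeeds. A condensed sketch of the selection logic (illustrative only; the actual code lives in the `main()` hunks below):

```python
def pick_tf_info_message(is_tf: bool, use_new_frontend: bool, conversion_failed: bool):
    # Successful conversion with the new TensorFlow Frontend: remind the user
    # that the produced IR requires OpenVINO API v2.0.
    if is_tf and use_new_frontend and not conversion_failed:
        return "get_tf_fe_message()"
    # Failure on the legacy TensorFlow path: hint that --use_new_frontend may help.
    if is_tf and not use_new_frontend and conversion_failed:
        return "get_tf_fe_legacy_message()"
    return None
```

The real checks rely on `deduce_legacy_frontend_by_namespace()` to decide whether the model is a TensorFlow one (the `is_tf` flag).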


@@ -2,9 +2,9 @@
# SPDX-License-Identifier: Apache-2.0
import argparse
import logging as log
import os
import sys
import logging as log
try:
import openvino_telemetry as tm
@@ -17,8 +17,10 @@ from openvino.tools.mo.utils.cli_parser import get_model_name_from_args
from openvino.tools.mo.utils.logger import init_logger
from openvino.tools.mo.utils.error import Error, FrameworkError
import traceback
from openvino.tools.mo.utils.get_ov_update_message import get_ov_update_message, get_ov_api20_message
from openvino.tools.mo.utils.get_ov_update_message import get_ov_update_message, get_ov_api20_message, \
get_tf_fe_message, get_tf_fe_legacy_message
from openvino.tools.mo.utils.model_analysis import AnalysisResults
from openvino.tools.mo.utils.guess_framework import deduce_legacy_frontend_by_namespace
# pylint: disable=no-name-in-module,import-error
from openvino.frontend import FrontEndManager
@@ -37,6 +39,7 @@ def main(cli_parser: argparse.ArgumentParser, framework=None):
argv = cli_parser.parse_args()
logger.disabled = False
argv.model_name = get_model_name_from_args(argv)
is_tf, _, _, _, _ = deduce_legacy_frontend_by_namespace(argv)
argv = vars(argv)
if framework is not None:
@@ -51,6 +54,8 @@ def main(cli_parser: argparse.ArgumentParser, framework=None):
print(ov_update_message)
if ov_api20_message is not None and ngraph_function is not None:
print(ov_api20_message)
if argv['use_new_frontend'] and is_tf:
print(get_tf_fe_message())
except (FileNotFoundError, NotADirectoryError) as e:
log.error('File {} was not found'.format(str(e).split('No such file or directory:')[1]))
@@ -61,6 +66,8 @@ def main(cli_parser: argparse.ArgumentParser, framework=None):
for el in analysis_results.get_messages():
log.error(el, extra={'analysis_info': True})
log.error(err)
if not argv['use_new_frontend'] and is_tf:
print(get_tf_fe_legacy_message())
log.debug(traceback.format_exc())
except FrameworkError as err:
log.error(err, extra={'framework_error': True})
@@ -74,6 +81,8 @@ def main(cli_parser: argparse.ArgumentParser, framework=None):
log.error(traceback.format_exc())
log.error("---------------- END OF BUG REPORT --------------")
log.error("-------------------------------------------------")
if not argv['use_new_frontend'] and is_tf:
print(get_tf_fe_legacy_message())
if ngraph_function is None:
return 1


@@ -17,10 +17,27 @@ def get_ov_update_message():
def get_ov_api20_message():
link = "https://docs.openvino.ai"
link = "https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html"
message = '[ INFO ] The model was converted to IR v11, the latest model format that corresponds to the source DL framework ' \
'input/output format. While IR v11 is backwards compatible with OpenVINO Inference Engine API v1.0, ' \
'please use API v2.0 (as of 2022.1) to take advantage of the latest improvements in IR v11.\n' \
'Find more information about API v2.0 and IR v11 at {}'.format(link)
return message
def get_tf_fe_message():
link = "https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html"
message = '[ INFO ] IR generated by new TensorFlow Frontend is compatible only with API v2.0. Please make sure to use API v2.0.\n' \
'Find more information about API v2.0 at {}'.format(link)
return message
def get_tf_fe_legacy_message():
link = "https://docs.openvino.ai/latest/openvino_docs_MO_DG_TensorFlow_Frontend.html"
message = '[ INFO ] You can also try to use new TensorFlow Frontend (preview feature as of 2022.3) by adding `--use_new_frontend` ' \
'option into Model Optimizer command-line.\n' \
'Find more information about new TensorFlow Frontend at {}'.format(link)
return message
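Both helpers are plain string builders, so the quickest way to see the exact wording is to print them from a Python shell (assuming an installation of Model Optimizer that already contains this change):

```python
from openvino.tools.mo.utils.get_ov_update_message import get_tf_fe_message, get_tf_fe_legacy_message

print(get_tf_fe_message())         # "[ INFO ] IR generated by new TensorFlow Frontend is compatible only with API v2.0. ..."
print(get_tf_fe_legacy_message())  # "[ INFO ] You can also try to use new TensorFlow Frontend (preview feature as of 2022.3) ..."
```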


@@ -5,11 +5,10 @@ import re
from argparse import Namespace
from openvino.tools.mo.utils.error import Error
from openvino.tools.mo.utils.utils import refer_to_faq_msg
def deduce_legacy_frontend_by_namespace(argv: Namespace):
if not argv.framework:
if not hasattr(argv, 'framework') or not argv.framework:
if getattr(argv, 'saved_model_dir', None) or getattr(argv, 'input_meta_graph', None):
argv.framework = 'tf'
elif getattr(argv, 'input_symbol', None) or getattr(argv, 'pretrained_model_name', None):
@@ -39,4 +38,3 @@ def guess_framework_by_ext(input_model_path: str) -> int:
return 'kaldi'
elif re.match(r'^.*\.onnx$', input_model_path):
return 'onnx'
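The added `hasattr()` guard means `deduce_legacy_frontend_by_namespace()` no longer assumes that the namespace defines `framework` at all. A rough illustration of the TensorFlow case (the unchanged remainder of the function, which is not shown in this hunk, is assumed to behave as before):

```python
from argparse import Namespace
from openvino.tools.mo.utils.guess_framework import deduce_legacy_frontend_by_namespace

# A namespace without a 'framework' attribute used to raise AttributeError here;
# now the framework is deduced from the other attributes (saved_model_dir implies TF).
argv = Namespace(saved_model_dir="/path/to/saved_model")
is_tf, _, _, _, _ = deduce_legacy_frontend_by_namespace(argv)
assert is_tf and argv.framework == 'tf'
```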


@@ -9,7 +9,6 @@ import pytest
from openvino.tools.mo.utils.error import FrameworkError
ngraph_available = True
try:
from openvino.tools.mo.main import main
@@ -21,7 +20,12 @@ ngraph_needed = pytest.mark.skipif(not ngraph_available,
class TestMainErrors(unittest.TestCase):
@patch('argparse.ArgumentParser.parse_args', return_value=argparse.Namespace())
@patch('argparse.ArgumentParser.parse_args', return_value=argparse.Namespace(
use_legacy_frontend=False,
use_new_frontend=False,
framework=None,
input_model="abc.pbtxt"
))
@patch('openvino.tools.mo.convert_impl.driver', side_effect=FrameworkError('FW ERROR MESSAGE'))
@ngraph_needed
def test_FrameworkError(self, mock_argparse, mock_driver):
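The richer namespace is required because `main()` now calls `deduce_legacy_frontend_by_namespace()` before dispatching, so a mocked `parse_args` result has to carry the frontend-related fields. The patching pattern itself is generic; a self-contained sketch of the same idea:

```python
import argparse
import unittest
from unittest.mock import patch


def cli_entry():
    # Stand-in for an entry point that parses its own command line.
    args = argparse.ArgumentParser().parse_args()
    return args.framework


class TestPatchedParseArgs(unittest.TestCase):
    @patch('argparse.ArgumentParser.parse_args',
           return_value=argparse.Namespace(framework='tf', use_new_frontend=False))
    def test_canned_namespace(self, mock_parse_args):
        self.assertEqual(cli_entry(), 'tf')


if __name__ == '__main__':
    unittest.main()
```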


@@ -11,7 +11,10 @@ def mocked_parse_args(*argv):
# Mock parse_args method which generates warning
import logging as log
log.error("warning", extra={'is_warning': True})
argv = argparse.Namespace()
argv = argparse.Namespace(use_legacy_frontend=False,
use_new_frontend=False,
framework=None,
input_model="abc.pbtxt")
return argv


@@ -0,0 +1,80 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import argparse
import io
import os
import unittest
from contextlib import redirect_stdout
from unittest.mock import patch

from openvino.tools.mo.main import main
from openvino.tools.mo.utils.get_ov_update_message import get_tf_fe_message, get_tf_fe_legacy_message


def arg_parse_helper(input_model,
                     use_legacy_frontend,
                     use_new_frontend,
                     input_model_is_text,
                     framework):
    path = os.path.dirname(__file__)
    input_model = os.path.join(path, "test_models", input_model)

    return argparse.Namespace(
        input_model=input_model,
        use_legacy_frontend=use_legacy_frontend,
        use_new_frontend=use_new_frontend,
        framework=framework,
        input_model_is_text=input_model_is_text,
        log_level='INFO',
        silent=True,
        model_name=None,
        transform=[],
        scale=None,
        output=None,
        input=None,
        input_shape=None,
        batch=None,
        input_checkpoint=None,
        saved_model_dir=None,
        input_meta_graph=None,
        saved_model_tags=None,
        output_dir='.',
        mean_values=(),
        scale_values=(),
        layout={},
        source_layout={},
        target_layout={},
        freeze_placeholder_with_value=None,
        tensorflow_use_custom_operations_config=None,
        data_type=None,
        tensorflow_custom_operations_config_update=None,
    )


class TestInfoMessagesTFFE(unittest.TestCase):
    @patch('argparse.ArgumentParser.parse_args',
           return_value=arg_parse_helper(input_model="model_int32.pbtxt",
                                         use_legacy_frontend=False, use_new_frontend=True,
                                         framework=None, input_model_is_text=True))
    def test_api20_only(self, mock_argparse):
        f = io.StringIO()
        with redirect_stdout(f):
            main(argparse.ArgumentParser())
            std_out = f.getvalue()
        tf_fe_message_found = get_tf_fe_message() in std_out
        tf_fe_legacy_message_found = get_tf_fe_legacy_message() in std_out
        assert tf_fe_message_found and not tf_fe_legacy_message_found

    @patch('argparse.ArgumentParser.parse_args',
           return_value=arg_parse_helper(input_model="future_op.pbtxt",
                                         use_legacy_frontend=True, use_new_frontend=False,
                                         framework=None, input_model_is_text=True))
    def test_tf_fe_legacy(self, mock_argparse):
        f = io.StringIO()
        with redirect_stdout(f):
            main(argparse.ArgumentParser())
            std_out = f.getvalue()
        tf_fe_message_found = get_tf_fe_message() in std_out
        tf_fe_legacy_message_found = get_tf_fe_legacy_message() in std_out
        assert tf_fe_legacy_message_found and not tf_fe_message_found
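The assertion style used by both tests, capturing everything written to stdout and searching it for the exact helper string, is plain `contextlib.redirect_stdout`; stripped of the Model Optimizer specifics, it reduces to:

```python
import io
from contextlib import redirect_stdout


def noisy():
    print("[ INFO ] example message")


buffer = io.StringIO()
with redirect_stdout(buffer):
    noisy()
assert "[ INFO ] example message" in buffer.getvalue()
```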


@@ -0,0 +1,58 @@
node {
  name: "in1"
  op: "Placeholder"
  attr {
    key: "dtype"
    value {
      type: DT_INT32
    }
  }
  attr {
    key: "shape"
    value {
      shape {
        dim {
          size: 2
        }
        dim {
          size: 3
        }
      }
    }
  }
}
node {
  name: "in2"
  op: "Placeholder"
  attr {
    key: "dtype"
    value {
      type: DT_INT32
    }
  }
  attr {
    key: "shape"
    value {
      shape {
        dim {
          size: 2
        }
        dim {
          size: 3
        }
      }
    }
  }
}
node {
  name: "future_op"
  op: "FutureOp"
  input: "in1"
  input: "in2"
  attr {
    key: "T"
    value {
      type: DT_INT32
    }
  }
}
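`FutureOp` is intentionally not a real TensorFlow operation, so converting this model through the legacy TensorFlow path fails, which is what lets the test above check for the `--use_new_frontend` hint. The file itself is ordinary GraphDef prototext and can be inspected with the protobuf runtime (the sketch below assumes TensorFlow is installed and the file is in the current directory):

```python
import tensorflow as tf
from google.protobuf import text_format

graph_def = tf.compat.v1.GraphDef()
with open("future_op.pbtxt") as f:
    text_format.Parse(f.read(), graph_def)
# Lists the ops in the test model: ['Placeholder', 'Placeholder', 'FutureOp']
print([node.op for node in graph_def.node])
```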