[TF FE] WA: Support Non-frozen Formats (#13289)
* [TF FE] WA: Support Non-frozen Formats
* Normalize path to the intermediate model
* Apply feedback: add path to return and correct tests
* Check that the file is not empty in the test

Signed-off-by: Kazantsev, Roman <roman.kazantsev@intel.com>
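For context, the workaround flows as follows: when the new TensorFlow frontend is selected and the input is a non-frozen format (checkpoint, MetaGraph, or SavedModel), Model Optimizer first freezes the model into an auxiliary .pb file, retargets argv.input_model at that file, runs the MOC pipeline, and finally deletes the auxiliary .pb. The sketch below is a minimal illustration of that lifecycle; only convert_to_pb comes from the patch, while run_moc_pipeline and the wrapping function are hypothetical stand-ins, and the patch itself performs these steps inline in prepare_ir rather than through a try/finally.

    import os
    from openvino.tools.mo.front.tf.loader import convert_to_pb

    def convert_with_frozen_workaround(argv, run_moc_pipeline):
        # Freeze checkpoint/MetaGraph/SavedModel into a temporary .pb and point argv.input_model at it
        path_to_aux_pb = convert_to_pb(argv)
        try:
            # Run the regular MOC conversion on the now-frozen model
            return run_moc_pipeline(argv)
        finally:
            # Clean up the auxiliary .pb whether or not conversion succeeded
            if path_to_aux_pb is not None and os.path.exists(path_to_aux_pb):
                os.remove(path_to_aux_pb)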
@@ -358,6 +358,14 @@ def get_moc_frontends(argv: argparse.Namespace):
 def prepare_ir(argv: argparse.Namespace):
+    # TODO: remove this workaround once the new TensorFlow frontend supports non-frozen formats: checkpoint, MetaGraph, and SavedModel
+    # For now it converts all TensorFlow formats to the frozen .pb format in case the new TensorFlow frontend is used
+    is_tf, _, _, _, _ = deduce_legacy_frontend_by_namespace(argv)
+    path_to_aux_pb = None
+    if argv.use_new_frontend and is_tf:
+        from openvino.tools.mo.front.tf.loader import convert_to_pb
+        path_to_aux_pb = convert_to_pb(argv)
+
     argv = arguments_post_parsing(argv)
     t = tm.Telemetry()

     graph = None
@@ -380,6 +388,12 @@ def prepare_ir(argv: argparse.Namespace):
                 moc_front_end.add_extension(extension)
             ngraph_function = moc_pipeline(argv, moc_front_end)
+
+            # TODO: remove this workaround once the new TensorFlow frontend supports non-frozen formats: checkpoint, MetaGraph, and SavedModel
+            # For now it converts all TensorFlow formats to the frozen .pb format in case the new TensorFlow frontend is used
+            if argv.use_new_frontend and is_tf and path_to_aux_pb is not None:
+                if os.path.exists(path_to_aux_pb):
+                    os.remove(path_to_aux_pb)
+
             return graph, ngraph_function
         else:  # apply fallback
             reasons_message = ", ".join(fallback_reasons)
@@ -435,7 +449,7 @@ def emit_ir(graph: Graph, argv: argparse.Namespace):
             func = read_model(orig_model_name + "_tmp.xml")

     return_code = "not executed"
-    if not(argv.framework == 'tf' and argv.tensorflow_custom_operations_config_update):
+    if not (argv.framework == 'tf' and argv.tensorflow_custom_operations_config_update):
         try:
             from openvino.tools.mo.back.offline_transformations import apply_offline_transformations
             func = apply_offline_transformations(func, argv)
@@ -306,7 +306,9 @@ def convert_to_pb(argv: argparse.Namespace):
     argv.model_name = model_name
     tf_v1.io.write_graph(graph_def, argv.output_dir if argv.output_dir != '.' else os.getcwd(),
                          model_name + "_tmp.pb", as_text=False)
-    argv.input_model = model_name + "_tmp.pb"
+    path_to_pb = os.path.normpath(os.path.join(argv.output_dir, model_name + "_tmp.pb"))
+    argv.input_model = path_to_pb
+    return path_to_pb


 def protobuf_attrs(pb: tf_v1.NodeDef):
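The new return value is built with os.path.join and normalized with os.path.normpath, so callers receive a clean, platform-specific path even when output_dir contains '.' or '..' components. A small standard-library illustration of the behavior being relied on (POSIX-style example paths):

    import os

    # './out/sub/..' joined with 'model_tmp.pb' collapses to 'out/model_tmp.pb'
    print(os.path.normpath(os.path.join('./out/sub/..', 'model' + '_tmp.pb')))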
tools/mo/unit_tests/mo/front/tf/convert_to_pb_test.py (new file, 54 lines)
@@ -0,0 +1,54 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import argparse
import os
import tempfile
import unittest

from openvino.tools.mo.front.tf.loader import convert_to_pb


class ConvertToPBTests(unittest.TestCase):
    test_directory = os.path.dirname(os.path.realpath(__file__))

    def setUp(self):
        self.argv = argparse.Namespace(input_model=None, input_model_is_text=False, input_checkpoint=None, output=None,
                                       saved_model_dir=None, input_meta_graph=None, saved_model_tags=None,
                                       model_name='model', output_dir=None)

    def test_saved_model(self):
        import tensorflow as tf
        with tempfile.TemporaryDirectory(dir=self.test_directory) as tmp_dir:
            inputs = tf.keras.Input(shape=(3,))
            x = tf.keras.layers.Dense(4, activation=tf.nn.relu)(inputs)
            outputs = tf.keras.layers.Dense(5, activation=tf.nn.softmax)(x)
            model = tf.keras.Model(inputs=inputs, outputs=outputs)
            model.save(tmp_dir)
            self.argv.saved_model_dir = tmp_dir
            self.argv.output_dir = tmp_dir
            path_to_pb = convert_to_pb(self.argv)
            self.assertTrue(os.path.exists(path_to_pb), "The auxiliary .pb is not generated")
            self.assertTrue(os.path.getsize(path_to_pb) != 0, "The auxiliary .pb is empty")

    def test_meta_format(self):
        try:
            import tensorflow.compat.v1 as tf_v1
            tf_v1.disable_eager_execution()
        except ImportError:
            import tensorflow as tf_v1

        with tempfile.TemporaryDirectory(dir=self.test_directory) as tmp_dir:
            a = tf_v1.get_variable("A", initializer=tf_v1.constant(3, shape=[2]))
            b = tf_v1.get_variable("B", initializer=tf_v1.constant(5, shape=[2]))
            tf_v1.add(a, b, name='Add')
            init_op = tf_v1.global_variables_initializer()
            saver = tf_v1.train.Saver()
            with tf_v1.Session() as sess:
                sess.run(init_op)
                saver.save(sess, os.path.join(tmp_dir, 'model'))
            self.argv.input_meta_graph = os.path.join(tmp_dir, 'model.meta')
            self.argv.output_dir = tmp_dir
            path_to_pb = convert_to_pb(self.argv)
            self.assertTrue(os.path.exists(path_to_pb), "The auxiliary .pb is not generated")
            self.assertTrue(os.path.getsize(path_to_pb) != 0, "The auxiliary .pb is empty")
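The added module is a plain unittest.TestCase, so any unittest-compatible runner can collect it; for example, assuming TensorFlow and the openvino.tools.mo package are installed, pytest can run it directly by file path:

    pytest tools/mo/unit_tests/mo/front/tf/convert_to_pb_test.py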