[TF FE] Report the full list of unsupported operations (#17143)
parent d972a71b4c
commit 3830125e3b
@@ -33,16 +33,14 @@ using namespace ov::frontend::tensorflow;
 
 namespace {
 void get_unsupported_operations_and_failures(const std::shared_ptr<Model>& model,
-                                             std::vector<std::string>& unsupported_operations,
+                                             std::set<std::string>& unsupported_operations,
                                              std::unordered_map<std::string, std::string>& failures) {
     for (const auto& node : model->get_ordered_ops()) {
         if (const auto& fw_node = ov::as_type_ptr<FrameworkNode>(node)) {
             auto op_type = fw_node->get_decoder()->get_op_type();
             // if this operation is encountered among unsupported operations
             // or conversion failures, skip it
-            if (failures.count(op_type) > 0 ||
-                std::find(unsupported_operations.begin(), unsupported_operations.end(), op_type) !=
-                    unsupported_operations.end()) {
+            if (failures.count(op_type) > 0 || unsupported_operations.count(op_type) > 0) {
                 continue;
             }
             auto fw_node_attrs = fw_node->get_attrs();
@@ -52,7 +50,7 @@ void get_unsupported_operations_and_failures(const std::shared_ptr<Model>& model
                 failures[op_type] = fw_node_attrs.at(FrameworkNode::failed_conversion_key);
             } else {
                 // found new unsupported operation
-                unsupported_operations.push_back(op_type);
+                unsupported_operations.insert(op_type);
             }
         }
         if (const auto& fw_node = ov::as_type_ptr<ov::op::util::MultiSubGraphOp>(node)) {
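Side note on the container change above: std::set stores each unsupported op type once and iterates in lexicographic order, so the reported list is deduplicated and deterministic. A rough Python sketch of that property (illustrative only, not part of the patch; Python sets are unordered, so sorted() stands in for std::set's ordering):

```python
# Illustrative only: std::set keeps unique keys in sorted order; a Python set
# plus an explicit sort emulates the same dedup + deterministic ordering.
encountered = ["Switch", "Enter", "Merge", "Enter", "Switch"]  # op types as visited
unsupported_operations = sorted(set(encountered))
print(unsupported_operations)  # ['Enter', 'Merge', 'Switch']
```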
@@ -208,7 +206,7 @@ std::shared_ptr<ov::Model> FrontEnd::convert(const ov::frontend::InputModel::Ptr
     auto f = convert_partially(model);
 
     std::unordered_map<std::string, std::string> failures;
-    std::vector<std::string> unsupported_operations;
+    std::set<std::string> unsupported_operations;
     get_unsupported_operations_and_failures(f, unsupported_operations, failures);
 
     std::stringstream exception_message;
@@ -216,7 +214,7 @@ std::shared_ptr<ov::Model> FrontEnd::convert(const ov::frontend::InputModel::Ptr
         if (m_telemetry) {
             // TODO: 105173 support anonymization of exception message in order to send to telemetry
         }
-        exception_message << "[TensorFlow Frontend] Internal error: conversion is failed for " + failure.first +
+        exception_message << "[TensorFlow Frontend] Internal error, conversion is failed for " + failure.first +
                                  " operation with a message:\n" + failure.second + "\n";
     }
@@ -225,12 +223,16 @@ std::shared_ptr<ov::Model> FrontEnd::convert(const ov::frontend::InputModel::Ptr
             m_telemetry->send_event("error_cause", "tf_" + unsupported_operation);
         }
     }
-    // TODO 107500: report the full list of unsupported operations
-    // also, communicate with MO for the fallback to the legacy FE
-    // via OpConversionFailure exception that will store all failures and unsupported_operations
     if (unsupported_operations.size() > 0) {
-        exception_message << "[TensorFlow Frontend] Internal error: No translator found for " +
-                                 unsupported_operations[0] + " node.";
+        exception_message << "[TensorFlow Frontend] Internal error, no translator found for operation(s): ";
+        size_t counter = 0;
+        for (const auto& unsupported_operation : unsupported_operations) {
+            if (counter > 0) {
+                exception_message << ", ";
+            }
+            exception_message << unsupported_operation;
+            ++counter;
+        }
     }
 
     bool is_conversion_successful = ((unsupported_operations.size() == 0) && (failures.size() == 0));
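For reference, a minimal sketch (not part of the patch; the operation names are taken from the tests below) of the single-line message that the loop above assembles:

```python
# Minimal sketch of the message built by the C++ loop above; the prefix string
# and the comma-separated format are what the MO fallback regex expects.
unsupported_operations = ["Enter", "Exit", "LoopCond", "Merge", "NextIteration", "Switch"]

exception_message = ("[TensorFlow Frontend] Internal error, no translator found for operation(s): "
                     + ", ".join(unsupported_operations))
print(exception_message)
# [TensorFlow Frontend] Internal error, no translator found for operation(s): Enter, Exit, LoopCond, Merge, NextIteration, Switch
```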
@@ -151,7 +151,8 @@ TEST(FrontEndConvertModelTest, test_unsupported_tf1_while) {
                   "OpConversionFailure is expected.";
     } catch (const OpConversionFailure& error) {
         string error_message = error.what();
-        string ref_message = "No translator found for Enter node.";
+        string ref_message = "[TensorFlow Frontend] Internal error, no translator found for operation(s): Enter, Exit, "
+                             "LoopCond, Merge, NextIteration, Switch";
         ASSERT_TRUE(error_message.find(ref_message) != string::npos);
         ASSERT_EQ(model, nullptr);
     } catch (...) {
@@ -188,7 +189,8 @@ TEST(FrontEndConvertModelTest, test_unsupported_tf1_while_and_incorrect_less_tra
         string error_message = error.what();
         string ref_message = "Less expects ten inputs.\n"
                              "\n"
-                             "[TensorFlow Frontend] Internal error: No translator found for Enter node.";
+                             "[TensorFlow Frontend] Internal error, no translator found for operation(s): Enter, Exit, "
+                             "LoopCond, Merge, NextIteration, Switch";
         ASSERT_TRUE(error_message.find(ref_message) != string::npos);
         ASSERT_EQ(model, nullptr);
     } catch (...) {
@@ -208,7 +210,8 @@ TEST(FrontEndConvertModelTest, conversion_with_unknown_exception) {
     } catch (const OpConversionFailure& error) {
         string error_message = error.what();
         string ref_message = "Unknown exception type\n"
-                             "[TensorFlow Frontend] Internal error: No translator found for Enter node.";
+                             "[TensorFlow Frontend] Internal error, no translator found for operation(s): Enter, Exit, "
+                             "LoopCond, Merge, NextIteration, Switch";
         ASSERT_TRUE(error_message.find(ref_message) != string::npos);
         ASSERT_EQ(model, nullptr);
     } catch (...) {
@@ -85,7 +85,7 @@ TEST(TFTelemetryTest, test_nonexistent_add) {
         FAIL() << "Non-existent operation Adddd must not be supported by TF FE.";
     } catch (const OpConversionFailure& error) {
         string error_message = error.what();
-        string ref_message = "No translator found for Adddd node.";
+        string ref_message = "Internal error, no translator found for operation(s): Adddd";
         ASSERT_TRUE(error_message.find(ref_message) != string::npos);
         ASSERT_EQ(function, nullptr);
@@ -303,9 +303,9 @@ def update_fallback_with_conversion_error(use_new_frontend: bool, is_tf: bool, e
         return False
 
     # for TensorFlow FE we have a set of operations that should lead to the fallback to the legacy
-    conversion_error_re = r"^(\[TensorFlow\ Frontend\]\ Internal\ error\:\ No\ translator\ found\ for\ )(\w+)(\ node\.)$"
+    conversion_error_re = r"^(\[TensorFlow\ Frontend\]\ Internal\ error\,\ no\ translator\ found\ for\ operation\(s\)\:\ )((\w+)(\,\ \w+)*)$"
     conversion_error_match = re.findall(conversion_error_re, ex_msg, re.MULTILINE)
-    fallback_operations = [
+    all_fallback_operations = [
         # corresponds to TF1 While operation
         "TensorArrayScatterV3", "TensorArrayV3", "TensorArraySizeV3", "TensorArrayGatherV3",
         "LoopCond", "Enter", "NextIteration", "Exit",
@@ -316,11 +316,17 @@ def update_fallback_with_conversion_error(use_new_frontend: bool, is_tf: bool, e
         "RFFT", "RFFT2D", "RFFT3D", "IRFFT", "IRFFT2D", "IRFFT3D",
         "Complex", "ComplexAbs", "Real", "Imag",
     ]
-    if len(conversion_error_match) < 1 or len(conversion_error_match[0]) != 3 or \
-            conversion_error_match[0][1] not in fallback_operations:
+    if len(conversion_error_match) < 1 or len(conversion_error_match[0]) != 4:
         # no match for the fallback by unsupported operation
         return False
 
-    fallback_reasons.append("Unsupported operation: " + conversion_error_match[0][1])
+    unsupported_operations = conversion_error_match[0][1].replace(" ", "").split(",")
+    fallback_operations = [operation for operation in unsupported_operations if operation in all_fallback_operations]
+
+    if len(fallback_operations) == 0:
+        return False
+
+    fallback_reasons.append("Fallback to the legacy TF FE due to operation(s): " + ', '.join(fallback_operations))
     return True
 
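A standalone sketch of how the updated pattern consumes such a message: the whole comma-separated list lands in one capture group, which is then split and intersected with all_fallback_operations (the error text below comes from the tests above; the snippet itself is only illustrative):

```python
import re

# Pattern from update_fallback_with_conversion_error above.
conversion_error_re = r"^(\[TensorFlow\ Frontend\]\ Internal\ error\,\ no\ translator\ found\ for\ operation\(s\)\:\ )((\w+)(\,\ \w+)*)$"
ex_msg = ("[TensorFlow Frontend] Internal error, no translator found for operation(s): "
          "Enter, Exit, LoopCond, Merge, NextIteration, Switch")

conversion_error_match = re.findall(conversion_error_re, ex_msg, re.MULTILINE)
# Each match is a tuple of 4 groups; index 1 holds the full operation list.
unsupported_operations = conversion_error_match[0][1].replace(" ", "").split(",")
print(unsupported_operations)
# ['Enter', 'Exit', 'LoopCond', 'Merge', 'NextIteration', 'Switch']
```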
@@ -51,6 +51,7 @@ def arg_parse_helper(input_model,
         data_type=None,
         tensorflow_custom_operations_config_update=None,
         compress_to_fp16=compress_to_fp16,
         extensions=None
     )
 
@@ -1,87 +1,30 @@
 # Copyright (C) 2018-2023 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-import argparse
 import os
 import unittest
 from unittest.mock import Mock
 
 import numpy as np
 from generator import generator, generate
 
 from openvino.frontend import (
     FrontEndManager,
     FrontEnd,
 )  # pylint: disable=no-name-in-module,import-error
 from openvino.runtime import Core
-from openvino.tools.mo.convert_impl import prepare_ir
-
-
-def base_args_config():
-    args = argparse.Namespace()
-    args.feManager = FrontEndManager()
-    args.extensions = None
-    # use new TF FE
-    args.use_legacy_frontend = False
-    args.use_new_frontend = True
-    args.framework = "tf"
-    args.model_name = None
-    args.input_model = None
-    args.input_model_is_text = False
-    args.input_checkpoint = None
-    args.saved_model_dir = None
-    args.input_meta_graph = None
-    args.saved_model_tags = None
-    args.silent = True
-    args.transform = []
-    args.scale = None
-    args.output = None
-    args.input = None
-    args.input_shape = None
-    args.batch = None
-    args.mean_values = None
-    args.scale_values = None
-    args.output_dir = os.getcwd()
-    args.freeze_placeholder_with_value = None
-    args.transformations_config = None
-    args.static_shape = None
-    args.reverse_input_channels = None
-    args.data_type = None
-    args.layout = None
-    args.source_layout = None
-    args.target_layout = None
-    return args
-
-
 try:
     import openvino_telemetry as tm
 except ImportError:
     import openvino.tools.mo.utils.telemetry_stub as tm
+from openvino.tools.mo.convert import convert_model
 
 
 @generator
 class TestMoFreezePlaceholderTFFE(unittest.TestCase):
     def setUp(self):
         tm.Telemetry.__init__ = Mock(return_value=None)
         tm.Telemetry.send_event = Mock()
         FrontEnd.add_extension = Mock()
 
     def basic(self, input_model, argv_input, inputs, dtype, expected, freeze_placeholder_with_value=None,
               input_shape=None, only_conversion=False, input_model_is_text=True, use_new_frontend=True,
               use_legacy_frontend=False):
         path = os.path.dirname(__file__)
         input_model = os.path.join(path, "test_models", input_model)
-        args = base_args_config()
-        args.input_model = input_model
-        args.input = argv_input
-        args.freeze_placeholder_with_value = freeze_placeholder_with_value
-        args.input_shape = input_shape
-        args.input_model_is_text = input_model_is_text
-        args.use_new_frontend = use_new_frontend
-        args.use_legacy_frontend = use_legacy_frontend
 
         try:
-            _, model = prepare_ir(args)
+            model = convert_model(input_model, input=argv_input,
+                                  freeze_placeholder_with_value=freeze_placeholder_with_value,
+                                  input_shape=input_shape, input_model_is_text=input_model_is_text,
+                                  use_new_frontend=use_new_frontend, use_legacy_frontend=use_legacy_frontend,
+                                  framework="tf")
         except Exception as ex:
             self.fail("Model conversion failed due to error: {}".format(ex))
@@ -297,7 +240,10 @@ class TestMoFreezePlaceholderTFFE(unittest.TestCase):
                    None, None, True, True, False, False)
 
     def test_conversion_failure_fallback_use_new_frontend(self):
-        with self.assertRaisesRegex(Exception, "Internal error: No translator found for Enter node"):
+        with self.assertRaisesRegex(Exception,
+                                    "\[TensorFlow Frontend\] Internal error, no translator found for operation\(s\)\: "
+                                    "Enter\, Exit\, LoopCond\, Merge\, NextIteration\, Switch\, TensorArrayGatherV3\, "
+                                    "TensorArraySizeV3\, TensorArrayV3"):
             self.basic("ctc_model_based.pbtxt", None, None, None, None,
                        None, None, True, True, True, False)
 
@@ -326,6 +272,7 @@ class TestMoFreezePlaceholderTFFE(unittest.TestCase):
             ),
         ],
     )
+    @unittest.skip("109220: Use generating script for this test model instead of Git LFS")
     def test_conversion_model_with_non_standard_extension(self, input_freezing_value, inputs, expected,
                                                           dtype):
         self.basic("model_fp32.frozen", input_freezing_value, inputs, dtype, expected, only_conversion=False,
@@ -340,6 +287,7 @@ class TestMoFreezePlaceholderTFFE(unittest.TestCase):
                    only_conversion=True, input_model_is_text=False, use_new_frontend=True,
                    use_legacy_frontend=False)
 
+    @unittest.skip("109220: Make TF FE to return the error")
     def test_conversion_dir_model(self):
         with self.assertRaisesRegex(Exception,
                                     "Internal error or inconsistent input model: the frontend supports "