[TF FE] Added additional pruned inputs for MetaGraph support (#17237)

* Added handling of additional pruned inputs
Added possible topology of RestoreV2 -> AssignVariableOp
Added additional checks

* Extended tests coverage
This commit is contained in:
Georgy Krivoruchko 2023-04-28 13:03:33 +04:00 committed by GitHub
parent 269ed1d9cc
commit 835f51a5d2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 116 additions and 5 deletions

View File

@@ -337,6 +337,7 @@ std::vector<std::shared_ptr<OpPlace>> InputModel::InputModelTFImpl::topologicall
if (m_tensor_places.find(output_port_name) != m_tensor_places.end()) {
const auto& tensor_place = m_tensor_places[output_port_name];
is_input |= tensor_place->is_input();
m_found_inputs.insert(output_port_name);
}
// 3. check if the current node is an input
@@ -346,6 +347,7 @@ std::vector<std::shared_ptr<OpPlace>> InputModel::InputModelTFImpl::topologicall
if (m_tensor_places.find(producer_name) != m_tensor_places.end()) {
const auto& tensor_place = m_tensor_places[producer_name];
is_input |= tensor_place->is_input();
m_found_inputs.insert(producer_name);
}
// in case presence of NextIteration in the graph (or cycle created by other operation),

View File

@@ -448,8 +448,20 @@ void VariablesIndex::map_assignvariable(const std::shared_ptr<::tensorflow::Grap
if (restorev2_nodes.size() == 1 && varhandle_nodes.size() == 1) {
std::vector<std::string> restore_output;
// Expected path is: RestoreV2 -(output_index)-(0)-> Identity -(0)-(1)-> AssignVariableOp
PtrNode::parse_node_name(node.second->inputs[1]->node->input(0), restore_output);
FRONT_END_GENERAL_CHECK(node.second->inputs.size() >= 2,
"Amount of AssignVariableOp inputs is less than expected");
// Here are the known ways to find the correct RestoreV2 output index:
if (node.second->inputs[1]->inputs.size() >= 1 &&
node.second->inputs[1]->inputs[0]->node->op() == "RestoreV2") {
// Expected path is: RestoreV2 -(output_index)-(0)-> AnyNode -(0)-(1)-> AssignVariableOp
PtrNode::parse_node_name(node.second->inputs[1]->node->input(0), restore_output);
} else if (node.second->inputs[1]->node->op() == "RestoreV2" && node.second->node->input_size() >= 2) {
// Expected path is: RestoreV2 -(output_index)-(1)-> AssignVariableOp
PtrNode::parse_node_name(node.second->node->input(1), restore_output);
} else {
FRONT_END_THROW("Unexpected topology near AssignVariableOp");
}
int output_index = std::atoi(restore_output[restore_output.size() - 1].c_str());
@@ -473,7 +485,10 @@ void VariablesIndex::map_assignvariable(const std::shared_ptr<::tensorflow::Grap
if (restorev2_nodes.size() == 1 && variablev2_nodes.size() == 1) {
std::vector<std::string> restore_output;
// Expected path is: RestoreV2 -(output_index)-(0)-> Assign
FRONT_END_GENERAL_CHECK(node.second->node->input_size() >= 2,
"Amount of Assign inputs is less than expected");
// Expected path is: RestoreV2 -(output_index)-(1)-> Assign
PtrNode::parse_node_name(node.second->node->input(1), restore_output);
int output_index = std::atoi(restore_output[restore_output.size() - 1].c_str());

View File

@@ -545,11 +545,88 @@ TEST_F(TransformationTestsF, MetaGraphVariables) {
// create a reference graph
auto x = make_shared<Constant>(element::f32, Shape{2, 3}, vector<float>{1, 2, 3, 3, 2, 1});
auto y = make_shared<Parameter>(element::f32, Shape{1});
auto z = make_shared<Constant>(element::f32, Shape{2, 3}, vector<float>{2, 2, 1, 1, 1, 2});
auto add = make_shared<Add>(x, y);
auto sub = make_shared<Subtract>(add, z);
model_ref = make_shared<Model>(OutputVector{add}, ParameterVector{y});
model_ref = make_shared<Model>(OutputVector{sub}, ParameterVector{y});
}
}
TEST_F(TransformationTestsF, MetaGraphCut) {
{
model = convert_model("metagraph_variables/graph.meta", nullptr, {"y"});
model->validate_nodes_and_infer_types();
}
{
// create a reference graph
auto x = make_shared<Constant>(element::f32, Shape{2, 3}, vector<float>{1, 2, 3, 3, 2, 1});
auto y = make_shared<Parameter>(element::f32, Shape{1});
auto z = make_shared<Constant>(element::f32, Shape{2, 3}, vector<float>{2, 2, 1, 1, 1, 2});
auto add = make_shared<Add>(x, y);
auto sub = make_shared<Subtract>(add, z);
model_ref = make_shared<Model>(OutputVector{sub}, ParameterVector{y});
}
}
TEST_F(TransformationTestsF, MetaGraphCutInputTensor) {
{
model = convert_model("metagraph_variables/graph.meta",
nullptr,
{"0:SubOperation"},
{ov::element::f32},
{Shape{2, 3}});
model->validate_nodes_and_infer_types();
}
{
// create a reference graph
auto x = make_shared<Parameter>(element::f32, Shape{2, 3});
auto z = make_shared<Constant>(element::f32, Shape{2, 3}, vector<float>{2, 2, 1, 1, 1, 2});
auto sub = make_shared<Subtract>(x, z);
model_ref = make_shared<Model>(OutputVector{sub}, ParameterVector{x});
}
}
TEST_F(TransformationTestsF, MetaGraphCutOutputTensor) {
{
model = convert_model("metagraph_variables/graph.meta",
nullptr,
{"AddOperation:0"},
{ov::element::f32},
{Shape{2, 3}});
model->validate_nodes_and_infer_types();
}
{
// create a reference graph
auto x = make_shared<Parameter>(element::f32, Shape{2, 3});
auto z = make_shared<Constant>(element::f32, Shape{2, 3}, vector<float>{2, 2, 1, 1, 1, 2});
auto sub = make_shared<Subtract>(x, z);
model_ref = make_shared<Model>(OutputVector{sub}, ParameterVector{x});
}
}
TEST_F(TransformationTestsF, MetaGraphCutIdentity) {
{
model = convert_model("metagraph_variables/graph.meta",
nullptr,
{"AddIdentity"},
{ov::element::f32},
{Shape{2, 3}});
model->validate_nodes_and_infer_types();
}
{
// create a reference graph
auto x = make_shared<Parameter>(element::f32, Shape{2, 3});
auto z = make_shared<Constant>(element::f32, Shape{2, 3}, vector<float>{2, 2, 1, 1, 1, 2});
auto sub = make_shared<Subtract>(x, z);
model_ref = make_shared<Model>(OutputVector{sub}, ParameterVector{x});
}
}
TEST_F(TransformationTestsF, SplitInFunction) {
{
// create FAKE conversion extension for Split using named ports, this is not required for Split, but it tests

View File

@@ -10,10 +10,27 @@ import tensorflow as tf
tf.compat.v1.reset_default_graph()
with tf.compat.v1.Session() as sess:
x_value = [[1.,2.,3.],[3.,2.,1.]]
z_value = [[2.,2.,1.],[1.,1.,2.]]
tf_x = tf.Variable(x_value)
tf_y = tf.compat.v1.placeholder(dtype=tf.float32, shape=[1], name='y')
tf_z = tf.add(tf_x, tf_y, name="AddOperation")
tf_z = tf.constant(z_value)
tf_add = tf.add(tf_x, tf_y, name="AddOperation")
tf_identity = tf.identity(tf_add, name="AddIdentity")
tf.subtract(tf_identity, tf_z, name="SubOperation")
sess.run(tf.compat.v1.global_variables_initializer())
# Produces RestoreV2 -> Identity -> AssignVariableOp
saver = tf.compat.v1.train.Saver([tf_x])
input_name = tf.compat.v1.get_default_graph().get_tensor_by_name("save/Const:0")
var_handle = tf.compat.v1.get_default_graph().get_tensor_by_name("Variable:0")
# Produces RestoreV2 -> Pack -> AssignVariableOp
restorev2 = tf.raw_ops.RestoreV2(prefix=input_name, tensor_names=["Variable"], shape_and_slices=[""], dtypes=[tf.float32], name="save/RestoreV2/wPack")
assign_var = tf.raw_ops.AssignVariableOp(resource = var_handle, value = restorev2)
# Produces RestoreV2 -> AssignVariableOp
restorev2 = tf.raw_ops.RestoreV2(prefix=input_name, tensor_names=["Variable"], shape_and_slices=[""], dtypes=[tf.float32], name="save/RestoreV2/Direct")
assign_var = tf.compat.v1.raw_ops.AssignVariableOp(resource = var_handle, value = restorev2[0])
os.makedirs(os.path.join(sys.argv[1], "metagraph_variables"))
saver.save(sess, os.path.join(sys.argv[1], "metagraph_variables", "graph"))