[TF FE] Refactor ReverseSequence and add layer test (#15807)

Signed-off-by: Kazantsev, Roman <roman.kazantsev@intel.com>
Author: Roman Kazantsev
Date: 2023-02-20 16:26:19 +04:00 (committed by GitHub)
parent 1a070b225e
commit bc8d0ec71e
2 changed files with 56 additions and 3 deletions


@@ -18,10 +18,10 @@ OutputVector translate_reverse_sequence_op(const NodeContext& node) {
     auto seq_lengths = node.get_input(1);
 
     // retrieve attributes
-    auto seq_axis = node.get_attribute<int64_t>("seq_dim");
-    auto batch_axis = node.get_attribute<int64_t>("batch_dim", 0);
+    auto seq_dim = node.get_attribute<int64_t>("seq_dim");
+    auto batch_dim = node.get_attribute<int64_t>("batch_dim", 0);
 
-    auto reverse_sequence = make_shared<ReverseSequence>(input, seq_lengths, batch_axis, seq_axis);
+    auto reverse_sequence = make_shared<ReverseSequence>(input, seq_lengths, batch_dim, seq_dim);
     set_node_name(node.get_name(), reverse_sequence);
     return {reverse_sequence};
 }
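
The rename is purely cosmetic: the TF attributes are still read as `seq_dim` and `batch_dim`, and they are forwarded unchanged to OpenVINO's `ReverseSequence` constructor (batch axis first, then sequence axis), so the local variable names now match the attribute names. For context, the operation on both sides slices the input along `batch_dim` and, for slice `i`, reverses the first `seq_lengths[i]` elements along `seq_dim`, leaving the remainder untouched. Below is a minimal NumPy sketch of that behavior (illustration only, not part of the commit; the helper name is made up):

import numpy as np

def reverse_sequence_reference(data, seq_lengths, seq_dim, batch_dim):
    # Illustrative reference for ReverseSequence semantics (not from the PR):
    # for each index i along batch_dim, reverse the first seq_lengths[i]
    # elements along seq_dim and leave the rest untouched.
    result = data.copy()
    for i, length in enumerate(seq_lengths):
        # Select batch element i and the first `length` positions along seq_dim.
        idx = [slice(None)] * data.ndim
        idx[batch_dim] = i
        idx[seq_dim] = slice(0, length)
        block = data[tuple(idx)]  # read from the untouched input
        # Integer-indexing batch_dim drops that axis, so shift seq_dim if needed.
        flip_axis = seq_dim - 1 if seq_dim > batch_dim else seq_dim
        result[tuple(idx)] = np.flip(block, axis=flip_axis)
    return result

# Matches the first test case in the layer test below: shape [2, 3], seq_dim=1, batch_dim=0.
x = np.array([[1, 2, 3],
              [4, 5, 6]], dtype=np.int32)
print(reverse_sequence_reference(x, np.array([2, 3]), seq_dim=1, batch_dim=0))
# [[2 1 3]
#  [6 5 4]]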


@@ -0,0 +1,53 @@
# Copyright (C) 2018-2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import numpy as np
import pytest
import tensorflow as tf
from common.tf_layer_test_class import CommonTFLayerTest


class TestReverseSequence(CommonTFLayerTest):
    def _prepare_input(self, inputs_info):
        assert 'input' in inputs_info
        assert 'seq_lengths' in inputs_info
        input_shape = inputs_info['input']
        seq_lengths_shape = inputs_info['seq_lengths']
        inputs_data = {}
        inputs_data['input'] = np.random.randint(-50, 50, input_shape).astype(self.input_type)
        inputs_data['seq_lengths'] = np.random.randint(0, self.max_seq_length + 1, seq_lengths_shape).astype(
            self.seq_lengths_type)
        return inputs_data

    def create_reverse_sequence_net(self, input_shape, input_type, seq_lengths_type, seq_dim, batch_dim):
        self.input_type = input_type
        self.seq_lengths_type = seq_lengths_type
        assert 0 <= batch_dim and batch_dim < len(input_shape), "Incorrect `batch_dim` in the test case"
        assert 0 <= seq_dim and seq_dim < len(input_shape), "Incorrect `seq_dim` in the test case"
        self.max_seq_length = input_shape[seq_dim]
        batch_size = input_shape[batch_dim]
        tf.compat.v1.reset_default_graph()
        # Create the graph and model
        with tf.compat.v1.Session() as sess:
            input = tf.compat.v1.placeholder(input_type, input_shape, 'input')
            seq_lengths = tf.compat.v1.placeholder(seq_lengths_type, [batch_size], 'seq_lengths')
            tf.raw_ops.ReverseSequence(input=input, seq_lengths=seq_lengths, seq_dim=seq_dim, batch_dim=batch_dim)
            tf.compat.v1.global_variables_initializer()
            tf_net = sess.graph_def

        return tf_net, None

    test_data_basic = [
        dict(input_shape=[2, 3], input_type=np.int32, seq_lengths_type=np.int64, seq_dim=1, batch_dim=0),
        dict(input_shape=[3, 6, 4], input_type=np.float32, seq_lengths_type=np.int32, seq_dim=2, batch_dim=0),
        dict(input_shape=[6, 3, 4, 2], input_type=np.float32, seq_lengths_type=np.int32, seq_dim=0, batch_dim=3),
    ]

    @pytest.mark.parametrize("params", test_data_basic)
    @pytest.mark.precommit_tf_fe
    @pytest.mark.nightly
    def test_reverse_sequence_basic(self, params, ie_device, precision, ir_version, temp_dir,
                                    use_new_frontend, use_old_api):
        self._test(*self.create_reverse_sequence_net(**params),
                   ie_device, precision, ir_version, temp_dir=temp_dir,
                   use_new_frontend=use_new_frontend, use_old_api=use_old_api)
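
As a quick standalone sanity check outside the layer-test harness, the raw op can also be run eagerly (TF2) and compared against the NumPy reference sketched above; `reverse_sequence_reference` is the illustrative helper from that sketch, not part of the test suite:

import numpy as np
import tensorflow as tf  # TF2 eager mode: tf.raw_ops kernels are directly callable

# Shapes mirror the second test case: input [3, 6, 4], seq_dim=2, batch_dim=0.
x = np.random.randint(-50, 50, [3, 6, 4]).astype(np.float32)
lens = np.array([2, 4, 1], dtype=np.int32)  # one length per batch element, each <= 4

tf_out = tf.raw_ops.ReverseSequence(input=x, seq_lengths=lens, seq_dim=2, batch_dim=0).numpy()
np_out = reverse_sequence_reference(x, lens, seq_dim=2, batch_dim=0)
assert np.array_equal(tf_out, np_out)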