[nGraph] Reorder nGraph LSTMSequence inputs and outputs dimensions (#560)

* Reorder nGraph LSTMSequence input/output dimensions

* Update nGraph pythonAPI for LSTMSequence

* Reorder axes in ONNX importer LSTM

* Tests update

* Fix clang warning

* Use opset3 namespace

* Style apply

* Tests update

* Use opset1 namespace

* Remove usage of GetOutputElement in ONNX importer LSTM

* Remove opset0 header

* Use Node::output()
This commit is contained in:
Katarzyna Mitrus
2020-05-29 13:29:18 +02:00
committed by GitHub
parent a4f13ae9fe
commit 5f8f9ec108
8 changed files with 231 additions and 143 deletions

View File

@@ -1104,16 +1104,27 @@ TEST(attributes, lstm_cell_op)
TEST(attributes, lstm_sequence_op)
{
FactoryRegistry<Node>::get().register_factory<opset1::LSTMSequence>();
const auto X = make_shared<op::Parameter>(element::f32, Shape{1, 2, 4});
const auto initial_hidden_state = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
const auto initial_cell_state = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
const auto sequence_lengths = make_shared<op::Parameter>(element::i32, Shape{2});
const auto W = make_shared<op::Parameter>(element::f32, Shape{1, 12, 4});
const auto R = make_shared<op::Parameter>(element::f32, Shape{1, 12, 3});
const auto B = make_shared<op::Parameter>(element::f32, Shape{1, 12});
const auto hidden_size = 3;
const auto lstm_direction = op::LSTMSequence::direction::FORWARD;
const auto batch_size = 4;
const auto num_directions = 2;
const auto seq_length = 8;
const auto input_size = 16;
const auto hidden_size = 64;
const auto X =
make_shared<op::Parameter>(element::f32, Shape{batch_size, seq_length, input_size});
const auto initial_hidden_state =
make_shared<op::Parameter>(element::f32, Shape{batch_size, num_directions, hidden_size});
const auto initial_cell_state =
make_shared<op::Parameter>(element::f32, Shape{batch_size, num_directions, hidden_size});
const auto sequence_lengths = make_shared<op::Parameter>(element::i32, Shape{batch_size});
const auto W = make_shared<op::Parameter>(element::f32,
Shape{num_directions, 4 * hidden_size, input_size});
const auto R = make_shared<op::Parameter>(element::f32,
Shape{num_directions, 4 * hidden_size, hidden_size});
const auto B = make_shared<op::Parameter>(element::f32, Shape{num_directions, 4 * hidden_size});
const auto lstm_direction = op::LSTMSequence::direction::BIDIRECTIONAL;
const auto weights_format = op::LSTMWeightsFormat::ICOF;
const std::vector<float> activations_alpha = {1, 2, 3};
const std::vector<float> activations_beta = {4, 5, 6};