Update ONNX importer LSTM to use v5 LSTMSequence (#2511)

Katarzyna Mitrus 2020-10-09 15:24:10 +02:00 committed by GitHub
parent 2e49b4e4d8
commit 00faee86e0
13 changed files with 1364 additions and 47 deletions

@@ -56,7 +56,8 @@ namespace ngraph
std::shared_ptr<Node> NGRAPH_API
convert_lstm_node_format(const Output<Node>& node,
LSTMWeightsFormat from_format,
LSTMWeightsFormat to_format = LSTMWeightsFormat::FICO);
LSTMWeightsFormat to_format = LSTMWeightsFormat::FICO,
int64_t axis = 0);
/// \brief Base class for all recurrent network cells.
///

@@ -32,7 +32,8 @@ using namespace ngraph;
std::shared_ptr<Node> ngraph::op::util::convert_lstm_node_format(const Output<Node>& node,
LSTMWeightsFormat from_format,
LSTMWeightsFormat to_format)
LSTMWeightsFormat to_format,
int64_t axis)
{
static const std::map<op::util::LSTMWeightsFormat, std::vector<size_t>> gate_order_map{
{op::util::LSTMWeightsFormat::FICO, {0, 1, 2, 3}},
@@ -45,7 +46,7 @@ std::shared_ptr<Node> ngraph::op::util::convert_lstm_node_format(const Output<No
const auto& to = gate_order_map.at(to_format);
size_t num_gates = 4;
auto axis_const = std::make_shared<opset4::Constant>(element::i64, Shape{}, 0);
auto axis_const = std::make_shared<opset4::Constant>(element::i64, Shape{}, axis);
OutputVector splitted_node =
std::make_shared<opset4::Split>(node, axis_const, num_gates)->outputs();
OutputVector nodes_in_new_format(num_gates);
@@ -53,7 +54,7 @@ std::shared_ptr<Node> ngraph::op::util::convert_lstm_node_format(const Output<No
{
nodes_in_new_format[to[from[i]]] = splitted_node[i];
}
return std::make_shared<opset4::Concat>(nodes_in_new_format, 0);
return std::make_shared<opset4::Concat>(nodes_in_new_format, axis);
}
// Modify input vector in-place and return reference to modified vector.
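As a point of reference for the convert_lstm_node_format change above, a minimal usage sketch follows; the include path and the wrapper name are assumptions for illustration, not part of the commit. ONNX packs the LSTM gates in iofc order along the 4*hidden_size axis, so the importer reorders W, R and the combined bias to fico along axis 1 before handing them to LSTMSequence.

// Illustrative sketch only: reorder an ONNX LSTM weight tensor of shape
// [num_directions, 4 * hidden_size, input_size] from iofc to fico gate order.
// The include path and the wrapper name are assumed for this example.
#include "ngraph/op/util/rnn_cell_base.hpp"

using namespace ngraph;

std::shared_ptr<Node> onnx_to_ie_gate_order(const Output<Node>& onnx_weights)
{
    // The gate blocks sit on axis 1, so Split/Concat happen along that axis.
    return op::util::convert_lstm_node_format(onnx_weights,
                                              op::util::LSTMWeightsFormat::IOFC,
                                              op::util::LSTMWeightsFormat::FICO,
                                              /*axis=*/1);
}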

@@ -25,6 +25,7 @@
#include "ngraph/builder/reshape.hpp"
#include "ngraph/builder/split.hpp"
#include "ngraph/enum_names.hpp"
#include "ngraph/log.hpp"
#include "ngraph/op/add.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/lstm_sequence.hpp"
@@ -78,10 +79,18 @@ namespace ngraph
builder::opset1::reorder_axes(ng_inputs.at(0), {1, 0, 2});
// Weight tensor for the gates.
// Shape: [num_directions, 4*hidden_size, input_size]
m_map[LSTMInput::LSTM_INPUT_W] = ng_inputs.at(1);
m_map[LSTMInput::LSTM_INPUT_W] = ngraph::op::util::convert_lstm_node_format(
ng_inputs.at(1),
ngraph::op::util::LSTMWeightsFormat::IOFC,
ngraph::op::util::LSTMWeightsFormat::FICO,
1);
// The recurrence weight tensor.
// Shape: [num_directions, 4*hidden_size, hidden_size]
m_map[LSTMInput::LSTM_INPUT_R] = ng_inputs.at(2);
m_map[LSTMInput::LSTM_INPUT_R] = ngraph::op::util::convert_lstm_node_format(
ng_inputs.at(2),
ngraph::op::util::LSTMWeightsFormat::IOFC,
ngraph::op::util::LSTMWeightsFormat::FICO,
1);
const std::size_t hidden_size =
m_map[LSTMInput::LSTM_INPUT_R].get_shape().back();
@@ -99,6 +108,12 @@ namespace ngraph
NGRAPH_SUPPRESS_DEPRECATED_START
m_map[LSTMInput::LSTM_INPUT_B] = split_bias.at(0) + split_bias.at(1);
NGRAPH_SUPPRESS_DEPRECATED_END
m_map[LSTMInput::LSTM_INPUT_B] =
ngraph::op::util::convert_lstm_node_format(
m_map[LSTMInput::LSTM_INPUT_B],
ngraph::op::util::LSTMWeightsFormat::IOFC,
ngraph::op::util::LSTMWeightsFormat::FICO,
1);
}
else
{
@@ -152,17 +167,12 @@ namespace ngraph
std::vector<float>(batch_size * num_directions * hidden_size, 0.f));
}
// The weight tensor for peepholes. Shape [num_directions, 3*hidden_size]
// Peepholes input is not supported by OpenVINO
if (ng_inputs.size() > 7 && !ngraph::op::is_null(ng_inputs.at(7)))
{
m_map[LSTMInput::LSTM_INPUT_P] = ng_inputs.at(7);
}
else
{
m_map[LSTMInput::LSTM_INPUT_P] = default_opset::Constant::create(
element::f32,
Shape{num_directions, peepholes_count * hidden_size},
std::vector<float>(num_directions * peepholes_count * hidden_size,
0.f));
NGRAPH_WARN
<< (node)
<< " Input `P` (peepholes) is not supported and will be ignored ";
}
}
@@ -193,6 +203,12 @@ namespace ngraph
m_direction =
ngraph::as_enum<ngraph::op::RecurrentSequenceDirection>(direction);
if (m_input_forget != 0)
{
NGRAPH_WARN << (node) << " Attribute `input_forget` is not supported "
"and will be ignored ";
}
}
ngraph::op::RecurrentSequenceDirection m_direction;
@@ -213,10 +229,7 @@ namespace ngraph
LSTMNgInputMap input_map{node};
LSTMAttributes attributes{node};
// LSTMSequence is not fully supported in OpenVINO and is excluded from
// opset4 (current the latest opset version), use one of the previous
// opsets instead of default
auto lstmSequence = std::make_shared<opset3::LSTMSequence>(
auto lstm_sequence = std::make_shared<default_opset::LSTMSequence>(
input_map.at(LSTMInput::LSTM_INPUT_X),
input_map.at(LSTMInput::LSTM_INPUT_INIT_H),
input_map.at(LSTMInput::LSTM_INPUT_INIT_C),
@@ -224,19 +237,16 @@ namespace ngraph
input_map.at(LSTMInput::LSTM_INPUT_W),
input_map.at(LSTMInput::LSTM_INPUT_R),
input_map.at(LSTMInput::LSTM_INPUT_B),
input_map.at(LSTMInput::LSTM_INPUT_P),
attributes.m_hidden_size,
attributes.m_direction,
ngraph::op::LSTMWeightsFormat::IOFC,
attributes.m_activation_alpha,
attributes.m_activation_beta,
attributes.m_activations,
attributes.m_clip_threshold,
attributes.m_input_forget);
attributes.m_clip_threshold);
const auto Y = lstmSequence->output(0);
const auto Y_h = lstmSequence->output(1);
const auto Y_c = lstmSequence->output(2);
const auto Y = lstm_sequence->output(0);
const auto Y_h = lstm_sequence->output(1);
const auto Y_c = lstm_sequence->output(2);
return {builder::opset1::reorder_axes(Y, {2, 1, 0, 3}),
builder::opset1::reorder_axes(Y_h, {1, 0, 2}),

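For orientation, a minimal sketch of the v5 LSTMSequence call the importer now emits is shown below; it assumes the ngraph/opsets/opset5.hpp header, and the wrapper name, fixed forward direction and default activation settings are illustrative rather than taken from the diff. Compared with the opset3 constructor, v5 drops the peephole (P) input, the weights-format argument and input_forget, which is why those arguments disappear from the call above.

// Hypothetical, self-contained sketch of an opset5 LSTMSequence construction;
// tensor names and the forward direction are placeholders for illustration.
#include <cstdint>
#include <memory>
#include <string>
#include <vector>

#include "ngraph/opsets/opset5.hpp"

using namespace ngraph;

std::shared_ptr<opset5::LSTMSequence> make_lstm_sequence(
    const Output<Node>& X,        // [batch, seq_len, input_size]
    const Output<Node>& H,        // [batch, num_directions, hidden_size]
    const Output<Node>& C,        // [batch, num_directions, hidden_size]
    const Output<Node>& seq_len,  // [batch]
    const Output<Node>& W,        // [num_directions, 4*hidden_size, input_size], fico
    const Output<Node>& R,        // [num_directions, 4*hidden_size, hidden_size], fico
    const Output<Node>& B,        // [num_directions, 4*hidden_size], fico
    std::int64_t hidden_size)
{
    return std::make_shared<opset5::LSTMSequence>(
        X, H, C, seq_len, W, R, B,
        hidden_size,
        op::RecurrentSequenceDirection::FORWARD,
        std::vector<float>{},                                  // activations_alpha
        std::vector<float>{},                                  // activations_beta
        std::vector<std::string>{"sigmoid", "tanh", "tanh"},   // activations
        0.f);                                                  // clip
}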
@@ -0,0 +1,211 @@
ir_version: 7
producer_name: "onnx-importer-test"
graph {
node {
output: "W"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 2
dims: 8
dims: 2
data_type: 1
float_data: 0.31403765082359314
float_data: -0.16793324053287506
float_data: 1.3882579803466797
float_data: -0.690295398235321
float_data: -0.39940449595451355
float_data: -0.7833511233329773
float_data: -0.30992957949638367
float_data: 0.35575729608535767
float_data: -0.46826308965682983
float_data: 1.1741459369659424
float_data: -2.4147889614105225
float_data: -0.42783254384994507
float_data: -0.821994960308075
float_data: -0.03900860995054245
float_data: -0.43670088052749634
float_data: -0.5381056666374207
float_data: 0.31403765082359314
float_data: -0.16793324053287506
float_data: 1.3882579803466797
float_data: -0.690295398235321
float_data: -0.39940449595451355
float_data: -0.7833511233329773
float_data: -0.30992957949638367
float_data: 0.35575729608535767
float_data: -0.46826308965682983
float_data: 1.1741459369659424
float_data: -2.4147889614105225
float_data: -0.42783254384994507
float_data: -0.821994960308075
float_data: -0.03900860995054245
float_data: -0.43670088052749634
float_data: -0.5381056666374207
name: "const_tensor_W"
}
type: TENSOR
}
}
node {
output: "R"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 2
dims: 8
dims: 2
data_type: 1
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
float_data: 0.575465202331543
float_data: 0.4387988746166229
float_data: 0.7399293780326843
float_data: 0.4517557919025421
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
float_data: 0.575465202331543
float_data: 0.4387988746166229
float_data: 0.7399293780326843
float_data: 0.4517557919025421
name: "const_tensor"
}
type: TENSOR
}
}
node {
input: "X"
input: "W"
input: "R"
output: "Y"
output: "Y_h"
output: "Y_c"
op_type: "LSTM"
attribute {
name: "activations"
strings: "sigmoid"
strings: "tanh"
strings: "tanh"
strings: "sigmoid"
strings: "tanh"
strings: "tanh"
type: STRINGS
}
attribute {
name: "direction"
s: "bidirectional"
type: STRING
}
attribute {
name: "hidden_size"
i: 2
type: INT
}
}
name: "test-model-lstm"
input {
name: "X"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y_h"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y_c"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
}
opset_import {
domain: ""
version: 12
}

@@ -0,0 +1,186 @@
ir_version: 7
producer_name: "onnx-importer-test"
graph {
node {
output: "W"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 8
dims: 2
data_type: 1
float_data: 0.31403765082359314
float_data: -0.16793324053287506
float_data: 1.3882579803466797
float_data: -0.690295398235321
float_data: -0.39940449595451355
float_data: -0.7833511233329773
float_data: -0.30992957949638367
float_data: 0.35575729608535767
float_data: -0.46826308965682983
float_data: 1.1741459369659424
float_data: -2.4147889614105225
float_data: -0.42783254384994507
float_data: -0.821994960308075
float_data: -0.03900860995054245
float_data: -0.43670088052749634
float_data: -0.5381056666374207
name: "const_tensor_W"
}
type: TENSOR
}
}
node {
output: "R"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 8
dims: 2
data_type: 1
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
float_data: 0.575465202331543
float_data: 0.4387988746166229
float_data: 0.7399293780326843
float_data: 0.4517557919025421
name: "const_tensor"
}
type: TENSOR
}
}
node {
input: "X"
input: "W"
input: "R"
output: "Y"
output: "Y_h"
output: "Y_c"
op_type: "LSTM"
attribute {
name: "activations"
strings: "sigmoid"
strings: "tanh"
strings: "tanh"
type: STRINGS
}
attribute {
name: "clip"
f: 0.10000000149011612
type: FLOAT
}
attribute {
name: "direction"
s: "forward"
type: STRING
}
attribute {
name: "hidden_size"
i: 2
type: INT
}
attribute {
name: "input_forget"
i: 0
type: INT
}
}
name: "test-model-lstm"
input {
name: "X"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y_h"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y_c"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
}
opset_import {
domain: ""
version: 12
}

@@ -0,0 +1,176 @@
ir_version: 7
producer_name: "onnx-importer-test"
graph {
node {
output: "W"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 8
dims: 2
data_type: 1
float_data: 0.31403765082359314
float_data: -0.16793324053287506
float_data: 1.3882579803466797
float_data: -0.690295398235321
float_data: -0.39940449595451355
float_data: -0.7833511233329773
float_data: -0.30992957949638367
float_data: 0.35575729608535767
float_data: -0.46826308965682983
float_data: 1.1741459369659424
float_data: -2.4147889614105225
float_data: -0.42783254384994507
float_data: -0.821994960308075
float_data: -0.03900860995054245
float_data: -0.43670088052749634
float_data: -0.5381056666374207
name: "const_tensor_W"
}
type: TENSOR
}
}
node {
output: "R"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 8
dims: 2
data_type: 1
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
float_data: 0.575465202331543
float_data: 0.4387988746166229
float_data: 0.7399293780326843
float_data: 0.4517557919025421
name: "const_tensor"
}
type: TENSOR
}
}
node {
input: "X"
input: "W"
input: "R"
output: "Y"
output: "Y_h"
output: "Y_c"
op_type: "LSTM"
attribute {
name: "activations"
strings: "sigmoid"
strings: "tanh"
strings: "tanh"
type: STRINGS
}
attribute {
name: "direction"
s: "forward"
type: STRING
}
attribute {
name: "hidden_size"
i: 2
type: INT
}
}
name: "test-model-lstm"
input {
name: "X"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y_h"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y_c"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
}
opset_import {
domain: ""
version: 12
}

@@ -0,0 +1,209 @@
ir_version: 7
producer_name: "onnx-importer-test"
graph {
node {
output: "W"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 12
dims: 1
data_type: 1
float_data: 0.31403765082359314
float_data: -0.16793324053287506
float_data: 1.3882579803466797
float_data: -0.690295398235321
float_data: -0.39940449595451355
float_data: -0.7833511233329773
float_data: -0.30992957949638367
float_data: 0.35575729608535767
float_data: -0.46826308965682983
float_data: 1.1741459369659424
float_data: -2.4147889614105225
float_data: -0.42783254384994507
name: "const_tensor_W"
}
type: TENSOR
}
}
node {
output: "R"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 12
dims: 3
data_type: 1
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
name: "const_tensor"
}
type: TENSOR
}
}
node {
output: "sequence_lens"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 2
data_type: 6
int32_data: 1
int32_data: 2
name: "const_tensor"
}
type: TENSOR
}
}
node {
input: "X"
input: "W"
input: "R"
input: ""
input: "sequence_lens"
output: "Y"
output: "Y_h"
output: "Y_c"
op_type: "LSTM"
attribute {
name: "activations"
strings: "sigmoid"
strings: "tanh"
strings: "tanh"
type: STRINGS
}
attribute {
name: "direction"
s: "forward"
type: STRING
}
attribute {
name: "hidden_size"
i: 3
type: INT
}
}
name: "test-model-lstm"
input {
name: "X"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 1
}
}
}
}
}
output {
name: "Y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
dim {
dim_value: 3
}
}
}
}
}
output {
name: "Y_h"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 2
}
dim {
dim_value: 3
}
}
}
}
}
output {
name: "Y_c"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 2
}
dim {
dim_value: 3
}
}
}
}
}
}
opset_import {
domain: ""
version: 12
}

@@ -0,0 +1,176 @@
ir_version: 7
producer_name: "onnx-importer-test"
graph {
node {
output: "W"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 8
dims: 2
data_type: 1
float_data: 0.31403765082359314
float_data: -0.16793324053287506
float_data: 1.3882579803466797
float_data: -0.690295398235321
float_data: -0.39940449595451355
float_data: -0.7833511233329773
float_data: -0.30992957949638367
float_data: 0.35575729608535767
float_data: -0.46826308965682983
float_data: 1.1741459369659424
float_data: -2.4147889614105225
float_data: -0.42783254384994507
float_data: -0.821994960308075
float_data: -0.03900860995054245
float_data: -0.43670088052749634
float_data: -0.5381056666374207
name: "const_tensor_W"
}
type: TENSOR
}
}
node {
output: "R"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 8
dims: 2
data_type: 1
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
float_data: 0.575465202331543
float_data: 0.4387988746166229
float_data: 0.7399293780326843
float_data: 0.4517557919025421
name: "const_tensor"
}
type: TENSOR
}
}
node {
input: "X"
input: "W"
input: "R"
output: "Y"
output: "Y_h"
output: "Y_c"
op_type: "LSTM"
attribute {
name: "activations"
strings: "sigmoid"
strings: "tanh"
strings: "tanh"
type: STRINGS
}
attribute {
name: "direction"
s: "reverse"
type: STRING
}
attribute {
name: "hidden_size"
i: 2
type: INT
}
}
name: "test-model-lstm"
input {
name: "X"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y_h"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "Y_c"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
}
}
}
}
}
opset_import {
domain: ""
version: 12
}

@@ -0,0 +1,209 @@
ir_version: 7
producer_name: "onnx-importer-test"
graph {
node {
output: "W"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 12
dims: 1
data_type: 1
float_data: 0.31403765082359314
float_data: -0.16793324053287506
float_data: 1.3882579803466797
float_data: -0.690295398235321
float_data: -0.39940449595451355
float_data: -0.7833511233329773
float_data: -0.30992957949638367
float_data: 0.35575729608535767
float_data: -0.46826308965682983
float_data: 1.1741459369659424
float_data: -2.4147889614105225
float_data: -0.42783254384994507
name: "const_tensor_W"
}
type: TENSOR
}
}
node {
output: "R"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 1
dims: 12
dims: 3
data_type: 1
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
float_data: 0.8490582704544067
float_data: 0.45121243596076965
float_data: -1.179901361465454
float_data: 0.13536448776721954
float_data: 0.813286542892456
float_data: 0.6017516255378723
float_data: 0.4847572445869446
float_data: -1.2136037349700928
float_data: 0.16383321583271027
float_data: 1.5106260776519775
float_data: 1.1177502870559692
float_data: 0.2358246147632599
name: "const_tensor"
}
type: TENSOR
}
}
node {
output: "sequence_lens"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 2
data_type: 6
int32_data: 1
int32_data: 2
name: "const_tensor"
}
type: TENSOR
}
}
node {
input: "X"
input: "W"
input: "R"
input: ""
input: "sequence_lens"
output: "Y"
output: "Y_h"
output: "Y_c"
op_type: "LSTM"
attribute {
name: "activations"
strings: "sigmoid"
strings: "tanh"
strings: "tanh"
type: STRINGS
}
attribute {
name: "direction"
s: "reverse"
type: STRING
}
attribute {
name: "hidden_size"
i: 3
type: INT
}
}
name: "test-model-lstm"
input {
name: "X"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 2
}
dim {
dim_value: 1
}
}
}
}
}
output {
name: "Y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 1
}
dim {
dim_value: 2
}
dim {
dim_value: 3
}
}
}
}
}
output {
name: "Y_h"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 2
}
dim {
dim_value: 3
}
}
}
}
}
output {
name: "Y_c"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 2
}
dim {
dim_value: 3
}
}
}
}
}
}
opset_import {
domain: ""
version: 12
}

@@ -43,10 +43,145 @@ static std::string s_manifest = "${MANIFEST}";
using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
// ONNX LSTM tests (implemented by nGraph LSTMCell and LSTMSequence)
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_fwd_with_clip)
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_fwd_default_const)
{
auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_fwd_with_clip.prototxt"));
file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_fwd_default_const.prototxt"));
auto test_case = test::TestCase<TestEngine>(function);
test_case.add_input<float>({0.68172926, 1.1405563, -0.03931177, -0.03759607}); // X
test_case.add_expected_output<float>(
Shape{2, 1, 1, 2}, {-0.063373, -0.20347191, -0.07230289, -0.13298286}); // Y_data
test_case.add_expected_output<float>(Shape{1, 1, 2}, {-0.07230289, -0.13298286}); // Y_h_data
test_case.add_expected_output<float>(Shape{1, 1, 2}, {-0.1557954, -0.24502525}); // Y_c_data
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_reverse_const)
{
auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_reverse_const.prototxt"));
auto test_case = test::TestCase<TestEngine>(function);
test_case.add_input<float>({0.68172926, 1.1405563, -0.03931177, -0.03759607}); // X
test_case.add_expected_output<float>(
Shape{2, 1, 1, 2}, {-0.06082131, -0.19985214, 0.00860566, 0.00920492}); // Y_data
test_case.add_expected_output<float>(Shape{1, 1, 2}, {-0.06082131, -0.19985214}); // Y_h_data
test_case.add_expected_output<float>(Shape{1, 1, 2}, {-0.25917438, -0.3832652}); // Y_c_data
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_bidir_const)
{
auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_bidir_const.prototxt"));
auto test_case = test::TestCase<TestEngine>(function);
test_case.add_input<float>({0.68172926, 1.1405563, -0.03931177, -0.03759607}); // X
test_case.add_expected_output<float>(Shape{2, 2, 1, 2},
{-0.063373,
-0.20347191,
-0.06082131,
-0.19985214,
-0.07230289,
-0.13298286,
0.00860566,
0.00920492}); // Y_data
test_case.add_expected_output<float>(
Shape{2, 1, 2}, {-0.07230289, -0.13298286, -0.06082131, -0.19985214}); // Y_h_data
test_case.add_expected_output<float>(
Shape{2, 1, 2}, {-0.1557954, -0.24502525, -0.25917438, -0.3832652}); // Y_c_data
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_fwd_with_clip_const)
{
auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_fwd_clip_const.prototxt"));
auto test_case = test::TestCase<TestEngine>(function);
test_case.add_input<float>({0.68172926, 1.1405563, -0.03931177, -0.03759607}); // X
test_case.add_expected_output<float>(
Shape{2, 1, 1, 2}, {-0.02391884, -0.02744377, -0.01024176, -0.01188637}); // Y_data
test_case.add_expected_output<float>(Shape{1, 1, 2}, {-0.01024176, -0.01188637}); // Y_h_data
test_case.add_expected_output<float>(Shape{1, 1, 2}, {-0.02039271, -0.02353566}); // Y_c_data
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_fwd_mixed_seq_const)
{
auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_fwd_mixed_seq_const.prototxt"));
auto test_case = test::TestCase<TestEngine>(function);
test_case.add_input<float>({0.68172926, 1.1405563, -0.03931177, -0.03759607}); // X
test_case.add_expected_output<float>(Shape{2, 1, 2, 3},
{0.13528088,
-0.1779867,
-0.07448981,
0.14769037,
-0.16327181,
-0.10419653,
0.,
0.,
0.,
0.08759661,
-0.04002844,
-0.08617793}); // Y_data
test_case.add_expected_output<float>(
Shape{1, 2, 3},
{0.13528088, -0.1779867, -0.07448981, 0.08759661, -0.04002844, -0.08617793}); // Y_h_data
test_case.add_expected_output<float>(
Shape{1, 2, 3},
{0.367563, -0.43762812, -0.20435227, 0.17330585, -0.0732716, -0.18809439}); // Y_c_data
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_reverse_mixed_seq_const)
{
auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_reverse_mixed_seq_const.prototxt"));
auto test_case = test::TestCase<TestEngine>(function);
test_case.add_input<float>({0.68172926, 1.1405563, -0.03931177, -0.03759607}); // X
test_case.add_expected_output<float>(Shape{2, 1, 2, 3},
{0.13528088,
-0.1779867,
-0.07448981,
0.14696799,
-0.15571019,
-0.10270946,
0.,
0.,
0.,
-0.01110403,
0.0228607,
0.00397353}); // Y_data
test_case.add_expected_output<float>(
Shape{1, 2, 3},
{0.13528088, -0.1779867, -0.07448981, 0.14696799, -0.15571019, -0.10270946}); // Y_h_data
test_case.add_expected_output<float>(
Shape{1, 2, 3},
{0.367563, -0.43762812, -0.20435227, 0.50598085, -0.42627674, -0.3641275}); // Y_c_data
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_fwd_with_clip_peepholes)
{
auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_fwd_with_clip_peepholes.prototxt"));
auto test_case = test::TestCase<TestEngine>(function);
test_case.add_input<float>({-0.455351, -0.276391, -0.185934, -0.269585}); // X
@@ -108,7 +243,7 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_fwd_with_clip)
// We have to enlarge tolerance bits to 3 - it's only one bit more than default value.
// The discrepancies may occur at most on 7th decimal position.
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 3);
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_fwd_mixed_seq)
@@ -251,7 +386,7 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_fwd_large_batch_no_clip)
test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_bdir_short_input_seq)
NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_bdir_short_input_seq_peepholes)
{
auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_bdir_short_input_seq.prototxt"));

@@ -47,14 +47,6 @@ onnx_model_gatherND_float
# Round op doesn't support some specific cases of rounding
onnx_model_round_half_nearest_even
# LSTMSequence Layer is not instance of RNNLayer class
onnx_model_lstm_fwd_with_clip
onnx_model_lstm_fwd_mixed_seq
onnx_model_lstm_fwd_hardsigmoid_activation
onnx_model_lstm_fwd_large_batch_no_clip
onnx_model_lstm_bdir_short_input_seq
onnx_model_lstm_mixed_seq_reverse
# Result mismatch
onnx_model_split_equal_parts_default
onnx_model_argmin_no_keepdims
@@ -209,6 +201,15 @@ onnx_model_range_positive_step
onnx_model_range_negative_step
onnx_dyn_shapes_slice_1_3d_input_21_axes_ends_max
# LSTMSequence Layer is not an instance of RNNLayer class
# (Constant W, B, R inputs are required)
onnx_model_lstm_fwd_with_clip_peepholes
onnx_model_lstm_fwd_mixed_seq
onnx_model_lstm_fwd_hardsigmoid_activation
onnx_model_lstm_fwd_large_batch_no_clip
onnx_model_lstm_bdir_short_input_seq_peepholes
onnx_model_lstm_mixed_seq_reverse
# GRUCell/GRUSequence operation has a form that is not supported
# (Constant W, B, R inputs are required)
IE_CPU.onnx_model_gru_defaults_fwd
@@ -230,7 +231,9 @@ IE_CPU.onnx_model_rnn_reverse
IE_CPU.onnx_model_rnn_fwd_bias_initial_h
IE_CPU.onnx_model_rnn_bidirectional
## RNN/GRU Sequence - seq_lengths are not supported
## RNN/GRU/LSTM Sequence: Output values mismatch - seq_lengths not supported
IE_CPU.onnx_model_lstm_fwd_mixed_seq_const
IE_CPU.onnx_model_lstm_reverse_mixed_seq_const
IE_CPU.onnx_model_rnn_fwd_mixed_seq_len
IE_CPU.onnx_model_rnn_fwd_mixed_seq_len_const
IE_CPU.onnx_model_gru_fwd_mixed_seq_len

@@ -116,26 +116,26 @@ INTERPRETER.onnx_model_gatherND_float
# Round op doesn't support some specific cases of rounding
onnx_model_round_half_nearest_even
# GRU/RNN Sequence: Output values mismatch - seq_lengths not supported
# GRU/RNN/LSTM Sequence: Output values mismatch - seq_lengths not supported
onnx_model_lstm_fwd_mixed_seq_const
onnx_model_lstm_reverse_mixed_seq_const
onnx_model_lstm_fwd_mixed_seq
onnx_model_lstm_mixed_seq_reverse
onnx_model_gru_fwd_mixed_seq_len
onnx_model_gru_fwd_mixed_seq_len_const
onnx_model_rnn_fwd_mixed_seq_len
onnx_model_rnn_fwd_mixed_seq_len_const
# Unsupported op 'LSTMSequence': not FusedOp anymore, no reference implementation yet
onnx_model_lstm_fwd_with_clip
onnx_model_lstm_fwd_mixed_seq
onnx_model_lstm_fwd_hardsigmoid_activation
onnx_model_lstm_fwd_large_batch_no_clip
onnx_model_lstm_bdir_short_input_seq
onnx_model_lstm_mixed_seq_reverse
# Activation function hardsigmoid is not supported.
gru_cell_activation_function
lstm_cell_activaction_functions
onnx_model_gru_fwd_activations
onnx_model_lstm_fwd_hardsigmoid_activation
# Peepholes, input_forget are not supported
onnx_model_lstm_fwd_with_clip_peepholes
onnx_model_lstm_bdir_short_input_seq_peepholes
lstm_cell_bias_peepholes
lstm_cell_bias_peepholes_clip_input_forget