ONNX LSTM fix get_shape error (#3033)
* ONNX LSTM get dimension only if required
* Test dynamic onnx lstm model import
* Enable LSTM_Seq_lens_unpacked_model import test
* Disable model zoo execution test "MSFT_opset9_LSTM_Seq_lens_unpacked"
* Add missed comma in xfail list
* Update error messages
* init xfail issue
* test zoo models import xfail issue
* Fix SEQ_LENGTH init
* Comments update
* Fix usage of v0::Add by overloaded operator
parent 18f04860af
commit 8dbff709fb
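The error this commit fixes comes from calling get_shape() on tensors whose shape is not fully static, so the importer now queries get_partial_shape() and reads a dimension only when it is both needed for a default input and actually static (see check_static_input_dim/init_dim_map in the first hunk). Below is a rough Python analogue of that guard, a minimal sketch using the onnx package to build a value info with the same dynamic layout as the new test model; the helper name static_dim is illustrative and not part of this change.

from onnx import TensorProto, helper

# X mirrors the new test model: seq_length and batch_size are dynamic, input_size is 1.
x = helper.make_tensor_value_info("X", TensorProto.FLOAT, ["seq_length", "batch_size", 1])

def static_dim(value_info, index):
    """Return the dimension size at `index` if it is static, otherwise None."""
    dims = value_info.type.tensor_type.shape.dim
    if index >= len(dims):
        return None
    dim = dims[index]
    return dim.dim_value if dim.HasField("dim_value") else None

print(static_dim(x, 0))  # None - seq_length is dynamic, so no default input may rely on it
print(static_dim(x, 2))  # 1    - input_size is static and safe to read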
@@ -60,10 +60,61 @@ namespace ngraph
                 LSTM_INPUT_P
             };

+            enum class LSTMInputDimension
+            {
+                BATCH_SIZE,
+                SEQ_LENGTH,
+                NUM_DIRECTIONS,
+                HIDDEN_SIZE,
+            };
+
             struct LSTMNgInputMap
             {
-                using container_type = std::map<LSTMInput, Output<ngraph::Node>>;
-                using iterator = typename container_type::iterator;
+                // Check if input shape dimension at dimension_index is static
+                bool check_static_input_dim(LSTMInput input, const size_t dimension_index)
+                {
+                    return m_input_map[input].get_partial_shape().rank().is_static() &&
+                           m_input_map[input].get_partial_shape().rank().get_length() >
+                               dimension_index &&
+                           m_input_map[input].get_partial_shape()[dimension_index].is_static();
+                }
+
+                // Validate and handle dimensions required to create default inputs
+                void init_dim_map()
+                {
+                    // batch_size
+                    if (check_static_input_dim(LSTMInput::LSTM_INPUT_X, 0))
+                    {
+                        m_dim_map[LSTMInputDimension::BATCH_SIZE] =
+                            m_input_map[LSTMInput::LSTM_INPUT_X]
+                                .get_partial_shape()[0]
+                                .get_length();
+                    }
+                    // seq_length
+                    if (check_static_input_dim(LSTMInput::LSTM_INPUT_X, 1))
+                    {
+                        m_dim_map[LSTMInputDimension::SEQ_LENGTH] =
+                            m_input_map[LSTMInput::LSTM_INPUT_X]
+                                .get_partial_shape()[1]
+                                .get_length();
+                    }
+                    // num_directions
+                    if (check_static_input_dim(LSTMInput::LSTM_INPUT_R, 0))
+                    {
+                        m_dim_map[LSTMInputDimension::NUM_DIRECTIONS] =
+                            m_input_map[LSTMInput::LSTM_INPUT_R]
+                                .get_partial_shape()[0]
+                                .get_length();
+                    }
+                    // hidden_size
+                    if (check_static_input_dim(LSTMInput::LSTM_INPUT_R, 2))
+                    {
+                        m_dim_map[LSTMInputDimension::HIDDEN_SIZE] =
+                            m_input_map[LSTMInput::LSTM_INPUT_R]
+                                .get_partial_shape()[2]
+                                .get_length();
+                    }
+                }
+
                 explicit LSTMNgInputMap(const Node& node)
                 {
@@ -74,99 +125,169 @@ namespace ngraph
                     constexpr std::size_t peepholes_count{3};

                     // ----- Mandatory inputs ------
-                    // Packed input sequences. Shape: [seq_length, batch_size, input_size]
-                    m_map[LSTMInput::LSTM_INPUT_X] =
+                    // Packed input sequences.
+                    // ONNX Shape: [seq_length, batch_size, input_size]
+                    // OpenVino Shape: [batch_size, seq_length, input_size]
+                    m_input_map[LSTMInput::LSTM_INPUT_X] =
                         builder::opset1::reorder_axes(ng_inputs.at(0), {1, 0, 2});

                     // Weight tensor for the gates.
                     // Shape: [num_directions, 4*hidden_size, input_size]
-                    m_map[LSTMInput::LSTM_INPUT_W] = ngraph::op::util::convert_lstm_node_format(
+                    m_input_map[LSTMInput::LSTM_INPUT_W] =
+                        ngraph::op::util::convert_lstm_node_format(
                             ng_inputs.at(1),
                             ngraph::op::util::LSTMWeightsFormat::IOFC,
                             ngraph::op::util::LSTMWeightsFormat::FICO,
                             1);

                     // The recurrence weight tensor.
                     // Shape: [num_directions, 4*hidden_size, hidden_size]
-                    m_map[LSTMInput::LSTM_INPUT_R] = ngraph::op::util::convert_lstm_node_format(
+                    m_input_map[LSTMInput::LSTM_INPUT_R] =
+                        ngraph::op::util::convert_lstm_node_format(
                             ng_inputs.at(2),
                             ngraph::op::util::LSTMWeightsFormat::IOFC,
                             ngraph::op::util::LSTMWeightsFormat::FICO,
                             1);

-                    const std::size_t hidden_size =
-                        m_map[LSTMInput::LSTM_INPUT_R].get_shape().back();
-                    const std::size_t batch_size =
-                        m_map[LSTMInput::LSTM_INPUT_X].get_shape().at(0);
-                    const std::size_t num_directions =
-                        m_map[LSTMInput::LSTM_INPUT_W].get_shape().front();
+                    // Get dimensions needed for default inputs creation
+                    init_dim_map();

                     // ------ Optional inputs ------
-                    // The bias tensor for input gate. Shape [num_directions, 4*hidden_size]
+                    // `B` - The bias tensor for input gate.
+                    // ONNX Shape: [num_directions, 8*hidden_size]
+                    // OpenVino Shape: [num_directions, 4*hidden_size]
                     if (ng_inputs.size() > 3 && !ngraph::op::is_null(ng_inputs.at(3)))
                     {
                         auto bias = ng_inputs.at(3);
                         auto split_bias = builder::opset1::split(bias, 2, 1);
                         NGRAPH_SUPPRESS_DEPRECATED_START
-                        m_map[LSTMInput::LSTM_INPUT_B] = split_bias.at(0) + split_bias.at(1);
+                        m_input_map[LSTMInput::LSTM_INPUT_B] =
+                            std::make_shared<default_opset::Add>(split_bias.at(0),
+                                                                 split_bias.at(1));
                         NGRAPH_SUPPRESS_DEPRECATED_END
-                        m_map[LSTMInput::LSTM_INPUT_B] =
+                        m_input_map[LSTMInput::LSTM_INPUT_B] =
                             ngraph::op::util::convert_lstm_node_format(
-                                m_map[LSTMInput::LSTM_INPUT_B],
+                                m_input_map[LSTMInput::LSTM_INPUT_B],
                                 ngraph::op::util::LSTMWeightsFormat::IOFC,
                                 ngraph::op::util::LSTMWeightsFormat::FICO,
                                 1);
                     }
                     else
                     {
-                        m_map[LSTMInput::LSTM_INPUT_B] = default_opset::Constant::create(
-                            element::f32,
-                            Shape{num_directions, gates_count * hidden_size},
-                            std::vector<float>(num_directions * gates_count * hidden_size,
+                        NGRAPH_CHECK(m_dim_map.count(LSTMInputDimension::NUM_DIRECTIONS) &&
+                                         m_dim_map.count(LSTMInputDimension::HIDDEN_SIZE),
+                                     "ONNX LSTM: Can't create default `B` input, "
+                                     "because at least one of required dimensions "
+                                     "(num_directions, hidden_size) is dynamic. "
+                                     "\n`R` input onnx shape {num_directions, "
+                                     "gates_count*hidden_size, hidden_size}: ",
+                                     ng_inputs.at(2).get_partial_shape());
+
+                        m_input_map[LSTMInput::LSTM_INPUT_B] = default_opset::Constant::create(
+                            m_input_map[LSTMInput::LSTM_INPUT_X].get_element_type(),
+                            Shape{m_dim_map[LSTMInputDimension::NUM_DIRECTIONS],
+                                  gates_count * m_dim_map[LSTMInputDimension::HIDDEN_SIZE]},
+                            std::vector<float>(m_dim_map[LSTMInputDimension::NUM_DIRECTIONS] *
+                                                   gates_count *
+                                                   m_dim_map[LSTMInputDimension::HIDDEN_SIZE],
                                                0.f));
                     }
-                    // The lengths of the sequences in a batch. Shape [batch_size]
+                    // `sequence_lens`- The lengths of the sequences in a batch.
+                    // Shape: [batch_size]
                     if (ng_inputs.size() > 4 && !ngraph::op::is_null(ng_inputs.at(4)))
                     {
-                        m_map[LSTMInput::LSTM_INPUT_SEQ_LENGTHS] = ng_inputs.at(4);
+                        m_input_map[LSTMInput::LSTM_INPUT_SEQ_LENGTHS] = ng_inputs.at(4);
                     }
                     else
                     {
-                        m_map[LSTMInput::LSTM_INPUT_SEQ_LENGTHS] =
+                        NGRAPH_CHECK(
+                            m_dim_map.count(LSTMInputDimension::BATCH_SIZE) &&
+                                m_dim_map.count(LSTMInputDimension::SEQ_LENGTH),
+                            "ONNX LSTM: Can't create default `sequence_lens` input, ",
+                            "because at least one of required dimensions "
+                            "(batch_size, seq_length) is dynamic. "
+                            "\n`X` input onnx shape {seq_length, batch_size, input_size} is ",
+                            ng_inputs.at(0).get_partial_shape());
+
+                        m_input_map[LSTMInput::LSTM_INPUT_SEQ_LENGTHS] =
                             default_opset::Constant::create(
                                 element::i32,
-                                Shape{batch_size},
+                                Shape{m_dim_map[LSTMInputDimension::BATCH_SIZE]},
                                 std::vector<std::int32_t>(
-                                    batch_size,
-                                    m_map[LSTMInput::LSTM_INPUT_X].get_shape().at(1)));
+                                    m_dim_map[LSTMInputDimension::BATCH_SIZE],
+                                    m_dim_map[LSTMInputDimension::SEQ_LENGTH]));
                     }
-                    // The initial value of the hidden.
-                    // Shape [num_directions, batch_size, hidden_size]
+                    // `initial_h` - The initial value of the hidden.
+                    // ONNX Shape: [num_directions, batch_size, hidden_size]
+                    // OpenVino Shape: [batch_size, num_directions, hidden_size]
                     if (ng_inputs.size() > 5 && !ngraph::op::is_null(ng_inputs.at(5)))
                     {
-                        m_map[LSTMInput::LSTM_INPUT_INIT_H] =
+                        m_input_map[LSTMInput::LSTM_INPUT_INIT_H] =
                             builder::opset1::reorder_axes(ng_inputs.at(5), {1, 0, 2});
                     }
                     else
                     {
-                        m_map[LSTMInput::LSTM_INPUT_INIT_H] = default_opset::Constant::create(
-                            element::f32,
-                            Shape{batch_size, num_directions, hidden_size},
-                            std::vector<float>(batch_size * num_directions * hidden_size, 0.f));
+                        NGRAPH_CHECK(
+                            m_dim_map.count(LSTMInputDimension::BATCH_SIZE) &&
+                                m_dim_map.count(LSTMInputDimension::NUM_DIRECTIONS) &&
+                                m_dim_map.count(LSTMInputDimension::HIDDEN_SIZE),
+                            "ONNX LSTM: Can't create default `initial_h` input, "
+                            "because at least one of required dimensions "
+                            "(batch_size, num_directions, hidden_size) is dynamic. "
+                            "\n`X` input onnx shape {seq_length, batch_size, input_size} is ",
+                            ng_inputs.at(0).get_partial_shape(),
+                            "\n`R` input onnx shape {num_directions, 4*hidden_size, "
+                            "hidden_size} is ",
+                            ng_inputs.at(2).get_partial_shape());
+
+                        m_input_map[LSTMInput::LSTM_INPUT_INIT_H] =
+                            default_opset::Constant::create(
+                                m_input_map[LSTMInput::LSTM_INPUT_X].get_element_type(),
+                                Shape{m_dim_map[LSTMInputDimension::BATCH_SIZE],
+                                      m_dim_map[LSTMInputDimension::NUM_DIRECTIONS],
+                                      m_dim_map[LSTMInputDimension::HIDDEN_SIZE]},
+                                std::vector<float>(
+                                    m_dim_map[LSTMInputDimension::BATCH_SIZE] *
+                                        m_dim_map[LSTMInputDimension::NUM_DIRECTIONS] *
+                                        m_dim_map[LSTMInputDimension::HIDDEN_SIZE],
+                                    0.f));
                     }
-                    // The initial value of the cell.
-                    // Shape [num_directions, batch_size, hidden_size]
+                    // `initial_c` - The initial value of the cell.
+                    // ONNX Shape: [num_directions, batch_size, hidden_size]
+                    // OpenVino Shape: [batch_size, num_directions, hidden_size]
                     if (ng_inputs.size() > 6 && !ngraph::op::is_null(ng_inputs.at(6)))
                     {
-                        m_map[LSTMInput::LSTM_INPUT_INIT_C] =
+                        m_input_map[LSTMInput::LSTM_INPUT_INIT_C] =
                             builder::opset1::reorder_axes(ng_inputs.at(6), {1, 0, 2});
                     }
                     else
                     {
-                        m_map[LSTMInput::LSTM_INPUT_INIT_C] = default_opset::Constant::create(
-                            element::f32,
-                            Shape{batch_size, num_directions, hidden_size},
-                            std::vector<float>(batch_size * num_directions * hidden_size, 0.f));
+                        NGRAPH_CHECK(
+                            m_dim_map.count(LSTMInputDimension::BATCH_SIZE) &&
+                                m_dim_map.count(LSTMInputDimension::NUM_DIRECTIONS) &&
+                                m_dim_map.count(LSTMInputDimension::HIDDEN_SIZE),
+                            "ONNX LSTM: Can't create default `initial_c` input, "
+                            "because at least one of required dimensions "
+                            "(batch_size, num_directions, hidden_size) is dynamic. "
+                            "\n`X` input onnx shape {seq_length, batch_size, input_size} is ",
+                            ng_inputs.at(0).get_partial_shape(),
+                            "\n`R` input onnx shape {num_directions, 4*hidden_size, "
+                            "hidden_size} is ",
+                            ng_inputs.at(2).get_partial_shape());
+
+                        m_input_map[LSTMInput::LSTM_INPUT_INIT_C] =
+                            default_opset::Constant::create(
+                                m_input_map[LSTMInput::LSTM_INPUT_X].get_element_type(),
+                                Shape{m_dim_map[LSTMInputDimension::BATCH_SIZE],
+                                      m_dim_map[LSTMInputDimension::NUM_DIRECTIONS],
+                                      m_dim_map[LSTMInputDimension::HIDDEN_SIZE]},
+                                std::vector<float>(
+                                    m_dim_map[LSTMInputDimension::BATCH_SIZE] *
+                                        m_dim_map[LSTMInputDimension::NUM_DIRECTIONS] *
+                                        m_dim_map[LSTMInputDimension::HIDDEN_SIZE],
+                                    0.f));
                     }
-                    // The weight tensor for peepholes. Shape [num_directions, 3*hidde_size]
+                    // `P` - The weight tensor for peepholes.
                     // Peepholes input is not supported by OpenVino
                     if (ng_inputs.size() > 7 && !ngraph::op::is_null(ng_inputs.at(7)))
                     {
@@ -176,8 +297,9 @@ namespace ngraph
                     }
                 }

-                Output<ngraph::Node>& at(const LSTMInput& key) { return m_map.at(key); }
-                container_type m_map;
+                Output<ngraph::Node>& at(const LSTMInput& key) { return m_input_map.at(key); }
+                std::map<LSTMInput, Output<ngraph::Node>> m_input_map;
+                std::map<LSTMInputDimension, size_t> m_dim_map;
             };

             // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ATTRIBUTES PARSING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -235,3 +235,4 @@ xfail_issue_39663 = xfail_test(reason="RuntimeError: Unsupported primitive of ty
 xfail_issue_41815 = xfail_test(reason="RuntimeError: Unsupported dynamic ops: v5::NonMaxSuppression casted "
                                       "(yolo_evaluation_layer_1/concat_6:0_btc[0]:f32{1,2535,4},")
 xfail_issue_41894 = xfail_test(reason="CPU plugin elementwise computation missmatch")
+xfail_issue_42818 = xfail_test(reason="AssertionError: This model has no test data")
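For context, xfail_test is the small helper these test files use to build known-failure markers. A minimal sketch of how the new xfail_issue_42818 marker behaves, assuming the helper simply wraps pytest.mark.xfail; only the marker name and reason string come from this commit, the wrapper body and the directly decorated test are illustrative (in test_zoo_models.py the marker is attached to the generated model-zoo test by name rather than as a decorator):

import pytest

def xfail_test(reason):
    # assumed shape of the helper defined in the tests package
    return pytest.mark.xfail(reason=reason)

xfail_issue_42818 = xfail_test(reason="AssertionError: This model has no test data")

@xfail_issue_42818
def test_MSFT_opset9_LSTM_Seq_lens_unpacked_model_cpu():
    # the model-zoo case currently fails because the model ships without test data
    raise AssertionError("This model has no test data")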
@@ -38,7 +38,8 @@ from tests import (
                    xfail_issue_39669,
                    xfail_issue_38726,
                    xfail_issue_40686,
-                   xfail_issue_42779)
+                   xfail_issue_42779,
+                   xfail_issue_42818)

 MODELS_ROOT_DIR = tests.MODEL_ZOO_DIR

@@ -123,7 +124,6 @@ if len(zoo_models) > 0:
         (xfail_issue_42297, "test_MSFT_opset10_mlperf_ssd_mobilenet_300_ssd_mobilenet_v1_coco_2018_01_28_cpu"),
         (xfail_issue_41814, "test_MSFT_opset10_mlperf_ssd_resnet34_1200_ssd_resnet34_mAP_20.2_cpu"),
         (xfail_issue_37957, "test_MSFT_opset10_mask_rcnn_keras_mask_rcnn_keras_cpu"),
-        (xfail_issue_36465, "test_MSFT_opset9_LSTM_Seq_lens_unpacked_model_cpu"),
     ]
     for test_case in import_xfail_list:
         xfail, test_name = test_case
@@ -182,7 +182,9 @@ if len(zoo_models) > 0:
         (xfail_issue_34323, "test_MSFT_opset10_BERT_Squad_bertsquad10_cpu"),

         (xfail_issue_41815, "test_MSFT_opset11_tinyyolov3_yolov3_tiny_cpu"),
-        (xfail_issue_41815, "test_MSFT_opset10_yolov3_yolov3_cpu")
+        (xfail_issue_41815, "test_MSFT_opset10_yolov3_yolov3_cpu"),
+
+        (xfail_issue_42818, "test_MSFT_opset9_LSTM_Seq_lens_unpacked_model_cpu"),
     ]
     for test_case in import_xfail_list + execution_xfail_list:
         xfail, test_name = test_case

New file: onnx/dynamic_shapes/lstm_dyn_batch_seq.prototxt
@@ -0,0 +1,278 @@
ir_version: 7
producer_name: "onnx-importer-test"
graph {
  node {
    output: "W"
    op_type: "Constant"
    attribute {
      name: "value"
      t {
        dims: 1
        dims: 12
        dims: 1
        data_type: 1
        float_data: 0.31403765082359314
        float_data: -0.16793324053287506
        float_data: 1.3882579803466797
        float_data: -0.690295398235321
        float_data: -0.39940449595451355
        float_data: -0.7833511233329773
        float_data: -0.30992957949638367
        float_data: 0.35575729608535767
        float_data: -0.46826308965682983
        float_data: 1.1741459369659424
        float_data: -2.4147889614105225
        float_data: -0.42783254384994507
        name: "const_tensor_W"
      }
      type: TENSOR
    }
  }
  node {
    output: "R"
    op_type: "Constant"
    attribute {
      name: "value"
      t {
        dims: 1
        dims: 12
        dims: 3
        data_type: 1
        float_data: 0.8490582704544067
        float_data: 0.45121243596076965
        float_data: -1.179901361465454
        float_data: 0.13536448776721954
        float_data: 0.813286542892456
        float_data: 0.6017516255378723
        float_data: 0.4847572445869446
        float_data: -1.2136037349700928
        float_data: 0.16383321583271027
        float_data: 1.5106260776519775
        float_data: 1.1177502870559692
        float_data: 0.2358246147632599
        float_data: 0.8490582704544067
        float_data: 0.45121243596076965
        float_data: -1.179901361465454
        float_data: 0.13536448776721954
        float_data: 0.813286542892456
        float_data: 0.6017516255378723
        float_data: 0.4847572445869446
        float_data: -1.2136037349700928
        float_data: 0.16383321583271027
        float_data: 1.5106260776519775
        float_data: 1.1177502870559692
        float_data: 0.2358246147632599
        float_data: 0.8490582704544067
        float_data: 0.45121243596076965
        float_data: -1.179901361465454
        float_data: 0.13536448776721954
        float_data: 0.813286542892456
        float_data: 0.6017516255378723
        float_data: 0.4847572445869446
        float_data: -1.2136037349700928
        float_data: 0.16383321583271027
        float_data: 1.5106260776519775
        float_data: 1.1177502870559692
        float_data: 0.2358246147632599
        name: "const_tensor"
      }
      type: TENSOR
    }
  }
  node {
    output: "B"
    op_type: "Constant"
    attribute {
      name: "value"
      t {
        dims: 1
        dims: 24
        data_type: 1
        float_data: 0.53367018699646
        float_data: 1.6593654155731201
        float_data: -1.1500109434127808
        float_data: 0.0034221699461340904
        float_data: 0.7993710041046143
        float_data: 0.43780383467674255
        float_data: -0.5508262515068054
        float_data: 1.0774186849594116
        float_data: -0.606513500213623
        float_data: 0.6434063911437988
        float_data: -1.5693753957748413
        float_data: 1.4923384189605713
        float_data: 1.1554348468780518
        float_data: -1.328158974647522
        float_data: 0.24995532631874084
        float_data: 0.15112681686878204
        float_data: -0.3469875752925873
        float_data: -0.100888192653656
        float_data: -0.2931624948978424
        float_data: -0.4731961488723755
        float_data: 0.6616785526275635
        float_data: -1.1646721363067627
        float_data: -0.09588219225406647
        float_data: 0.5212928056716919
        name: "const_tensor"
      }
      type: TENSOR
    }
  }
  node {
    input: "X"
    input: "W"
    input: "R"
    input: "B"
    input: "sequence_lens"
    input: "initial_h"
    input: "initial_c"
    output: "Y"
    output: "Y_h"
    output: "Y_c"
    op_type: "LSTM"
    attribute {
      name: "direction"
      s: "forward"
      type: STRING
    }
    attribute {
      name: "hidden_size"
      i: 3
      type: INT
    }
  }
  name: "test-model-lstm"
  input {
    name: "X"
    type {
      tensor_type {
        elem_type: 1
        shape {
          dim {
            dim_value: -1
          }
          dim {
            dim_value: -1
          }
          dim {
            dim_value: 1
          }
        }
      }
    }
  }
  input {
    name: "sequence_lens"
    type {
      tensor_type {
        elem_type: 6
        shape {
          dim {
            dim_value: -1
          }
        }
      }
    }
  }
  input {
    name: "initial_h"
    type {
      tensor_type {
        elem_type: 1
        shape {
          dim {
            dim_value: 1
          }
          dim {
            dim_value: -1
          }
          dim {
            dim_value: 3
          }
        }
      }
    }
  }
  input {
    name: "initial_c"
    type {
      tensor_type {
        elem_type: 1
        shape {
          dim {
            dim_value: 1
          }
          dim {
            dim_value: -1
          }
          dim {
            dim_value: 3
          }
        }
      }
    }
  }
  output {
    name: "Y"
    type {
      tensor_type {
        elem_type: 1
        shape {
          dim {
            dim_value: -1
          }
          dim {
            dim_value: 1
          }
          dim {
            dim_value: -1
          }
          dim {
            dim_value: 3
          }
        }
      }
    }
  }
  output {
    name: "Y_h"
    type {
      tensor_type {
        elem_type: 1
        shape {
          dim {
            dim_value: 1
          }
          dim {
            dim_value: -1
          }
          dim {
            dim_value: 3
          }
        }
      }
    }
  }
  output {
    name: "Y_c"
    type {
      tensor_type {
        elem_type: 1
        shape {
          dim {
            dim_value: 1
          }
          dim {
            dim_value: -1
          }
          dim {
            dim_value: 3
          }
        }
      }
    }
  }
}
opset_import {
  domain: ""
  version: 12
}
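The prototxt above was added as a hand-written test artifact. A hedged sketch of how a similar dynamic-shape LSTM model could be produced with the onnx Python helpers; the tensor names, random weights, and the simplification to X/W/R-only inputs are illustrative and not how the committed file was generated:

import numpy as np
from onnx import TensorProto, checker, helper, numpy_helper

hidden_size, input_size, num_directions = 3, 1, 1

# Static weights, playing the role of the Constant nodes in the test model.
W = numpy_helper.from_array(
    np.random.randn(num_directions, 4 * hidden_size, input_size).astype(np.float32), name="W")
R = numpy_helper.from_array(
    np.random.randn(num_directions, 4 * hidden_size, hidden_size).astype(np.float32), name="R")

lstm = helper.make_node("LSTM", inputs=["X", "W", "R"], outputs=["Y", "Y_h", "Y_c"],
                        hidden_size=hidden_size, direction="forward")

graph = helper.make_graph(
    [lstm], "test-model-lstm",
    inputs=[helper.make_tensor_value_info(
        "X", TensorProto.FLOAT, ["seq_length", "batch_size", input_size])],
    outputs=[
        helper.make_tensor_value_info(
            "Y", TensorProto.FLOAT, ["seq_length", num_directions, "batch_size", hidden_size]),
        helper.make_tensor_value_info(
            "Y_h", TensorProto.FLOAT, [num_directions, "batch_size", hidden_size]),
        helper.make_tensor_value_info(
            "Y_c", TensorProto.FLOAT, [num_directions, "batch_size", hidden_size]),
    ],
    initializer=[W, R])

model = helper.make_model(graph, producer_name="onnx-importer-test",
                          opset_imports=[helper.make_opsetid("", 12)])
checker.check_model(model)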
@@ -491,6 +491,27 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_model_lstm_mixed_seq_reverse)
     test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
 }

+NGRAPH_TEST(${BACKEND_NAME}, onnx_model_import_only_lstm_dynamic_batch_seq_all_inputs)
+{
+    auto function = onnx_import::import_onnx_model(
+        file_util::path_join(SERIALIZED_ZOO, "onnx/dynamic_shapes/lstm_dyn_batch_seq.prototxt"));
+
+    auto batch_size = Dimension::dynamic();
+    auto seq_length = Dimension::dynamic();
+    int64_t hidden_size = 3;
+    int64_t num_directions = 1;
+    auto Y_expected_output = PartialShape{batch_size, num_directions, seq_length, hidden_size};
+    auto Y_h_expected_output = PartialShape{num_directions, batch_size, hidden_size};
+    auto Y_c_expected_output = PartialShape{num_directions, batch_size, hidden_size};
+
+    EXPECT_EQ(function->get_output_size(), 3);
+    EXPECT_EQ(function->get_output_partial_shape(0), Y_expected_output);
+    EXPECT_EQ(function->get_output_partial_shape(1), Y_h_expected_output);
+    EXPECT_EQ(function->get_output_partial_shape(2), Y_c_expected_output);
+
+    EXPECT_EQ(count_ops_of_type<op::v5::LSTMSequence>(function), 1);
+}
+
 // RNNLikeSequenceOp test fixture for test setup reuse
 class GRUSequenceOp : public testing::Test
 {