[nGraph] Add opset5 ops to Python nGraph (#2833)

Jan Iwaszkiewicz 2020-10-28 10:18:14 +01:00 committed by GitHub
parent 1689634574
commit 11801eadb3
6 changed files with 291 additions and 2 deletions

View File

@ -77,7 +77,7 @@ A single cell in the sequence is implemented in the same way as in <a href="#GRU
**Outputs**
- * **1**: `Y` – 3D tensor of type *T1* `[batch_size, num_directions, seq_len, hidden_size]`, concatenation of all the intermediate output values of the hidden state.
+ * **1**: `Y` - 4D tensor of type *T1* `[batch_size, num_directions, seq_len, hidden_size]`, concatenation of all the intermediate output values of the hidden state.
  * **2**: `Ho` - 3D tensor of type *T1* `[batch_size, num_directions, hidden_size]`, the last output value of the hidden state.

View File

@ -69,7 +69,7 @@ A single cell in the sequence is implemented in the same way as in <a href="#RNN
**Outputs**
- * **1**: `Y` – 3D tensor of type *T1* `[batch_size, num_directions, seq_len, hidden_size]`, concatenation of all the intermediate output values of the hidden state.
+ * **1**: `Y` - 4D tensor of type *T1* `[batch_size, num_directions, seq_len, hidden_size]`, concatenation of all the intermediate output values of the hidden state.
  * **2**: `Ho` - 3D tensor of type *T1* `[batch_size, num_directions, hidden_size]`, the last output value of the hidden state.
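
With this commit both sequence ops thus gain an explicit `num_directions` axis on their first output. As a quick orientation sketch (hypothetical NumPy shapes only, not the nGraph API), indexing the documented 4D `Y` tensor looks like this:

    import numpy as np

    # Hypothetical shape values matching the layout documented above.
    batch_size, num_directions, seq_len, hidden_size = 2, 1, 8, 32
    Y = np.zeros((batch_size, num_directions, seq_len, hidden_size), dtype=np.float32)

    # Y[b, d, t, :] is the hidden state of batch element b for direction d
    # after time step t. For a forward run over full-length sequences, the
    # last time step coincides with the Ho output:
    Ho = Y[:, :, -1, :]
    assert Ho.shape == (batch_size, num_directions, hidden_size)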

View File

@ -85,6 +85,7 @@ from ngraph.opset5 import grn
from ngraph.opset5 import group_convolution
from ngraph.opset5 import group_convolution_backprop_data
from ngraph.opset5 import gru_cell
from ngraph.opset5 import gru_sequence
from ngraph.opset5 import hard_sigmoid
from ngraph.opset5 import hsigmoid
from ngraph.opset5 import hswish
@ -97,6 +98,7 @@ from ngraph.opset5 import logical_not
from ngraph.opset5 import logical_or
from ngraph.opset5 import logical_xor
from ngraph.opset5 import log_softmax
from ngraph.opset5 import loop
from ngraph.opset5 import lrn
from ngraph.opset5 import lstm_cell
from ngraph.opset5 import lstm_sequence
@ -140,6 +142,7 @@ from ngraph.opset5 import reshape
from ngraph.opset5 import result
from ngraph.opset5 import reverse_sequence
from ngraph.opset5 import rnn_cell
from ngraph.opset5 import rnn_sequence
from ngraph.opset5 import roi_align
from ngraph.opset5 import roi_pooling
from ngraph.opset5 import round

View File

@ -72,6 +72,7 @@ from ngraph.opset1.ops import grn
from ngraph.opset1.ops import group_convolution
from ngraph.opset1.ops import group_convolution_backprop_data
from ngraph.opset3.ops import gru_cell
from ngraph.opset5.ops import gru_sequence
from ngraph.opset1.ops import hard_sigmoid
from ngraph.opset5.ops import hsigmoid
from ngraph.opset4.ops import hswish
@ -84,6 +85,7 @@ from ngraph.opset1.ops import logical_not
from ngraph.opset1.ops import logical_or
from ngraph.opset1.ops import logical_xor
from ngraph.opset5.ops import log_softmax
from ngraph.opset5.ops import loop
from ngraph.opset1.ops import lrn
from ngraph.opset4.ops import lstm_cell
from ngraph.opset1.ops import lstm_sequence
@ -127,6 +129,7 @@ from ngraph.opset1.ops import reshape
from ngraph.opset1.ops import result
from ngraph.opset1.ops import reverse_sequence
from ngraph.opset3.ops import rnn_cell
from ngraph.opset5.ops import rnn_sequence
from ngraph.opset3.ops import roi_align
from ngraph.opset2.ops import roi_pooling
from ngraph.opset5.ops import round

View File

@ -140,3 +140,136 @@ def hsigmoid(data: NodeInput, name: Optional[str] = None,) -> Node:
    :return: The new node which performs HSigmoid.
    """
    return _get_node_factory_opset5().create("HSigmoid", as_nodes(data), {})


@nameable_op
def gru_sequence(
    X: NodeInput,
    H_t: NodeInput,
    sequence_lengths: NodeInput,
    W: NodeInput,
    R: NodeInput,
    B: NodeInput,
    hidden_size: int,
    direction: str,
    activations: List[str] = None,
    activations_alpha: List[float] = None,
    activations_beta: List[float] = None,
    clip: float = 0.0,
    linear_before_reset: bool = False,
    name: Optional[str] = None,
) -> Node:
    """Return a node which performs GRUSequence.

    :param X: 3D tensor, input data.
    :param H_t: 3D tensor, input hidden state data.
    :param sequence_lengths: 1D tensor, specifies sequence lengths
                             for each batch element.
    :param W: 3D tensor, weights matrix.
    :param R: 3D tensor, recurrence weights matrix.
    :param B: 2D tensor, sum of biases.
    :param hidden_size: Size of the hidden state.
    :param direction: Specify if the RNN is forward, reverse, or bidirectional.
    :param activations: Activation functions for gates.
    :param activations_alpha: Attributes of function; applicability and meaning
                              of these attributes depend on the chosen activation function.
    :param activations_beta: Attributes of function; applicability and meaning
                             of these attributes depend on the chosen activation function.
    :param clip: Specifies bound values *[-clip, clip]* for tensor clipping.
    :param linear_before_reset: During the computation of the output of
                                the hidden gate, apply the linear transformation
                                before multiplying by the output of the reset gate.
    :param name: An optional name of the output node.
    :return: The new node which performs GRUSequence.
    """
    if activations is None:
        activations = ["sigmoid", "tanh"]
    if activations_alpha is None:
        activations_alpha = []
    if activations_beta is None:
        activations_beta = []

    inputs = as_nodes(X, H_t, sequence_lengths, W, R, B)
    attributes = {
        "hidden_size": hidden_size,
        "activations": activations,
        "activations_alpha": activations_alpha,
        "activations_beta": activations_beta,
        "clip": clip,
        "linear_before_reset": linear_before_reset,
    }
    return _get_node_factory_opset5().create("GRUSequence", inputs, attributes)
@nameable_op
def rnn_sequence(
    X: NodeInput,
    H_t: NodeInput,
    sequence_lengths: NodeInput,
    W: NodeInput,
    R: NodeInput,
    B: NodeInput,
    hidden_size: int,
    direction: str,
    activations: List[str] = None,
    activations_alpha: List[float] = None,
    activations_beta: List[float] = None,
    clip: float = 0.0,
    name: Optional[str] = None,
) -> Node:
    """Return a node which performs RNNSequence.

    :param X: 3D tensor, input data.
    :param H_t: 3D tensor, input hidden state data.
    :param sequence_lengths: 1D tensor, specifies sequence lengths
                             for each batch element.
    :param W: 3D tensor, weights matrix.
    :param R: 3D tensor, recurrence weights matrix.
    :param B: 2D tensor, sum of biases.
    :param hidden_size: Size of the hidden state.
    :param direction: Specify if the RNN is forward, reverse, or bidirectional.
    :param activations: Activation functions for gates.
    :param activations_alpha: Attributes of function; applicability and meaning
                              of these attributes depend on the chosen activation function.
    :param activations_beta: Attributes of function; applicability and meaning
                             of these attributes depend on the chosen activation function.
    :param clip: Specifies bound values *[-clip, clip]* for tensor clipping.
    :param name: An optional name of the output node.
    :return: The new node which performs RNNSequence.
    """
    if activations is None:
        activations = ["tanh"]
    if activations_alpha is None:
        activations_alpha = []
    if activations_beta is None:
        activations_beta = []

    inputs = as_nodes(X, H_t, sequence_lengths, W, R, B)
    attributes = {
        "hidden_size": hidden_size,
        "activations": activations,
        "activations_alpha": activations_alpha,
        "activations_beta": activations_beta,
        "clip": clip,
    }
    return _get_node_factory_opset5().create("RNNSequence", inputs, attributes)
@nameable_op
def loop(
    trip_count: NodeInput,
    execution_condition: NodeInput,
    name: Optional[str] = None,
) -> Node:
    """Return a node which performs Loop.

    :param trip_count: A scalar or 1D tensor with 1 element specifying
                       the maximum number of iterations.
    :param execution_condition: A scalar or 1D tensor with 1 element
                                specifying whether to execute the first iteration or not.
    :param name: An optional name of the output node.
    :return: The new node which performs Loop.
    """
    inputs = as_nodes(trip_count, execution_condition)
    return _get_node_factory_opset5().create("Loop", inputs)

View File

@ -21,6 +21,8 @@ import ngraph as ng
import ngraph.opset1 as ng_opset1
from ngraph.impl import Type
from tests import skip_segfault

np_types = [np.float32, np.int32]
integral_np_types = [
    np.int8,
@ -538,6 +540,154 @@ def test_gru_cell_operator():
    assert list(node_param.get_output_shape(0)) == expected_shape


def test_gru_sequence():
    batch_size = 2
    input_size = 16
    hidden_size = 32
    seq_len = 8
    seq_lengths = [seq_len] * batch_size
    num_directions = 1
    direction = "FORWARD"

    X_shape = [batch_size, seq_len, input_size]
    H_t_shape = [batch_size, num_directions, hidden_size]
    W_shape = [num_directions, 3 * hidden_size, input_size]
    R_shape = [num_directions, 3 * hidden_size, hidden_size]
    B_shape = [num_directions, 3 * hidden_size]

    parameter_X = ng.parameter(X_shape, name="X", dtype=np.float32)
    parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=np.float32)
    parameter_W = ng.parameter(W_shape, name="W", dtype=np.float32)
    parameter_R = ng.parameter(R_shape, name="R", dtype=np.float32)
    parameter_B = ng.parameter(B_shape, name="B", dtype=np.float32)

    expected_shape_y = [batch_size, num_directions, seq_len, hidden_size]
    expected_shape_h = [batch_size, num_directions, hidden_size]

    node_default = ng.gru_sequence(
        parameter_X,
        parameter_H_t,
        seq_lengths,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        direction,
    )

    assert node_default.get_type_name() == "GRUSequence"
    assert node_default.get_output_size() == 2
    assert list(node_default.get_output_shape(0)) == expected_shape_y
    assert list(node_default.get_output_shape(1)) == expected_shape_h

    activations = ["tanh", "relu"]
    activations_alpha = [1.0, 2.0]
    activations_beta = [1.0, 2.0]
    clip = 0.5
    linear_before_reset = True

    # If *linear_before_reset* is set to True, the B tensor shape must be
    # [num_directions, 4 * hidden_size].
    B_shape = [num_directions, 4 * hidden_size]
    parameter_B = ng.parameter(B_shape, name="B", dtype=np.float32)

    node_param = ng.gru_sequence(
        parameter_X,
        parameter_H_t,
        seq_lengths,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        direction,
        activations,
        activations_alpha,
        activations_beta,
        clip,
        linear_before_reset,
    )

    assert node_param.get_type_name() == "GRUSequence"
    assert node_param.get_output_size() == 2
    assert list(node_param.get_output_shape(0)) == expected_shape_y
    assert list(node_param.get_output_shape(1)) == expected_shape_h


def test_rnn_sequence():
    batch_size = 2
    input_size = 16
    hidden_size = 32
    seq_len = 8
    seq_lengths = [seq_len] * batch_size
    num_directions = 1
    direction = "FORWARD"

    X_shape = [batch_size, seq_len, input_size]
    H_t_shape = [batch_size, num_directions, hidden_size]
    W_shape = [num_directions, hidden_size, input_size]
    R_shape = [num_directions, hidden_size, hidden_size]
    B_shape = [num_directions, hidden_size]

    parameter_X = ng.parameter(X_shape, name="X", dtype=np.float32)
    parameter_H_t = ng.parameter(H_t_shape, name="H_t", dtype=np.float32)
    parameter_W = ng.parameter(W_shape, name="W", dtype=np.float32)
    parameter_R = ng.parameter(R_shape, name="R", dtype=np.float32)
    parameter_B = ng.parameter(B_shape, name="B", dtype=np.float32)

    expected_shape_y = [batch_size, num_directions, seq_len, hidden_size]
    expected_shape_h = [batch_size, num_directions, hidden_size]

    node_default = ng.rnn_sequence(
        parameter_X,
        parameter_H_t,
        seq_lengths,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        direction,
    )

    assert node_default.get_type_name() == "RNNSequence"
    assert node_default.get_output_size() == 2
    assert list(node_default.get_output_shape(0)) == expected_shape_y
    assert list(node_default.get_output_shape(1)) == expected_shape_h

    activations = ["relu"]
    activations_alpha = [2.0]
    activations_beta = [1.0]
    clip = 0.5

    node_param = ng.rnn_sequence(
        parameter_X,
        parameter_H_t,
        seq_lengths,
        parameter_W,
        parameter_R,
        parameter_B,
        hidden_size,
        direction,
        activations,
        activations_alpha,
        activations_beta,
        clip,
    )

    assert node_param.get_type_name() == "RNNSequence"
    assert node_param.get_output_size() == 2
    assert list(node_param.get_output_shape(0)) == expected_shape_y
    assert list(node_param.get_output_shape(1)) == expected_shape_h


@skip_segfault
def test_loop():
    trip_count = 8
    condition = True

    node_default = ng.loop(trip_count, condition)

    assert node_default.get_type_name() == "Loop"


def test_roi_pooling():
    inputs = ng.parameter([2, 3, 4, 5], dtype=np.float32)
    coords = ng.parameter([150, 5], dtype=np.float32)