Softmax-8 Python API (#9057)

* Added ngraph API for Softmax-8

* Updated comments
Yegor Kruglov 2021-12-14 16:25:23 +03:00 committed by GitHub
parent a64d694ded
commit 5c0b125554
6 changed files with 52 additions and 4 deletions


@@ -146,7 +146,7 @@ from ngraph.opset1.ops import sign
from ngraph.opset1.ops import sin
from ngraph.opset1.ops import sinh
from ngraph.opset8.ops import slice
-from ngraph.opset1.ops import softmax
+from ngraph.opset8.ops import softmax
from ngraph.opset4.ops import softplus
from ngraph.opset2.ops import space_to_batch
from ngraph.opset1.ops import space_to_depth


@@ -777,3 +777,16 @@ def detection_output(
    inputs = as_nodes(*inputs)
    return _get_node_factory_opset8().create("DetectionOutput", inputs, attrs)


@nameable_op
def softmax(data: NodeInput, axis: int, name: Optional[str] = None) -> Node:
    """Apply the softmax operation along the given axis of the input tensor.

    @param data: The tensor providing input data.
    @param axis: The axis along which softmax is calculated. Can be positive or negative.
    @param name: Optional name for the node.
    @return The new node performing the softmax operation.
    """
    return _get_node_factory_opset8().create("Softmax", [as_node(data)], {"axis": axis})

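A minimal usage sketch for the new binding (illustrative only, not part of the PR; `parameter` is assumed to be re-exported by the top-level ngraph namespace, like the other opset1 ops imported above):

import numpy as np
import ngraph as ng

# Build a Softmax-8 node; unlike the opset1 binding, the axis may be negative.
data = ng.parameter([2, 3], dtype=np.float32, name="data")
node = ng.softmax(data, axis=-1)
print(node.get_type_name())  # "Softmax"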

@@ -146,7 +146,7 @@ from openvino.runtime.opset1.ops import sign
from openvino.runtime.opset1.ops import sin
from openvino.runtime.opset1.ops import sinh
from openvino.runtime.opset8.ops import slice
-from openvino.runtime.opset1.ops import softmax
+from openvino.runtime.opset8.ops import softmax
from openvino.runtime.opset4.ops import softplus
from openvino.runtime.opset2.ops import space_to_batch
from openvino.runtime.opset1.ops import space_to_depth


@@ -778,3 +778,16 @@ def detection_output(
    inputs = as_nodes(*inputs)
    return _get_node_factory_opset8().create("DetectionOutput", inputs, attrs)


@nameable_op
def softmax(data: NodeInput, axis: int, name: Optional[str] = None) -> Node:
    """Apply the softmax operation along the given axis of the input tensor.

    @param data: The tensor providing input data.
    @param axis: The axis along which softmax is calculated. Can be positive or negative.
    @param name: Optional name for the node.
    @return The new node performing the softmax operation.
    """
    return _get_node_factory_opset8().create("Softmax", [as_node(data)], {"axis": axis})

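The same sketch against the new openvino.runtime namespace (illustrative only; the tests below import this module as `ov`):

import numpy as np
import openvino.runtime.opset8 as ov

# softmax here resolves to the Softmax-8 binding added above.
data = ov.parameter([2, 3], dtype=np.float32, name="data")
node = ov.softmax(data, axis=-1)
print(node.get_type_name())  # "Softmax"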

@@ -99,7 +99,7 @@ def test_sigmoid():
    assert np.allclose(result, expected)


-def test_softmax():
+def test_softmax_positive_axis():
    axis = 1
    input_tensor = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)
@@ -110,6 +110,17 @@ def test_softmax():
    assert np.allclose(result, expected)


def test_softmax_negative_axis():
    axis = -1
    input_tensor = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)

    result = run_op_node([input_tensor], ov.softmax, axis)

    expected = [[0.09003056, 0.24472842, 0.6652409], [0.09003056, 0.24472842, 0.6652409]]

    assert np.allclose(result, expected)


def test_erf():
    input_tensor = np.array([-1.0, 0.0, 1.0, 2.5, 3.14, 4.0], dtype=np.float32)
    expected = [-0.842701, 0.0, 0.842701, 0.999593, 0.999991, 1.0]

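For reference, the expected values in these tests follow directly from the softmax definition and can be reproduced in plain NumPy (a sketch, not part of the PR):

import numpy as np

# softmax(x) = exp(x - max(x)) / sum(exp(x - max(x))), taken along the axis.
x = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)
e = np.exp(x - x.max(axis=-1, keepdims=True))
print(e / e.sum(axis=-1, keepdims=True))
# approx. [[0.0900, 0.2447, 0.6652], [0.0900, 0.2447, 0.6652]],
# matching the tests' expected values within np.allclose tolerance.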

@@ -99,7 +99,7 @@ def test_sigmoid():
    assert np.allclose(result, expected)


-def test_softmax():
+def test_softmax_positive_axis():
    axis = 1
    input_tensor = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)
@@ -110,6 +110,17 @@ def test_softmax():
    assert np.allclose(result, expected)


def test_softmax_negative_axis():
    axis = -1
    input_tensor = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)

    result = run_op_node([input_tensor], ng.softmax, axis)

    expected = [[0.09003056, 0.24472842, 0.6652409], [0.09003056, 0.24472842, 0.6652409]]

    assert np.allclose(result, expected)


def test_erf():
    input_tensor = np.array([-1.0, 0.0, 1.0, 2.5, 3.14, 4.0], dtype=np.float32)
    expected = [-0.842701, 0.0, 0.842701, 0.999593, 0.999991, 1.0]
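Since the test inputs are rank 2, axis=1 and axis=-1 address the same (last) dimension, which is why both tests expect identical results; a quick NumPy check with a hypothetical reference helper:

import numpy as np

def softmax_np(x, axis):
    # Numerically stable reference softmax (illustrative helper, not in the PR).
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

x = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)
assert np.allclose(softmax_np(x, 1), softmax_np(x, -1))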