diff --git a/src/bindings/python/src/compatibility/ngraph/opset8/__init__.py b/src/bindings/python/src/compatibility/ngraph/opset8/__init__.py
index 55b1a5a8777..f572e5785a7 100644
--- a/src/bindings/python/src/compatibility/ngraph/opset8/__init__.py
+++ b/src/bindings/python/src/compatibility/ngraph/opset8/__init__.py
@@ -146,7 +146,7 @@ from ngraph.opset1.ops import sign
 from ngraph.opset1.ops import sin
 from ngraph.opset1.ops import sinh
 from ngraph.opset8.ops import slice
-from ngraph.opset1.ops import softmax
+from ngraph.opset8.ops import softmax
 from ngraph.opset4.ops import softplus
 from ngraph.opset2.ops import space_to_batch
 from ngraph.opset1.ops import space_to_depth
diff --git a/src/bindings/python/src/compatibility/ngraph/opset8/ops.py b/src/bindings/python/src/compatibility/ngraph/opset8/ops.py
index d0f32933d66..78b02dd46c3 100644
--- a/src/bindings/python/src/compatibility/ngraph/opset8/ops.py
+++ b/src/bindings/python/src/compatibility/ngraph/opset8/ops.py
@@ -777,3 +777,16 @@ def detection_output(
     inputs = as_nodes(*inputs)
 
     return _get_node_factory_opset8().create("DetectionOutput", inputs, attrs)
+
+
+@nameable_op
+def softmax(data: NodeInput, axis: int, name: Optional[str] = None) -> Node:
+    """
+    Apply the softmax operation along the given axis of the input tensor.
+
+    @param data: The tensor providing input data.
+    @param axis: An axis along which Softmax should be calculated. Can be positive or negative.
+    @param name: Optional name for the node.
+    @return The new node with the softmax operation applied.
+    """
+    return _get_node_factory_opset8().create("Softmax", [as_node(data)], {"axis": axis})
diff --git a/src/bindings/python/src/openvino/runtime/opset8/__init__.py b/src/bindings/python/src/openvino/runtime/opset8/__init__.py
index 7b3352edcfc..aa5f080e626 100644
--- a/src/bindings/python/src/openvino/runtime/opset8/__init__.py
+++ b/src/bindings/python/src/openvino/runtime/opset8/__init__.py
@@ -146,7 +146,7 @@ from openvino.runtime.opset1.ops import sign
 from openvino.runtime.opset1.ops import sin
 from openvino.runtime.opset1.ops import sinh
 from openvino.runtime.opset8.ops import slice
-from openvino.runtime.opset1.ops import softmax
+from openvino.runtime.opset8.ops import softmax
 from openvino.runtime.opset4.ops import softplus
 from openvino.runtime.opset2.ops import space_to_batch
 from openvino.runtime.opset1.ops import space_to_depth
diff --git a/src/bindings/python/src/openvino/runtime/opset8/ops.py b/src/bindings/python/src/openvino/runtime/opset8/ops.py
index 71fe38c5f7b..7282c257b23 100644
--- a/src/bindings/python/src/openvino/runtime/opset8/ops.py
+++ b/src/bindings/python/src/openvino/runtime/opset8/ops.py
@@ -778,3 +778,16 @@ def detection_output(
     inputs = as_nodes(*inputs)
 
     return _get_node_factory_opset8().create("DetectionOutput", inputs, attrs)
+
+
+@nameable_op
+def softmax(data: NodeInput, axis: int, name: Optional[str] = None) -> Node:
+    """
+    Apply the softmax operation along the given axis of the input tensor.
+
+    @param data: The tensor providing input data.
+    @param axis: An axis along which Softmax should be calculated. Can be positive or negative.
+    @param name: Optional name for the node.
+    @return The new node with the softmax operation applied.
+    """
+    return _get_node_factory_opset8().create("Softmax", [as_node(data)], {"axis": axis})
diff --git a/src/bindings/python/tests/test_ngraph/test_ops_unary.py b/src/bindings/python/tests/test_ngraph/test_ops_unary.py
index b6a2f675ed3..4b6aa046b19 100644
--- a/src/bindings/python/tests/test_ngraph/test_ops_unary.py
+++ b/src/bindings/python/tests/test_ngraph/test_ops_unary.py
@@ -99,7 +99,7 @@ def test_sigmoid():
     assert np.allclose(result, expected)
 
 
-def test_softmax():
+def test_softmax_positive_axis():
     axis = 1
     input_tensor = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)
 
@@ -110,6 +110,17 @@
     assert np.allclose(result, expected)
 
 
+def test_softmax_negative_axis():
+    axis = -1
+    input_tensor = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)
+
+    result = run_op_node([input_tensor], ov.softmax, axis)
+
+    expected = [[0.09003056, 0.24472842, 0.6652409], [0.09003056, 0.24472842, 0.6652409]]
+
+    assert np.allclose(result, expected)
+
+
 def test_erf():
     input_tensor = np.array([-1.0, 0.0, 1.0, 2.5, 3.14, 4.0], dtype=np.float32)
     expected = [-0.842701, 0.0, 0.842701, 0.999593, 0.999991, 1.0]
diff --git a/src/bindings/python/tests_compatibility/test_ngraph/test_ops_unary.py b/src/bindings/python/tests_compatibility/test_ngraph/test_ops_unary.py
index 4eac079f0e3..ff1f48d85e3 100644
--- a/src/bindings/python/tests_compatibility/test_ngraph/test_ops_unary.py
+++ b/src/bindings/python/tests_compatibility/test_ngraph/test_ops_unary.py
@@ -99,7 +99,7 @@ def test_sigmoid():
     assert np.allclose(result, expected)
 
 
-def test_softmax():
+def test_softmax_positive_axis():
     axis = 1
     input_tensor = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)
 
@@ -110,6 +110,17 @@
     assert np.allclose(result, expected)
 
 
+def test_softmax_negative_axis():
+    axis = -1
+    input_tensor = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)
+
+    result = run_op_node([input_tensor], ng.softmax, axis)
+
+    expected = [[0.09003056, 0.24472842, 0.6652409], [0.09003056, 0.24472842, 0.6652409]]
+
+    assert np.allclose(result, expected)
+
+
 def test_erf():
     input_tensor = np.array([-1.0, 0.0, 1.0, 2.5, 3.14, 4.0], dtype=np.float32)
     expected = [-0.842701, 0.0, 0.842701, 0.999593, 0.999991, 1.0]