SoftPlus operation implementation (#1365)

This commit is contained in:
Anton Chetverikov
2020-08-25 19:36:39 +03:00
committed by GitHub
parent 7d1c5877ff
commit 84c7b048db
35 changed files with 639 additions and 140 deletions

View File

@@ -149,6 +149,7 @@ from ngraph.opset4 import sign
from ngraph.opset4 import sin
from ngraph.opset4 import sinh
from ngraph.opset4 import softmax
from ngraph.opset4 import softplus
from ngraph.opset4 import space_to_batch
from ngraph.opset4 import space_to_depth
from ngraph.opset4 import split

View File

@@ -137,6 +137,7 @@ from ngraph.opset1.ops import sign
from ngraph.opset1.ops import sin
from ngraph.opset1.ops import sinh
from ngraph.opset1.ops import softmax
from ngraph.opset4.ops import softplus
from ngraph.opset2.ops import space_to_batch
from ngraph.opset1.ops import space_to_depth
from ngraph.opset1.ops import split

View File

@@ -139,6 +139,16 @@ def non_max_suppression(
return _get_node_factory_opset4().create("NonMaxSuppression", inputs, attributes)
@nameable_op
def softplus(data: NodeInput, name: Optional[str] = None) -> Node:
    """Apply SoftPlus operation on each element of input tensor.

    SoftPlus is defined as log(exp(x) + 1), applied element-wise.

    :param data: The tensor providing input data.
    :param name: Optional name for the output node.
    :return: The new node with SoftPlus operation applied on each element.
    """
    # SoftPlus-4 takes no attributes, hence the empty attribute dict.
    return _get_node_factory_opset4().create("SoftPlus", as_nodes(data), {})
@nameable_op
def mish(data: NodeInput, name: Optional[str] = None,) -> Node:
"""Return a node which performs Mish.