SoftPlus operation implementation (#1365)
Committed via GitHub. Parent: 7d1c5877ff. Commit: 84c7b048db.
@@ -149,6 +149,7 @@ from ngraph.opset4 import sign
from ngraph.opset4 import sin
from ngraph.opset4 import sinh
from ngraph.opset4 import softmax
from ngraph.opset4 import softplus
from ngraph.opset4 import space_to_batch
from ngraph.opset4 import space_to_depth
from ngraph.opset4 import split
||||
@@ -137,6 +137,7 @@ from ngraph.opset1.ops import sign
from ngraph.opset1.ops import sin
from ngraph.opset1.ops import sinh
from ngraph.opset1.ops import softmax
from ngraph.opset4.ops import softplus
from ngraph.opset2.ops import space_to_batch
from ngraph.opset1.ops import space_to_depth
from ngraph.opset1.ops import split
@@ -139,6 +139,16 @@ def non_max_suppression(
|
||||
return _get_node_factory_opset4().create("NonMaxSuppression", inputs, attributes)
|
||||
|
||||
|
||||
@nameable_op
def softplus(data: NodeInput, name: Optional[str] = None) -> Node:
    """Apply SoftPlus operation on each element of the input tensor.

    Delegates to the opset4 node factory to build a ``SoftPlus`` node with
    no extra attributes; ``as_nodes`` normalizes the input into node form.

    :param data: The tensor providing input data.
    :param name: Optional output node name (applied by the ``@nameable_op``
                 decorator).
    :return: The new node with SoftPlus operation applied on each element.
    """
    return _get_node_factory_opset4().create("SoftPlus", as_nodes(data), {})
|
||||
|
||||
|
||||
@nameable_op
|
||||
def mish(data: NodeInput, name: Optional[str] = None,) -> Node:
|
||||
"""Return a node which performs Mish.
|
||||
|
||||
Reference in New Issue
Block a user