Add mxnet extractors (#1667)

* Add mxnet extractors for hyperbolic functions
iliya mironov 2020-08-07 14:36:41 +03:00 committed by GitHub
parent 6085c797d3
commit c8d74632f9
3 changed files with 86 additions and 38 deletions
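For context, the three symbols this commit adds coverage for are MXNet's elementwise inverse hyperbolic ops. Below is a minimal sketch of a graph that exercises them, assuming a standard `mxnet` install (the variable names are illustrative, not taken from the commit):

```python
# Illustrative only (assumes the mxnet package is installed): builds a
# symbol graph using the three ops this commit teaches the extractors to handle.
import mxnet as mx

data = mx.sym.Variable('data')
out = mx.sym.arcsinh(data) + mx.sym.arccosh(data) + mx.sym.arctanh(data)
```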

@@ -40,6 +40,46 @@ Standard MXNet\* symbols:
| Symbol Name in MXNet\*| Limitations|
| :----------| :----------|
| _Plus | No |
| _contrib_MultiBoxDetection | "force_suppress" = 1 is not supported, non-default variances are not supported |
| _contrib_MultiBoxPrior | No |
| _contrib_Proposal | No |
| _copy | Not needed for inference |
| _minus_scalar | No |
| _mul_scalar | No |
| _arange | No |
| _contrib_AdaptiveAvgPooling2D | Converted to the Average Pooling with fixed paddings |
| _maximum | No |
| _minimum | No |
| add_n | No |
| arccosh | No |
| arcsinh | No |
| arctanh | No |
| broadcast_add | No |
| broadcast_mul | No |
| cumsum | No |
| div_scalar | No |
| elementwise_sub | No |
| elemwise_add | No |
| elemwise_mul | No |
| exp | No |
| expand_dims | No |
| greater_scalar | No |
| minus_scalar | No |
| null | Not needed for inference |
| repeat | No |
| rnn | No |
| rnn_param_concat | No |
| sigmoid | No |
| slice | No |
| slice_axis | No |
| slice_channel | No |
| slice_like | No |
| stack | No |
| swapaxis | No |
| tile | No |
| transpose | No |
| zeros | No |
| Activation | Supported "act_type" values: "relu", "sigmoid", "softrelu", "tanh" |
| BatchNorm | No |
| Concat | No |
@@ -70,43 +110,6 @@ Standard MXNet\* symbols:
| Tile | No |
| UpSampling | No |
| Where | No |
| _Plus | No |
| _contrib_MultiBoxDetection | "force_suppress" = 1 is not supported, non-default variances are not supported |
| _contrib_MultiBoxPrior | No |
| _contrib_Proposal | No |
| _copy | Not needed for inference |
| _minus_scalar | No |
| _mul_scalar | No |
| _arange | No |
| _contrib_AdaptiveAvgPooling2D | Converted to the Average Pooling with fixed paddings |
| _maximum | No |
| _minimum | No |
| add_n | No |
| broadcast_add | No |
| broadcast_mul | No |
| cumsum | No |
| div_scalar | No |
| elementwise_sub | No |
| elemwise_add | No |
| elemwise_mul | No |
| exp | No |
| expand_dims | No |
| greater_scalar | No |
| minus_scalar | No |
| null | Not needed for inference |
| repeat | No |
| rnn | No |
| rnn_param_concat | No |
| sigmoid | No |
| slice | No |
| slice_axis | No |
| slice_channel | No |
| slice_like | No |
| stack | No |
| swapaxis | No |
| tile | No |
| transpose | No |
| zeros | No |
## TensorFlow\* Supported Operations

@@ -14,7 +14,7 @@
limitations under the License.
"""
from extensions.ops.activation_ops import SoftPlus, Sigmoid, Tanh, ReLU
from extensions.ops.activation_ops import SoftPlus, Sigmoid, Tanh, ReLU, Asinh, Acosh, Atanh
from mo.front.extractor import FrontExtractorOp
from mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs
from mo.utils.error import Error
@@ -44,3 +44,33 @@ class ActivationFrontExtractor(FrontExtractorOp):
                act_type)
        act_class.update_node_stat(node)
        return cls.enabled

class AsinhFrontExtractor(FrontExtractorOp):
    op = 'arcsinh'
    enabled = True

    @classmethod
    def extract(cls, node):
        Asinh.update_node_stat(node)
        return cls.enabled


class AcoshFrontExtractor(FrontExtractorOp):
    op = 'arccosh'
    enabled = True

    @classmethod
    def extract(cls, node):
        Acosh.update_node_stat(node)
        return cls.enabled


class AtanhFrontExtractor(FrontExtractorOp):
    op = 'arctanh'
    enabled = True

    @classmethod
    def extract(cls, node):
        Atanh.update_node_stat(node)
        return cls.enabled
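As an aside, the pattern these extractors follow, declare the framework op name, then rewrite the node to the matching internal op, can be illustrated with a self-contained mock. The registry below is a simplified stand-in for illustration only, not the Model Optimizer's actual registration machinery:

```python
# Simplified stand-in for the extractor dispatch pattern: each extractor
# declares the framework op name it handles ('op'), and extract() rewrites
# the node to the matching internal operation.
class MockExtractor:
    registry = {}

    @classmethod
    def register(cls, extractor):
        cls.registry[extractor.op] = extractor
        return extractor


@MockExtractor.register
class MockAsinhExtractor(MockExtractor):
    op = 'arcsinh'  # MXNet symbol name, as in AsinhFrontExtractor above

    @classmethod
    def extract(cls, node):
        node['op'] = 'Asinh'  # stand-in for Asinh.update_node_stat(node)
        return True


node = {'op': 'arcsinh'}
MockExtractor.registry[node['op']].extract(node)
print(node)  # {'op': 'Asinh'}
```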

@@ -69,6 +69,11 @@ class Asinh(Activation):
    op = 'Asinh'
    operation = staticmethod(lambda x: np.arcsinh(x))

    def __init__(self, graph: Graph, attrs: dict):
        sp_attrs = {'version': 'opset4'}
        sp_attrs.update(attrs)
        super().__init__(graph, sp_attrs)

class Cos(Activation):
    op = 'Cos'
@@ -89,6 +94,11 @@ class Acosh(Activation):
    op = 'Acosh'
    operation = staticmethod(lambda x: np.arccosh(x))

    def __init__(self, graph: Graph, attrs: dict):
        sp_attrs = {'version': 'opset4'}
        sp_attrs.update(attrs)
        super().__init__(graph, sp_attrs)

class Tan(Activation):
    op = 'Tan'
@@ -109,6 +119,11 @@ class Atanh(Activation):
    op = 'Atanh'
    operation = staticmethod(lambda x: np.arctanh(x))

    def __init__(self, graph: Graph, attrs: dict):
        sp_attrs = {'version': 'opset4'}
        sp_attrs.update(attrs)
        super().__init__(graph, sp_attrs)

class ReLU6(AttributedClamp):
    op = 'ReLU6'
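As a quick sanity check of the NumPy reference implementations used by the `Asinh`, `Acosh`, and `Atanh` ops above, the following plain-NumPy snippet (no Model Optimizer dependencies) round-trips each function on its valid domain:

```python
# Plain-NumPy round-trip check of the lambdas used by Asinh, Acosh and
# Atanh above; note the domain each inverse function is defined on.
import numpy as np

x = np.array([0.25, 0.5, 0.75])
assert np.allclose(np.arcsinh(np.sinh(x)), x)  # defined for all real x
assert np.allclose(np.arccosh(np.cosh(x)), x)  # arccosh: inputs must be >= 1
assert np.allclose(np.arctanh(np.tanh(x)), x)  # arctanh: inputs must satisfy |x| < 1
```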