openvino/model-optimizer/extensions/front/softmax.py

"""
Copyright (C) 2018-2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging as log
import numpy as np
from extensions.front.reduce_axis_normalizer import ReduceAxisNormalizer
from mo.front.common.replacement import FrontReplacementSubgraph
from mo.front.subgraph_matcher import SubgraphMatch
from mo.graph.graph import Graph
from mo.ops.softmax import Softmax


class SoftmaxFromKeras(FrontReplacementSubgraph):
    """
    The transformation looks for the pattern that Keras produces for the SoftMax layer. The transformation works only
    if the softmax is performed over one pre-defined axis.
    """
    enabled = True

    def run_after(self):
        return [ReduceAxisNormalizer]
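
    # The pattern below corresponds to the numerically stable softmax decomposition that
    # TensorFlow/Keras emits for a SoftMax layer:
    #   y = exp(x - reduce_max(x, axis)) / reduce_sum(exp(x - reduce_max(x, axis)), axis)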
    def pattern(self):
        return dict(
            nodes=[
                ('input', dict()),
                ('reduce_max', dict(op='ReduceMax')),
                ('reduce_indices_max', dict(op='Const', value=lambda x: x is not None and x.size != 0)),
                ('sub', dict(op='Sub')),
                ('exp', dict(op='Exp')),
                ('reduce_sum', dict(op='ReduceSum')),
                ('reduce_indices_sum', dict(op='Const', value=lambda x: x is not None and x.size != 0)),
                ('div', dict(op='Div')),
            ],
            edges=[
                ('input', 'sub', {'in': 0}),
                ('input', 'reduce_max', {'in': 0}),
                ('reduce_indices_max', 'reduce_max', {'in': 1}),
                ('reduce_max', 'sub', {'in': 1}),
                ('sub', 'exp', {'in': 0}),
                ('exp', 'div', {'in': 0}),
                ('exp', 'reduce_sum', {'in': 0}),
                ('reduce_indices_sum', 'reduce_sum', {'in': 1}),
                ('reduce_sum', 'div', {'in': 1}),
            ])
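
    # Replace the matched subgraph with a single Softmax node; the conversion is performed
    # only when both reductions are taken over the same single axis.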
    def replace_sub_graph(self, graph: Graph, match: [dict, SubgraphMatch]):
        reduce_max_axis = match['reduce_indices_max'].value
        reduce_sum_axis = match['reduce_indices_sum'].value

        # Normalize scalar reduction indices to 1-element arrays
        if reduce_max_axis.ndim == 0:
            reduce_max_axis = reduce_max_axis.reshape([1])

        if reduce_sum_axis.ndim == 0:
            reduce_sum_axis = reduce_sum_axis.reshape([1])

        if len(reduce_max_axis) != 1:
            log.info('The reduction indices contain more than 1 element. Cannot convert to Softmax.')
            return

        if not np.array_equal(reduce_max_axis, reduce_sum_axis):
            log.info('The reduce indices are not equal: {} vs {}. Cannot convert to Softmax.'
                     ''.format(reduce_max_axis, reduce_sum_axis))
            return

        # Create a Softmax node over the common reduction axis and reroute consumers of 'div' to it
        softmax = Softmax(graph, {'name': match['input'].name + '/Softmax', 'axis': reduce_sum_axis[0]}).create_node()
        match['input'].out_port(0).connect(softmax.in_port(0))
        match['div'].out_port(0).get_connection().set_source(softmax.out_port(0))
        log.debug('Successfully created SoftMax node')
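

# Illustrative standalone sketch (assumptions: plain NumPy, a random 2x5 input and axis=1):
# the decomposition matched by SoftmaxFromKeras computes the same values as a direct
# softmax over the same axis.
import numpy as np

x = np.random.randn(2, 5).astype(np.float32)
axis = 1

# Numerically stable decomposition: exp(x - max) normalized by its sum.
shifted = np.exp(x - x.max(axis=axis, keepdims=True))
decomposed = shifted / shifted.sum(axis=axis, keepdims=True)

# Direct softmax for comparison.
direct = np.exp(x) / np.exp(x).sum(axis=axis, keepdims=True)

assert np.allclose(decomposed, direct)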