[ MO ] Warnings fix (#5105)

* warnings fix

* remove redundant backslash

* add whitespace in the error message
Yegor Kruglov 2021-04-08 14:09:00 +03:00 committed by GitHub
parent d30740af66
commit ca889f530d
24 changed files with 30 additions and 30 deletions
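
The change is mechanical across the files below: the transformation docstrings embed ASCII diagrams with backslashes, and in a regular (non-raw) string literal Python flags those backslashes as invalid escape sequences, so each affected docstring gets an r prefix. A minimal, illustrative sketch of the warning and the fix (not taken from the patch; the class names are made up):

    # A regular (non-raw) docstring: Python flags "\ " as an invalid escape
    # sequence when the module is compiled (a DeprecationWarning at the time
    # of this commit, a SyntaxWarning in newer interpreters).
    class Before:
        """
        in_node
        \ \ | / /
        """

    # The fix applied throughout this commit: the r prefix makes the docstring
    # a raw string, every backslash stays literal, and no warning is emitted.
    class After:
        r"""
        in_node
        \ \ | / /
        """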

@@ -15,7 +15,7 @@ from mo.ops.shape import Shape
 class InterpolateConcat(BackReplacementPattern):
-    """
+    r"""
     Replaces hard-coded 1-port input of Interpolate with reshape-able sub-graph using the following Concat inputs
     BEFORE:
@@ -85,7 +85,7 @@ class InterpolateConcat(BackReplacementPattern):
 class InterpolateReshapeWA(BackReplacementPattern):
-    """
+    r"""
     Replaces hard-coded 1-port input of Interpolate with reshape-able sub-graph.
     WARNING: Could cause troubles if model has hard-coded Interpolate intentionally -- rare situation
     BEFORE:

@@ -70,7 +70,7 @@ class MatMulConstTransposesExtraction(BackReplacementPattern):
 class PullTransposeThroughFQUp(BackReplacementPattern):
-    """
+    r"""
     BEFORE AFTER
     T T T T T
     \ \ | / /
@@ -135,7 +135,7 @@ class PullTransposeThroughFQUp(BackReplacementPattern):
 class SmartReshape_HC_Reshape_MatMul(BackReplacementPattern):
-    """
+    r"""
     Relaxes hard-coded input of Reshape in such sub-graphs:
     input_1 Constant

@@ -97,7 +97,7 @@ class ReverseChannelsPropagationDown(BackReplacementPattern):
     @staticmethod
     def pass_rc_through_conv(node, reverse_channels):
-        """
+        r"""
         For non grouped convolution:
         BEFORE AFTER
@@ -167,7 +167,7 @@ class ReverseChannelsPropagationDown(BackReplacementPattern):
     @staticmethod
     def pass_rc_through_eltwise(node, reverse_channels):
-        """
+        r"""
         BEFORE AFTER
         previous_op previous_op'
@@ -268,7 +268,7 @@ class ReverseChannelsPropagationUp(BackReplacementPattern):
     @staticmethod
     def lift_up_through_eltwise(node: Node, reverse_channels: Node):
-        """
+        r"""
         BEFORE AFTER
         previous_op previous_op'

@@ -14,7 +14,7 @@ from mo.ops.const import Const
 class CompressQuantizeWeights(BackReplacementPattern):
-    """
+    r"""
     Compress weights transformation goal is to pre-quantize data to minimize runtime calculations with constant data.
     To achieve this goal we perform FakeQuantize decomposition to separate quantization from dequantization in it.

@@ -12,7 +12,7 @@ from mo.ops.broadcast import Broadcast
 class DropoutWithRandomUniformReplacer(FrontReplacementSubgraph):
-    """
+    r"""
     This transformation replaces possible Dropout block (in inference mode) with RandomUniform
     to Broadcast of half-ones in a sub-graph.
     WARNING: the transformation can be triggered for other block with RandomUniform by mistake,

@@ -13,7 +13,7 @@ from mo.ops.unsqueeze import Unsqueeze
 class ExpandRangeConstant(FrontReplacementSubgraph):
-    """
+    r"""
     Searches for Constant operations filled with range values starting from 0 and replaces it with Range operation
     Faced in ONNX BERT -- replacing it makes model reshape-able by sequence length

@@ -13,7 +13,7 @@ from mo.ops.shape import Shape
 class InterpolateWithConcat(FrontReplacementPattern):
-    """
+    r"""
     Replaces hard-coded 1-port input of Interpolate with reshape-able sub-graph using the following Concat inputs
     BEFORE:

@@ -13,7 +13,7 @@ from mo.utils.utils import refer_to_faq_msg
 def apply_biases_to_last_layer(graph, counts):
-    """
+    r"""
     When user provides counts file, it is a file that contains log-apriory probabilities,
     technically it should be subtracted from the bias of the last layer unless it is a SoftMax.

@@ -64,7 +64,7 @@ def align_frame_time(graph: Graph, node: Node, frame_time_max):
 class MemoryOffsetAdjustment(FrontReplacementSubgraph):
-    """
+    r"""
     Pass used to fix wrong results in the following situation:
     input
     | \

@@ -11,7 +11,7 @@ from mo.ops.memoryoffset import MemoryOffset
 class TdnnComponentReplacer(FrontReplacementPattern):
-    '''
+    r"""
     Expand TdnnComponent into MemoryOffsets, Concat and FullyConected nodes
     BEFORE:
@@ -31,7 +31,7 @@ class TdnnComponentReplacer(FrontReplacementPattern):
     |
     FullyConnected
     |
-    '''
+    """
     enabled = True
     run_not_recursively = True

@@ -13,7 +13,7 @@ from mo.ops.unsqueeze import Unsqueeze
 class NonConstBeginStridedSliceReplacement(FrontReplacementSubgraph):
-    """
+    r"""
     The transformation handles StridedSlice operation with dynamic begin and end values
     when slicing performs along just one dimension with a dynamic index.
     For example, StridedSlice with begin=(0,idx,0), end=(0,idx+1,0),

@@ -12,7 +12,7 @@ from mo.graph.graph import Graph
 class UnpackPackReverseInputChannels(FrontReplacementSubgraph):
-    """
+    r"""
     Unpack - Pack nodes sequence from TensorFlow connected like it shown below is a way to ReverseChannels
     / 0 - 2 \

@@ -35,7 +35,7 @@ class FIFOQueue(FrontReplacementSubgraph):
     @staticmethod
     def replace_sub_graph(graph: Graph, match: dict, **kwargs):
-        """
+        r"""
         Usually graph looks like:
         main_graph

@@ -8,7 +8,7 @@ from mo.graph.graph import Graph, Node, rename_node
 class FloorDivDecomposition(FrontReplacementPattern):
-    """
+    r"""
     BEFORE: AFTER:
     input_0 input_1 input_0 input_1
     \ / \ /

@@ -7,7 +7,7 @@ from mo.graph.graph import Graph, Node
 class IdentityN_to_Identity(FrontReplacementPattern):
-    """
+    r"""
     Replaces IdentityN op with several Identity ops.
     Example:

@@ -90,7 +90,7 @@ class BlockLSTMtoLSTMSequence(MiddleReplacementPattern):
     @staticmethod
     def replace_pattern(graph: Graph, match: dict):
         time_len = match['concatenated_hidden_states'].shape[0]
-        """
+        r"""
         Working with concatenated_cell_states_data part first, because IE TensorIterator primitive doesn't have
         concatenated cell states output and if we can not collapse it, then we does not support this type of BlockLSTM

@@ -165,7 +165,7 @@ class MarkSubGraphsWithCorrectLayout(MiddleReplacementPattern):
     @staticmethod
     def walk_up_from_in_ports_to_out_ports(in_ports: Set[Port], out_ports: Set[Port], port_condition=None):
-        """"
+        r""""
         Returns all intermediate ports and nodes of such a sub-graph:
         out_ports

@@ -6,7 +6,7 @@ from mo.middle.replacement import MiddleReplacementPattern
 class RemoveUselessConcatSplitPattern(MiddleReplacementPattern):
-    """
+    r"""
     Remove useless construction with concat and split like follows:
     / / | \ \
     br1 br2 .. br(n-1)br(n)

@@ -6,7 +6,7 @@ from mo.middle.replacement import MiddleReplacementPattern
 class RemoveUselessCropsPattern(MiddleReplacementPattern):
-    """
+    r"""
     Remove useless construction with crops and concat like follows:
     in_node
     / / | \ \

@@ -18,7 +18,7 @@ from mo.ops.result import Result
 class ReplaceSpliceNodePattern(MiddleReplacementPattern):
-    """
+    r"""
     This pass decomposes Splice layer to the sequence Slice Concat and Memory layers
     For example:
     Let's suppose we have next graph:

@@ -17,7 +17,7 @@ from mo.utils.error import Error
 class StridedSliceNormalizer(MiddleReplacementPattern):
-    """
+    r"""
     StridedSlice is not normal if it cannot be permuted by ApplyPermutations. This normalizer
     inserts blank colons ':' in slice expression so that it can be correctly permuted
     from NHWC to NCHW layout. It changes masks and inserts blank begin, end and strides values.

@@ -245,7 +245,7 @@ class BackEdgeSimpleInputMatcher(MiddleReplacementPattern):
 class SmartMatcherInputSlicingWithGather(MiddleReplacementPattern):
-    """
+    r"""
     The transformation matches a sub-graph where input tensor is consequently sliced along some axis
     for each time step (or index) inside TensorFlow 1.x while_loop operation.
     In the original graph StridedSlice with non-constant begin and end attributes performs this slicing.

@@ -143,7 +143,7 @@ def add_convolution_to_swap_xy_coordinates(graph: Graph, input_node: Node, coord
 def add_fake_background_loc(graph: Graph, input_node: Node):
-    """
+    r"""
     DetectionOutput layer expects that box coordinates contains coordinates of boxes for the "background" class also,
     but in the TensorFlow\* Object Detection API the tensor contains information about real object classes only.
     The function copies a slice of the output data of the node 'input_node' and then concats it to the beginning of the

@@ -203,8 +203,8 @@ class Connection:
                 return {}, None
             if self.destinations and len(self.destinations) > 1:
-                raise Error("set_destination applicable only for connections that has exactly one destination or \
-                            when there is no destinations")
+                raise Error("set_destination applicable only for connections that has exactly one destination or "
+                            "when there is no destinations")
             if port.type == 'out':
                 raise Error("Wrong port type in set_destination method. Should be 'in' but given 'out'")