* fix batch adding to init value of read value * fix for batch in Kaldi models * added broadcast to be able reshape in IE * test fixes, added batch broadcasting to created constants * pep fixes * move all changes to 1 transformation * added unit test and fix insertSelect transformation * added comments * remove unneeded params search * fix element_size to send correct batch * fix update batch in element_size * couple fixes * update BOM file * fix review comments * review fixes * review fixes * fix license headers
33 lines
990 B
Python
33 lines
990 B
Python
# Copyright (C) 2018-2021 Intel Corporation
|
|
# SPDX-License-Identifier: Apache-2.0
|
|
|
|
from mo.front.common.partial_infer.elemental import copy_shape_infer
|
|
from mo.graph.graph import Graph, Node
|
|
from mo.ops.op import Op
|
|
|
|
|
|
class MemoryOffset(Op):
    """MemoryOffset operation used by the Kaldi front-end.

    Shape inference either uses an explicitly provided 'element_size'
    attribute as the full output shape, or — when it is absent (the TDNN
    block case) — simply copies the input shape to the output.
    """

    op = 'MemoryOffset'
    enabled = False

    def __init__(self, graph: Graph, attrs: dict):
        # Mandatory defaults for this op; caller-supplied `attrs` may
        # override them via Op.__init__'s merge.
        mandatory_props = {
            'op': 'MemoryOffset',
            'type': None,
            'pair_name': None,
            'splitted': False,
            'has_default': False,
            'infer': self.infer,
            'in_ports_count': 1,
            'out_ports_count': 1,
        }
        super().__init__(graph, mandatory_props, attrs)

    @staticmethod
    def infer(node: Node):
        """Set the output shape of `node` on output port 0.

        'element_size' is expected to be set by the Kaldi loader or by the
        MemoryOffsetAdjustment / SplitRecurrentMemoryOffset transformations.
        """
        if not node.has_valid('element_size'):
            # TDNN blocks carry no element_size: the offset is
            # shape-preserving, so just propagate the input shape.
            copy_shape_infer(node)
            return
        node.out_port(0).data.set_shape(node.element_size)