Additional telemetry events in MO. (#5662)
* Added additional telemetry events.
* Separated sending of TF1 and TF2 framework information.
* Small correction.
* Unit test fix.
* Added op_names_statistic field in graph; added op-name saving in the Loop and While extractors.
* Optimized imports.
* Added debug print.
* Added comments; removed debug print.
* Added comment.
* Renamed the dynamic-shapes event label to "partially defined"; added unit tests.
* Added attribute checks; moved telemetry methods to a separate file.
* Small corrections.
* Updated BOM file.
This commit is contained in:
@@ -1073,6 +1073,7 @@ mo/utils/simple_proto_parser.py
|
||||
mo/utils/str_to.py
|
||||
mo/utils/summarize_graph.py
|
||||
mo/utils/telemetry_stub.py
|
||||
mo/utils/telemetry_utils.py
|
||||
mo/utils/tensorboard_util.py
|
||||
mo/utils/unsupported_ops.py
|
||||
mo/utils/utils.py
|
||||
|
||||
@@ -47,6 +47,8 @@ class LoopExtractor(FrontExtractorOp):
|
||||
# create an NX node
|
||||
id = body_graph.unique_id(node_id(pb_node))
|
||||
body_graph.add_node(id, pb=pb_node, kind='op')
|
||||
if hasattr(body_graph, 'op_names_statistic') and hasattr(pb_node, 'op_type'):
|
||||
body_graph.op_names_statistic[pb_node.op_type] += 1
|
||||
|
||||
# add incoming edges based on data_nodes_map
|
||||
for dst_port, inp in enumerate(pb_node.input):
|
||||
|
||||
@@ -35,6 +35,8 @@ def update_body_graph(body_graph: Graph, subgraph_proto: dict,
|
||||
id = body_graph.unique_id(pb_node.name)
|
||||
map_original_name[pb_node.name] = id
|
||||
body_graph.add_node(id, pb=pb_node, kind='op')
|
||||
if hasattr(body_graph, 'op_names_statistic') and hasattr(pb_node, 'op'):
|
||||
body_graph.op_names_statistic[pb_node.op] += 1
|
||||
|
||||
# add incoming edges based on data_nodes_map
|
||||
for dst_port, inp in enumerate(pb_node.input):
|
||||
|
||||
@@ -8,6 +8,7 @@ from mo.front.common.register_custom_ops import update_extractors_with_extension
|
||||
from mo.front.extractor import extract_node_attrs
|
||||
from mo.graph.graph import Graph
|
||||
from mo.utils.error import Error
|
||||
from mo.utils.telemetry_utils import send_op_names_info, send_shapes_info
|
||||
from mo.utils.utils import refer_to_faq_msg
|
||||
|
||||
|
||||
@@ -49,3 +50,5 @@ class CaffeLoader(Loader):
|
||||
argv.enable_flattening_nested_params if hasattr(argv, 'enable_flattening_nested_params') else False
|
||||
)
|
||||
extract_node_attrs(graph, lambda node: caffe_extractor(node, check_for_duplicates(caffe_type_extractors)))
|
||||
send_op_names_info('caffe', graph)
|
||||
send_shapes_info('caffe', graph)
|
||||
|
||||
@@ -8,6 +8,7 @@ from mo.front.kaldi.extractor import kaldi_extractor, kaldi_type_extractors
|
||||
from mo.front.kaldi.loader.loader import load_kaldi_model
|
||||
from mo.graph.graph import Graph
|
||||
from mo.utils.error import Error
|
||||
from mo.utils.telemetry_utils import send_shapes_info, send_op_names_info
|
||||
from mo.utils.utils import refer_to_faq_msg
|
||||
|
||||
|
||||
@@ -27,3 +28,6 @@ class KaldiLoader(Loader):
|
||||
|
||||
update_extractors_with_extensions(kaldi_type_extractors)
|
||||
extract_node_attrs(graph, lambda node: kaldi_extractor(node))
|
||||
|
||||
send_op_names_info('kaldi', graph)
|
||||
send_shapes_info('kaldi', graph)
|
||||
|
||||
@@ -17,6 +17,7 @@ from mo.front.mxnet.extractor import mxnet_op_extractors, mxnet_op_extractor
|
||||
from mo.front.mxnet.loader import symbol2nx, load_symbol_def
|
||||
from mo.front.mxnet.nd_to_params import save_params_file
|
||||
from mo.graph.graph import Graph
|
||||
from mo.utils.telemetry_utils import send_shapes_info, send_op_names_info
|
||||
|
||||
|
||||
class MxNetLoader(Loader):
|
||||
@@ -51,3 +52,5 @@ class MxNetLoader(Loader):
|
||||
graph.graph['feature_dim'] = 1 if graph.graph['layout'] == 'NCHW' else 3
|
||||
|
||||
extract_node_attrs(graph, mxnet_op_extractor)
|
||||
send_op_names_info('mxnet', graph)
|
||||
send_shapes_info('mxnet', graph)
|
||||
|
||||
@@ -15,6 +15,7 @@ from mo.front.onnx.extractor import onnx_op_extractor, onnx_op_extractors
|
||||
from mo.front.onnx.loader import load_onnx_model, protobuf2nx
|
||||
from mo.graph.graph import Graph
|
||||
from mo.utils.error import Error
|
||||
from mo.utils.telemetry_utils import send_shapes_info, send_op_names_info
|
||||
from mo.utils.utils import refer_to_faq_msg
|
||||
|
||||
|
||||
@@ -59,3 +60,5 @@ class ONNXLoader(Loader):
|
||||
|
||||
graph.check_empty_graph('protobuf2nx. It may happen due to problems with loaded model')
|
||||
extract_node_attrs(graph, lambda node: onnx_op_extractor(node, check_for_duplicates(onnx_op_extractors)))
|
||||
send_op_names_info('onnx', graph)
|
||||
send_shapes_info('onnx', graph)
|
||||
|
||||
@@ -25,6 +25,7 @@ from mo.front.tf.loader import load_tf_graph_def, protobuf2nx
|
||||
from mo.graph.graph import Graph
|
||||
from mo.utils import tensorboard_util
|
||||
from mo.utils.error import Error
|
||||
from mo.utils.telemetry_utils import send_op_names_info, send_shapes_info, send_framework_info
|
||||
from mo.utils.utils import refer_to_faq_msg
|
||||
|
||||
|
||||
@@ -40,13 +41,14 @@ class TFLoader(Loader):
|
||||
log.info('Loading library "{}" with custom operations'.format(library))
|
||||
tf_v1.load_op_library(library)
|
||||
|
||||
graph_def, variables_values = load_tf_graph_def(graph_file_name=argv.input_model,
|
||||
graph_def, variables_values, framework = load_tf_graph_def(graph_file_name=argv.input_model,
|
||||
is_binary=not argv.input_model_is_text,
|
||||
checkpoint=argv.input_checkpoint,
|
||||
user_output_node_names_list=argv.output,
|
||||
model_dir=argv.saved_model_dir,
|
||||
meta_graph_file=argv.input_meta_graph,
|
||||
saved_model_tags=argv.saved_model_tags)
|
||||
send_framework_info(framework)
|
||||
|
||||
try:
|
||||
tf_v1.import_graph_def(graph_def, name='')
|
||||
@@ -98,3 +100,5 @@ class TFLoader(Loader):
|
||||
|
||||
graph.check_empty_graph('protobuf2nx. It may happen due to problems with loaded model')
|
||||
extract_node_attrs(graph, lambda node: tf_op_extractor(node, check_for_duplicates(tf_op_extractors)))
|
||||
send_op_names_info(framework, graph)
|
||||
send_shapes_info(framework, graph)
|
||||
|
||||
@@ -282,6 +282,8 @@ def caffe_pb_to_nx(graph, proto, model):
|
||||
|
||||
node_id = graph.unique_id(layer.name)
|
||||
graph.add_node(node_id, pb=layer, model_pb=model_layer, kind='op', type='Parameter')
|
||||
if hasattr(graph, 'op_names_statistic') and hasattr(layer, 'type'):
|
||||
graph.op_names_statistic[layer.type] += 1
|
||||
|
||||
# connect inputs based on blob_producers dictionary
|
||||
for dst_port, bottom in enumerate(layer.bottom):
|
||||
|
||||
@@ -151,6 +151,8 @@ def load_kalid_nnet1_model(graph, file_descr, name):
|
||||
kind='op',
|
||||
layer_i=layer_i,
|
||||
layer_o=layer_o)
|
||||
if hasattr(graph, 'op_names_statistic'):
|
||||
graph.op_names_statistic[component_type] += 1
|
||||
|
||||
prev_node = Node(graph, prev_layer_id)
|
||||
if prev_node.op == 'Parameter':
|
||||
@@ -280,6 +282,8 @@ def load_components(file_descr, graph, component_layer_map=None):
|
||||
parameters=get_parameters(file_descr, start_index, end_index),
|
||||
op=component_type,
|
||||
kind='op')
|
||||
if hasattr(graph, 'op_names_statistic'):
|
||||
graph.op_names_statistic[component_type] += 1
|
||||
|
||||
all_components.append(layer_id)
|
||||
log.debug('{} (type is {}) was loaded'.format(layer_id, component_type))
|
||||
|
||||
@@ -107,6 +107,9 @@ def symbol2nx(graph, model_nodes, model_params, input_names: str = ''):
|
||||
node['value'] = np.zeros(rnn_states[node['name']])
|
||||
node_name = graph.unique_id(node['name'])
|
||||
graph.add_node(node_name, **symbol_attrs(node))
|
||||
if hasattr(graph, 'op_names_statistic') and 'op' in node:
|
||||
if node['op'] != 'null':
|
||||
graph.op_names_statistic[node['op']] += 1
|
||||
graph.node[node_name].update(common_mxnet_fields(Node(graph, node_name)))
|
||||
index_node_keys[i] = node_name
|
||||
fw_name_map[node_name] = node['name']
|
||||
|
||||
@@ -77,6 +77,8 @@ def protobuf2nx(graph: Graph, pb):
|
||||
fw_name = node_id(node)
|
||||
id = graph.unique_id(fw_name)
|
||||
graph.add_node(id, pb=node, kind='op')
|
||||
if hasattr(graph, 'op_names_statistic') and hasattr(node, 'op_type'):
|
||||
graph.op_names_statistic[node.op_type] += 1
|
||||
|
||||
# add incoming edges based on data_nodes_map
|
||||
for dst_port, inp in enumerate(node.input):
|
||||
|
||||
@@ -4,8 +4,9 @@
|
||||
import logging as log
|
||||
import os
|
||||
import re
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
from mo.graph.graph import Node
|
||||
from mo.utils.error import Error, FrameworkError
|
||||
from mo.utils.utils import refer_to_faq_msg
|
||||
from mo.utils.versions_checker import get_environment_setup
|
||||
@@ -189,7 +190,7 @@ def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpo
|
||||
try:
|
||||
if graph_file_name and not meta_graph_file and not checkpoint:
|
||||
# frozen graph
|
||||
return read_file_to_graph_def(graph_def, graph_file_name, is_binary), variables_values
|
||||
return read_file_to_graph_def(graph_def, graph_file_name, is_binary), variables_values, 'tf'
|
||||
if graph_file_name and not meta_graph_file and checkpoint:
|
||||
# inference graph and checkpoint
|
||||
graph_def = read_file_to_graph_def(graph_def, graph_file_name, is_binary)
|
||||
@@ -200,7 +201,7 @@ def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpo
|
||||
graph_def, variables_values = freeze_checkpoints(graph_def=graph_def, checkpoint_dir=checkpoint,
|
||||
output_node_names=outputs)
|
||||
# we are sure that checkpoint is existing file or directory due to cli_parser configuration
|
||||
return graph_def, variables_values
|
||||
return graph_def, variables_values, 'tf'
|
||||
if not graph_file_name and meta_graph_file:
|
||||
meta_graph_file = deducing_metagraph_path(meta_graph_file)
|
||||
input_meta_graph_def = read_file_to_graph_def(tf_v1.MetaGraphDef(), meta_graph_file, is_binary)
|
||||
@@ -211,7 +212,7 @@ def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpo
|
||||
outputs = get_output_node_names_list(input_meta_graph_def.graph_def, user_output_node_names_list)
|
||||
graph_def = tf_v1.graph_util.convert_variables_to_constants(sess, input_meta_graph_def.graph_def,
|
||||
outputs)
|
||||
return graph_def, variables_values
|
||||
return graph_def, variables_values, 'tf'
|
||||
if model_dir:
|
||||
# saved model directory
|
||||
try:
|
||||
@@ -235,7 +236,7 @@ def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpo
|
||||
graph_def = frozen_func.graph.as_graph_def(add_shapes=True)
|
||||
# disable eager execution since next steps are executed with a graph in non-eager mode
|
||||
tf_v1.disable_eager_execution()
|
||||
return graph_def, variables_values
|
||||
return graph_def, variables_values, 'tf2'
|
||||
except (TypeError, KeyError):
|
||||
# disable eager execution since TensorFlow 1 model is handled
|
||||
tf_v1.disable_eager_execution()
|
||||
@@ -245,7 +246,7 @@ def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpo
|
||||
meta_graph_def = tf_v1.saved_model.loader.load(sess, tags, model_dir)
|
||||
outputs = get_output_node_names_list(meta_graph_def.graph_def, user_output_node_names_list)
|
||||
graph_def = tf_v1.graph_util.convert_variables_to_constants(sess, meta_graph_def.graph_def, outputs)
|
||||
return graph_def, variables_values
|
||||
return graph_def, variables_values, 'tf'
|
||||
except Exception as e:
|
||||
raise FrameworkError('SavedModel format load failure: {}', e) from e
|
||||
except Exception as e:
|
||||
@@ -260,6 +261,14 @@ def protobuf_attrs(pb:tf_v1.NodeDef):
|
||||
def protobuf2nx(graph, pb: tf_v1.GraphDef):
|
||||
fill_graph_with_nodes(graph, pb.node, get_id=lambda pb: pb.name, get_attrs=protobuf_attrs)
|
||||
|
||||
if hasattr(graph, 'op_names_statistic'):
|
||||
for node_name in graph.nodes:
|
||||
node = Node(graph, node_name)
|
||||
node_pb = node.soft_get('pb', None)
|
||||
if node_pb is not None:
|
||||
if hasattr(node_pb, 'op'):
|
||||
graph.op_names_statistic[node_pb.op] += 1
|
||||
|
||||
# Create a library with auxiliary functions used in TensorFlow 2 operations
|
||||
if hasattr(pb, 'library') and hasattr(pb.library, 'function'):
|
||||
graph.graph['library'] = {}
|
||||
|
||||
@@ -561,6 +561,7 @@ class Graph(nx.MultiDiGraph):
|
||||
self.node = self.nodes
|
||||
|
||||
unique_id_count = 0
|
||||
op_names_statistic = collections.Counter()
|
||||
|
||||
# SAFE API DESCRIPTION
|
||||
# all provided methods below are designed to be more safe and convenient
|
||||
|
||||
@@ -37,6 +37,7 @@ from mo.utils.guess_framework import deduce_framework_by_namespace
|
||||
from mo.utils.logger import init_logger
|
||||
from mo.utils.model_analysis import AnalysisResults
|
||||
from mo.utils.utils import refer_to_faq_msg
|
||||
from mo.utils.telemetry_utils import send_params_info, send_framework_info
|
||||
from mo.utils.version import get_version, get_simplified_mo_version, get_simplified_ie_version
|
||||
from mo.utils.versions_checker import check_requirements # pylint: disable=no-name-in-module
|
||||
|
||||
@@ -110,7 +111,6 @@ def prepare_ir(argv: argparse.Namespace):
|
||||
|
||||
log.debug(str(argv))
|
||||
log.debug("Model Optimizer started")
|
||||
t = tm.Telemetry()
|
||||
|
||||
model_name = "<UNKNOWN_NAME>"
|
||||
if argv.model_name:
|
||||
@@ -231,23 +231,22 @@ def prepare_ir(argv: argparse.Namespace):
|
||||
argv.freeze_placeholder_with_value, argv.input = get_freeze_placeholder_values(argv.input,
|
||||
argv.freeze_placeholder_with_value)
|
||||
if is_tf:
|
||||
t.send_event('mo', 'framework', 'tf')
|
||||
from mo.front.tf.register_custom_ops import get_front_classes
|
||||
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
|
||||
elif is_caffe:
|
||||
t.send_event('mo', 'framework', 'caffe')
|
||||
send_framework_info('caffe')
|
||||
from mo.front.caffe.register_custom_ops import get_front_classes
|
||||
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
|
||||
elif is_mxnet:
|
||||
t.send_event('mo', 'framework', 'mxnet')
|
||||
send_framework_info('mxnet')
|
||||
from mo.front.mxnet.register_custom_ops import get_front_classes
|
||||
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
|
||||
elif is_kaldi:
|
||||
t.send_event('mo', 'framework', 'kaldi')
|
||||
send_framework_info('kaldi')
|
||||
from mo.front.kaldi.register_custom_ops import get_front_classes
|
||||
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
|
||||
elif is_onnx:
|
||||
t.send_event('mo', 'framework', 'onnx')
|
||||
send_framework_info('onnx')
|
||||
from mo.front.onnx.register_custom_ops import get_front_classes
|
||||
import_extensions.load_dirs(argv.framework, extensions, get_front_classes)
|
||||
graph = unified_pipeline(argv)
|
||||
@@ -384,6 +383,8 @@ def main(cli_parser: argparse.ArgumentParser, framework: str):
|
||||
init_logger('ERROR', False)
|
||||
|
||||
argv = cli_parser.parse_args()
|
||||
send_params_info(argv, cli_parser)
|
||||
|
||||
if framework:
|
||||
argv.framework = framework
|
||||
|
||||
|
||||
@@ -416,6 +416,15 @@ def get_onnx_cli_options():
|
||||
return OrderedDict(sorted(d.items(), key=lambda t: t[0]))
|
||||
|
||||
|
||||
def get_params_with_paths_list():
|
||||
return ['input_model', 'output_dir', 'caffe_parser_path', 'extensions', 'k', 'output_dir',
|
||||
'input_checkpoint', 'input_meta_graph', 'input_proto', 'input_symbol', 'mean_file',
|
||||
'mean_file_offsets', 'pretrained_model_name', 'saved_model_dir', 'tensorboard_logdir',
|
||||
'tensorflow_custom_layer_libraries', 'tensorflow_custom_operations_config_update',
|
||||
'tensorflow_object_detection_api_pipeline_config', 'tensorflow_use_custom_operations_config',
|
||||
'transformations_config']
|
||||
|
||||
|
||||
def get_caffe_cli_parser(parser: argparse.ArgumentParser = None):
|
||||
"""
|
||||
Specifies cli arguments for Model Optimizer for Caffe*
|
||||
|
||||
92
model-optimizer/mo/utils/telemetry_utils.py
Normal file
92
model-optimizer/mo/utils/telemetry_utils.py
Normal file
@@ -0,0 +1,92 @@
|
||||
# Copyright (C) 2018-2021 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import argparse
|
||||
from collections import Counter
|
||||
|
||||
import numpy as np
|
||||
|
||||
from mo.graph.graph import Graph
|
||||
from mo.middle.pattern_match import for_graph_and_each_sub_graph_recursively
|
||||
from mo.utils.cli_parser import get_params_with_paths_list
|
||||
|
||||
try:
|
||||
import openvino_telemetry as tm
|
||||
except ImportError:
|
||||
import mo.utils.telemetry_stub as tm
|
||||
|
||||
|
||||
def send_op_names_info(framework: str, graph: Graph):
    """
    Send telemetry events with per-operation usage counts for the model.

    Counts are gathered from the 'op_names_statistic' Counter of the main
    graph and of every nested sub-graph, then one 'op_count' event is sent
    per distinct operation name, labelled "<framework>_<op_name>".

    :param framework: framework name.
    :param graph: model graph.
    """
    total_counter = Counter()

    def accumulate(g: Graph):
        # Graphs created outside the normal MO pipeline may lack the attribute.
        statistic = getattr(g, 'op_names_statistic', None)
        if statistic is not None:
            total_counter.update(statistic)

    for_graph_and_each_sub_graph_recursively(graph, accumulate)

    telemetry = tm.Telemetry()
    for op_name, count in total_counter.items():
        telemetry.send_event('mo', 'op_count', "{}_{}".format(framework, op_name), count)
|
||||
|
||||
|
||||
def send_shapes_info(framework: str, graph: Graph):
    """
    Send telemetry events describing the model input shapes.

    Collects the 'shape' attribute of every Parameter node and, when any
    shapes were found, sends an 'input_shapes' event with all shapes and a
    'partially_defined_shape' event flagging whether any dimension is
    non-positive (i.e. not fully defined).

    :param framework: framework name.
    :param graph: model graph.
    """
    parameter_shapes = [node['shape'] for node in graph.get_op_nodes()
                        if node.soft_get('type', None) == 'Parameter' and 'shape' in node]
    telemetry = tm.Telemetry()

    if parameter_shapes:
        joined_shapes = ",".join(np.array2string(shape) for shape in parameter_shapes)
        # "1" when at least one dimension of at least one input is not fully defined.
        is_partially_defined = "1" if any(not all(shape > 0) for shape in parameter_shapes) else "0"
        message_str = "{fw:" + framework + ",shape:\"" + joined_shapes + "\"}"
        telemetry.send_event('mo', 'input_shapes', message_str)
        telemetry.send_event('mo', 'partially_defined_shape',
                             "{partially_defined_shape:" + is_partially_defined + ",fw:" + framework + "}")
|
||||
|
||||
|
||||
def send_params_info(argv: argparse.Namespace, cli_parser: argparse.ArgumentParser):
    """
    Send telemetry events for command line parameters that differ from defaults.

    One 'cli_parameters' event is sent per non-default argument, formatted
    as "<name>:<value>". Path-like arguments are reported as "1" so that no
    potentially confidential path or file name leaves the user's machine.

    :param argv: command line parameters.
    :param cli_parser: command line parameters parser.
    """
    telemetry = tm.Telemetry()
    path_like_params = set(get_params_with_paths_list())
    for name in vars(argv):
        value = getattr(argv, name)
        if value == cli_parser.get_default(name):
            continue
        # Directory/file-path values may contain confidential information,
        # so the placeholder "1" is sent instead of the real value.
        reported_value = "1" if name in path_like_params else str(value)
        telemetry.send_event('mo', 'cli_parameters', name + ":" + reported_value)
|
||||
|
||||
|
||||
def send_framework_info(framework: str):
    """
    Send a telemetry event with the name of the source framework.

    :param framework: framework name.
    """
    tm.Telemetry().send_event('mo', 'framework', framework)
|
||||
@@ -15,7 +15,7 @@ pbtxt = 'node{name:"Placeholder"op:"Placeholder"attr{key:"dtype"value{type:DT_FL
|
||||
class TestingSummarizeGraph(unittest.TestCase):
|
||||
def test_summarize_graph(self):
|
||||
with patch('mo.front.tf.loader.open', mock_open(read_data=pbtxt)) as m:
|
||||
graph_def, _ = load_tf_graph_def('path', False)
|
||||
graph_def, _, _ = load_tf_graph_def('path', False)
|
||||
summary = summarize_graph(graph_def)
|
||||
self.assertEqual(len(summary['outputs']), 1)
|
||||
self.assertEqual(summary['outputs'][0], 'Output/Identity')
|
||||
|
||||
88
model-optimizer/unit_tests/mo/utils/telemetry_utils_test.py
Normal file
88
model-optimizer/unit_tests/mo/utils/telemetry_utils_test.py
Normal file
@@ -0,0 +1,88 @@
|
||||
# Copyright (C) 2018-2021 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import unittest
|
||||
from collections import Counter
|
||||
from unittest.mock import Mock
|
||||
|
||||
from mo.front.common.partial_infer.utils import int64_array
|
||||
from mo.graph.graph import Graph, Node
|
||||
from mo.utils.telemetry_utils import send_op_names_info, send_shapes_info
|
||||
from unit_tests.utils.graph import build_graph, regular_op
|
||||
|
||||
try:
|
||||
import openvino_telemetry as tm
|
||||
except ImportError:
|
||||
import mo.utils.telemetry_stub as tm
|
||||
|
||||
|
||||
class TestTelemetryUtils(unittest.TestCase):
    """Unit tests for the telemetry helpers in mo.utils.telemetry_utils."""

    @staticmethod
    def init_telemetry_mocks():
        # Patch the Telemetry class so that no real telemetry backend is
        # contacted and calls to send_event can be inspected by the tests.
        tm.Telemetry.__init__ = Mock(return_value=None)
        tm.Telemetry.send_event = Mock()

    def test_send_op_names_info(self):
        """Op counts must be aggregated across the main graph and all nested sub-graphs."""
        graph = Graph()
        graph.add_nodes_from(['node1'])
        graph.op_names_statistic = Counter(['a', 'a', 'a', 'b', 'b'])

        sub_graph1 = Graph()
        sub_graph1.add_nodes_from(['node2'])
        sub_graph1.op_names_statistic = Counter(['a', 'c', 'c'])

        sub_graph2 = Graph()
        sub_graph2.op_names_statistic = Counter(['a', 'd'])

        # Attach sub_graph1 to a node of the main graph so the recursive
        # traversal can discover it.
        node1 = Node(graph, 'node1')
        node1['sub_graphs'] = ['sub_graph1']
        node1['sub_graph1'] = sub_graph1

        # Attach sub_graph2 one level deeper, inside sub_graph1.
        node2 = Node(sub_graph1, 'node2')
        node2['sub_graphs'] = ['sub_graph2']
        node2['sub_graph2'] = sub_graph2

        self.init_telemetry_mocks()

        send_op_names_info('framework', graph)
        # Totals: 'a' = 3 + 1 + 1, 'b' = 2, 'c' = 2, 'd' = 1.
        tm.Telemetry.send_event.assert_any_call('mo', 'op_count', 'framework_a', 5)
        tm.Telemetry.send_event.assert_any_call('mo', 'op_count', 'framework_b', 2)
        tm.Telemetry.send_event.assert_any_call('mo', 'op_count', 'framework_c', 2)
        tm.Telemetry.send_event.assert_any_call('mo', 'op_count', 'framework_d', 1)

    def test_send_shapes_info(self):
        """Fully defined Parameter shapes: partially_defined_shape flag must be "0"."""
        graph = build_graph({**regular_op('placeholder1', {'shape': int64_array([1, 3, 20, 20]), 'type': 'Parameter'}),
                             **regular_op('placeholder2', {'shape': int64_array([2, 4, 10]), 'type': 'Parameter'}),
                             **regular_op('mul', {'shape': int64_array([7, 8]), 'type': 'Multiply'})}, [])

        self.init_telemetry_mocks()

        send_shapes_info('framework', graph)
        # Only Parameter nodes contribute; the 'mul' node shape is ignored.
        tm.Telemetry.send_event.assert_any_call('mo', 'input_shapes', '{fw:framework,shape:"[ 1  3 20 20],[ 2  4 10]"}')
        tm.Telemetry.send_event.assert_any_call('mo', 'partially_defined_shape',
                                                '{partially_defined_shape:0,fw:framework}')

    def test_send_dynamic_shapes_case1(self):
        """A negative dimension (-1) must set the partially_defined_shape flag to "1"."""
        graph = build_graph({**regular_op('placeholder1', {'shape': int64_array([-1, 3, 20, 20]), 'type': 'Parameter'}),
                             **regular_op('mul', {'shape': int64_array([7, 8]), 'type': 'Multiply'})}, [])

        self.init_telemetry_mocks()

        send_shapes_info('framework', graph)
        tm.Telemetry.send_event.assert_any_call('mo', 'input_shapes', '{fw:framework,shape:"[-1  3 20 20]"}')
        tm.Telemetry.send_event.assert_any_call('mo', 'partially_defined_shape',
                                                '{partially_defined_shape:1,fw:framework}')

    def test_send_dynamic_shapes_case2(self):
        """A zero dimension must also set the partially_defined_shape flag to "1"."""
        graph = build_graph({**regular_op('placeholder1', {'shape': int64_array([2, 3, 20, 20]), 'type': 'Parameter'}),
                             **regular_op('placeholder2', {'shape': int64_array([7, 4, 10]), 'type': 'Parameter'}),
                             **regular_op('placeholder3', {'shape': int64_array([5, 4, 0]), 'type': 'Parameter'}),
                             **regular_op('mul', {'shape': int64_array([7, 8]), 'type': 'Multiply'})}, [])

        self.init_telemetry_mocks()

        send_shapes_info('framework', graph)
        tm.Telemetry.send_event.assert_any_call('mo', 'input_shapes',
                                                '{fw:framework,shape:"[ 2  3 20 20],[ 7  4 10],[5 4 0]"}')
        tm.Telemetry.send_event.assert_any_call('mo', 'partially_defined_shape',
                                                '{partially_defined_shape:1,fw:framework}')
|
||||
Reference in New Issue
Block a user