[IE Tools] Update tools with new Python API (#944)
parent 499c976194
commit c39e32a47b
@@ -77,7 +77,7 @@ def get_net_copy_with_output(model: str, output: str, core: IECore):
 def get_model_info(net: IENetwork):
     layers = net.layers
     precision = layers[list(layers.keys())[0]].out_data[0].precision
-    return layers, net.inputs, net.outputs, precision
+    return layers, net.input_info, net.outputs, precision
 
 
 ###
@@ -222,8 +222,8 @@ def two_ir_mode(args):
 def dump_mode(args):
     core = get_plugin(args.device, args.l, args.config)
     net = get_net(model=args.model, core=core)
-    out_layers = get_layers_list(net.layers, net.inputs, net.outputs, args.layers)
-    inputs = input_processing(args.model, net.inputs, args.input)
+    out_layers = get_layers_list(net.layers, net.input_info, net.outputs, args.layers)
+    inputs = input_processing(args.model, net.input_info, args.input)
     dump_dict = {}
     for out_layer in out_layers:
         log.info('Layer {} processing'.format(out_layer))
@@ -320,7 +320,7 @@ def read_multi_input_file(input_file: str, net_inputs: dict):
                                 ', '.join(net_inputs.keys())))
         if 'blob' in npz[net_input].item(0):
             just_blob = npz[net_input].item(0)['blob']
-            network_shape = net_inputs[net_input].shape
+            network_shape = net_inputs[net_input].input_data.shape
             log.info('Layer {} shape = {}, input blob from multi-input file shape = {}'
                      ''.format(net_input, network_shape, just_blob.shape))
             try:
@@ -344,7 +344,7 @@ def read_image_file(input_file: str, net_inputs: dict):
     if image is None:
         raise Exception('Can not read input image ' + input_file)
     only_layer_name = list(net_inputs.keys())[0]
-    shape = net_inputs[only_layer_name].shape
+    shape = net_inputs[only_layer_name].input_data.shape
     if len(shape) != 4:
         raise Exception('Can not interpret input shape as image')
     n, c, h, w = shape
@@ -361,7 +361,7 @@ def input_processing(model_path: str, net_inputs: dict, input_file: str, layers_
     inputs = dict()
     if input_file is None:
         for net_input in net_inputs:
-            inputs[net_input] = np.clip(np.random.normal(0.5, 0.1, size=net_inputs[net_input].shape), 0, 1)
+            inputs[net_input] = np.clip(np.random.normal(0.5, 0.1, size=net_inputs[net_input].input_data.shape), 0, 1)
         dump_output_file(model_path + '_random_input_dump.npz', {inp: {'blob': inputs[inp]} for inp in inputs})
         return inputs
     try:
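Every change in this file follows one pattern: the deprecated `IENetwork.inputs` dictionary, which mapped input names directly to DataPtr objects, becomes `IENetwork.input_info`, which maps names to InputInfoPtr wrappers, so shape lookups gain an extra `.input_data` hop. A minimal sketch of the two access styles, assuming the 2021-era `openvino.inference_engine` API; the model paths and the commented-out input name are illustrative:

from openvino.inference_engine import IECore

ie = IECore()
net = ie.read_network(model='model.xml', weights='model.bin')  # illustrative paths

# Deprecated access: net.inputs maps an input name straight to a DataPtr.
# shape = net.inputs['data'].shape

# Updated access: net.input_info maps the name to an InputInfoPtr;
# the DataPtr with its shape and precision hangs off .input_data.
for name, info in net.input_info.items():
    print(name, info.input_data.shape, info.input_data.precision)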
tools/.gitignore (vendored): 6 changes
@@ -1,8 +1,2 @@
-accuracy_checker
-accuracy_checker.log
-i8_normalized.dot
 openvino.tools.benchmark.log
 __pycache__/
-accuracy_checker.local/
-accuracy_checker.original/
-
@@ -2,7 +2,6 @@
 
 ## General
 `openvino.tools` package includes:
-* openvino.tools.accuracy_checker
 * openvino.tools.benchmark
 
 Please, refer to https://docs.openvinotoolkit.org for details.
@@ -1,17 +0,0 @@
-"""
- Copyright (C) 2018-2020 Intel Corporation
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-__version__ = "0.0.1"
@@ -1,106 +0,0 @@
-import argparse
-import logging as log
-import sys
-
-log.basicConfig(format="[ %(levelname)s ] %(message)s", level=log.INFO, stream=sys.stdout)
-import os
-
-import numpy as np
-from openvino.inference_engine import IENetwork
-
-
-def python_api_infer(net, feed_dict, device, lib, api, nireq, outputs_to_add: list = None):
-    """
-    Function to perform IE inference using python API "in place"
-    :param net: IENetwork instance
-    :param feed_dict: Dict which contains mapping between input blob and input data
-    :param device: Device name for inference
-    :param lib: Absolute path to custom kernel lib
-    :param outputs_to_add: Layer names list to take output from
-    :param api: Defines use synchronous infer or asynchronous
-    :param nireq: Number of infer requests to create for asynchronous infer
-    :return: Dict containing out blob name and out data
-    """
-
-    from openvino.inference_engine import IECore
-    ie = IECore()
-
-    if outputs_to_add:
-        net.add_outputs(outputs_to_add)
-
-    exec_net = ie.load_network(net, device, num_requests=nireq)
-
-    if api == "async":
-        res = []
-        for i in range(nireq):
-            reqest_handler = exec_net.start_async(request_id=i, inputs=feed_dict)
-            reqest_handler.wait()
-            res.append(reqest_handler.outputs)
-    else:
-        res = exec_net.infer(inputs=feed_dict)
-    del net
-    # It's important to delete executable network first to avoid double free in plugin offloading.
-    # Issue relates ony for hetero and Myriad plugins
-    del exec_net
-    del ie
-    return res
-
-
-def cli_parser():
-    parser = argparse.ArgumentParser(description='Python_api reproducer')
-    parser.add_argument('-i', dest='feed_dict', required=True, help='Path to input data in .npz format')
-    parser.add_argument('-m', dest='ir_path', required=True, help='Path to XML file of IR')
-    parser.add_argument('-d', dest='device', required=True, help='Target device to infer on')
-    parser.add_argument('-api', dest='api', default='sync', help='')
-    parser.add_argument('-nireq', dest='nireq', default=1, help='')
-    parser.add_argument('-r', dest='out_path', default=None,
-                        help='Dumps results to the output folder')
-    parser.add_argument('--out_layers', dest='out_layers', default=[],
-                        help='Names of layers to dump inference results. Example: "input,conv3d"')
-    parser.add_argument('--dump_all_layers', dest='dump_all_layers', default=False, action="store_true",
-                        help='Bool value to dump inference results from all layers')
-
-    args = parser.parse_args()
-    feed_dict = args.feed_dict
-    ir_path = args.ir_path
-    device = args.device
-    lib = args.lib
-    api = args.api
-    nireq = int(args.nireq)
-    out_path = args.out_path
-    if out_path and not os.path.exists(out_path):
-        os.makedirs(out_path)
-    out_layers = args.out_layers.split(",") if args.out_layers else args.out_layers
-    dump_all_layers = args.dump_all_layers
-    if out_layers and dump_all_layers:
-        raise AttributeError('CMD arguments "out_layers" and "dump_all_layers" were specified together. '
-                             'Please, specify only one argument')
-    return feed_dict, ir_path, device, lib, api, nireq, out_path, out_layers, dump_all_layers
-
-
-if __name__ == "__main__":
-    feed_dict, ir_path, device, lib, api, nireq, out_path, out_layers, dump_all_layers = cli_parser()
-
-    bin_path = os.path.splitext(ir_path)[0] + '.bin'
-    feed_dict = dict(np.load(feed_dict))
-    network = IENetwork(model=ir_path, weights=bin_path)
-    if dump_all_layers:
-        out_layers = list(network.layers.keys())
-    results = python_api_infer(net=network, feed_dict=feed_dict, device=device, lib=lib, api=api, nireq=nireq,
-                               outputs_to_add=out_layers)
-    if out_path:
-        if api == "async":
-            for i, result in enumerate(results):
-                dump_path = os.path.join(out_path, "dump_req{}.npz".format(str(i)))
-                np.savez(dump_path, **result)
-                log.info("Path for inference results for {} request: {}".format(str(i), dump_path))
-        else:
-            dump_path = os.path.join(out_path, "dump.npz")
-            np.savez(os.path.join(out_path, "dump.npz"), **results)
-            log.info("Path for inference results: {}".format(dump_path))
-    else:
-        log.info("Inference results won't be saved in the file. "
-                 "To do it need to specify '-r' option.")
-        log.info("Inference results:")
-        log.info(results)
-    log.info("SUCCESS!")
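The deleted reproducer above predates the API update: it builds the network with the deprecated `IENetwork(model=..., weights=...)` constructor and reads shapes through `net.inputs`. For comparison, a minimal sketch of the same load-and-infer flow on the updated API, assuming the 2021-era `openvino.inference_engine`; the paths, device name, and random input are illustrative:

import numpy as np
from openvino.inference_engine import IECore

ie = IECore()
# read_network replaces the deprecated IENetwork(model=..., weights=...) constructor.
net = ie.read_network(model='model.xml', weights='model.bin')  # illustrative paths

# input_info replaces net.inputs; the shape now lives on .input_data.
input_name = next(iter(net.input_info))
shape = net.input_info[input_name].input_data.shape

exec_net = ie.load_network(net, 'CPU', num_requests=1)  # illustrative device
result = exec_net.infer(inputs={input_name: np.random.rand(*shape).astype(np.float32)})
print({name: blob.shape for name, blob in result.items()})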