* [Python API] Move samples and docs to the new directory * move samples to the new directory * try to fix build and pychecks * fix links * fix pychecks * fix cmake * fix cpack installation * Update inference-engine/ie_bridges/python/CMakeLists.txt Co-authored-by: Sergey Lyubimtsev <sergey.lyubimtsev@intel.com> Co-authored-by: Sergey Lyubimtsev <sergey.lyubimtsev@intel.com>
126 lines
5.6 KiB
Python
Executable File
126 lines
5.6 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
# -*- coding: utf-8 -*-
|
|
# Copyright (C) 2018-2021 Intel Corporation
|
|
# SPDX-License-Identifier: Apache-2.0
|
|
import argparse
|
|
import logging as log
|
|
import sys
|
|
|
|
import cv2
|
|
import numpy as np
|
|
from openvino.inference_engine import IECore
|
|
|
|
|
|
def parse_args() -> argparse.Namespace:
    """Define the sample's command-line interface and return the parsed options.

    A custom '-h' action is registered because the parser is created with
    add_help=False, keeping all options inside one argument group.
    """
    parser = argparse.ArgumentParser(add_help=False)
    options = parser.add_argument_group('Options')
    # fmt: off
    options.add_argument('-h', '--help', action='help',
                         help='Show this help message and exit.')
    options.add_argument('-m', '--model', required=True, type=str,
                         help='Required. Path to an .xml or .onnx file with a trained model.')
    options.add_argument('-i', '--input', required=True, type=str,
                         help='Required. Path to an image file.')
    options.add_argument('-d', '--device', default='CPU', type=str,
                         help='Optional. Specify the target device to infer on; CPU, GPU, MYRIAD, HDDL or HETERO: '
                              'is acceptable. The sample will look for a suitable plugin for device specified. '
                              'Default value is CPU.')
    options.add_argument('--labels', default=None, type=str,
                         help='Optional. Path to a labels mapping file.')
    options.add_argument('-nt', '--number_top', default=10, type=int,
                         help='Optional. Number of top results.')
    # fmt: on
    return parser.parse_args()
|
|
|
|
|
|
def main():
    """Run synchronous classification inference on a single image.

    Reads the model and image given on the command line, runs one inference
    request on the selected device, and logs the top-N class probabilities.

    Returns:
        0 on success, -1 on a usage/input error (logged before returning).
    """
    log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.INFO, stream=sys.stdout)
    args = parse_args()

    # ---------------------------Step 1. Initialize inference engine core--------------------------------------------------
    log.info('Creating Inference Engine')
    ie = IECore()

    # ---------------------------Step 2. Read a model in OpenVINO Intermediate Representation or ONNX format---------------
    log.info(f'Reading the network: {args.model}')
    # (.xml and .bin files) or (.onnx file)
    net = ie.read_network(model=args.model)

    # This sample only handles the simplest topology shape: one input, one output.
    if len(net.input_info) != 1:
        log.error('Sample supports only single input topologies')
        return -1
    if len(net.outputs) != 1:
        log.error('Sample supports only single output topologies')
        return -1

    # ---------------------------Step 3. Configure input & output----------------------------------------------------------
    log.info('Configuring input and output blobs')
    # Get names of input and output blobs
    input_blob = next(iter(net.input_info))
    out_blob = next(iter(net.outputs))

    # Set input and output precision manually
    net.input_info[input_blob].precision = 'U8'
    net.outputs[out_blob].precision = 'FP32'

    # Get a number of classes recognized by a model
    num_of_classes = max(net.outputs[out_blob].shape)

    # ---------------------------Step 4. Loading model to the device-------------------------------------------------------
    log.info('Loading the model to the plugin')
    exec_net = ie.load_network(network=net, device_name=args.device)

    # ---------------------------Step 5. Create infer request--------------------------------------------------------------
    # load_network() method of the IECore class with a specified number of requests (default 1) returns an ExecutableNetwork
    # instance which stores infer requests. So you already created Infer requests in the previous step.

    # ---------------------------Step 6. Prepare input---------------------------------------------------------------------
    original_image = cv2.imread(args.input)
    # Fix: cv2.imread returns None (no exception) for a missing or unreadable
    # file; fail with a clear message instead of crashing on .copy() below.
    if original_image is None:
        log.error(f'Failed to read the image: {args.input}')
        return -1
    image = original_image.copy()
    # NCHW model input; only H and W are needed for resizing.
    _, _, h, w = net.input_info[input_blob].input_data.shape

    if image.shape[:-1] != (h, w):
        log.warning(f'Image {args.input} is resized from {image.shape[:-1]} to {(h, w)}')
        image = cv2.resize(image, (w, h))

    # Change data layout from HWC to CHW
    image = image.transpose((2, 0, 1))
    # Add N dimension to transform to NCHW
    image = np.expand_dims(image, axis=0)

    # ---------------------------Step 7. Do inference----------------------------------------------------------------------
    log.info('Starting inference in synchronous mode')
    res = exec_net.infer(inputs={input_blob: image})

    # ---------------------------Step 8. Process output--------------------------------------------------------------------
    # Generate a label list (first comma-separated field of each line)
    if args.labels:
        with open(args.labels, 'r') as f:
            labels = [line.split(',')[0].strip() for line in f]

    res = res[out_blob]
    # Change a shape of a numpy.ndarray with results to get another one with one dimension
    probs = res.reshape(num_of_classes)
    # Get an array of args.number_top class IDs in descending order of probability.
    # Fix: sort descending first, then take the prefix; the previous
    # `[-args.number_top:]` slice returned ALL classes when number_top == 0,
    # because a slice starting at -0 is the whole array.
    top_n_indexes = np.argsort(probs)[::-1][:args.number_top]

    header = 'classid probability'
    header = header + ' label' if args.labels else header

    log.info(f'Image path: {args.input}')
    log.info(f'Top {args.number_top} results: ')
    log.info(header)
    log.info('-' * len(header))

    for class_id in top_n_indexes:
        # Pad so the probability (and optional label) columns line up with the header.
        probability_indent = ' ' * (len('classid') - len(str(class_id)) + 1)
        label_indent = ' ' * (len('probability') - 8) if args.labels else ''
        label = labels[class_id] if args.labels else ''
        log.info(f'{class_id}{probability_indent}{probs[class_id]:.7f}{label_indent}{label}')
    log.info('')

    # ----------------------------------------------------------------------------------------------------------------------
    log.info('This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n')
    return 0
|
|
|
|
|
|
# Run the sample only when executed as a script and propagate its
# status code (0 on success, -1 on error) to the shell.
if __name__ == '__main__':
    sys.exit(main())
|