#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import logging as log
import sys

import cv2
import numpy as np

from openvino.preprocess import PrePostProcessor, ResizeAlgorithm
from openvino.runtime import Core, Layout, Type

def main():
    """Classify a single image with an OpenVINO model and log the top-10 class probabilities.

    Usage: hello_classification.py <path_to_model> <path_to_image> <device_name>

    Returns a process exit code: 0 on success, 1 on bad usage, -1 on an
    unsupported model or unreadable image.
    """
    log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.INFO, stream=sys.stdout)

    # Parsing and validation of input arguments
    if len(sys.argv) != 4:
        log.info(f'Usage: {sys.argv[0]} <path_to_model> <path_to_image> <device_name>')
        return 1

    model_path = sys.argv[1]
    image_path = sys.argv[2]
    device_name = sys.argv[3]

    # --------------------------- Step 1. Initialize OpenVINO Runtime Core ------------------------------------------------
    log.info('Creating OpenVINO Runtime Core')
    core = Core()

    # --------------------------- Step 2. Read a model --------------------------------------------------------------------
    log.info(f'Reading the model: {model_path}')
    # (.xml and .bin files) or (.onnx file)
    model = core.read_model(model_path)

    # This sample handles exactly one input and one output tensor.
    if len(model.inputs) != 1:
        log.error('Sample supports only single input topologies')
        return -1

    if len(model.outputs) != 1:
        log.error('Sample supports only single output topologies')
        return -1

    # --------------------------- Step 3. Set up input --------------------------------------------------------------------
    # Read input image
    image = cv2.imread(image_path)
    if image is None:
        # cv2.imread signals failure by returning None instead of raising —
        # fail here with a clear message rather than crashing on expand_dims.
        log.error(f'Failed to read the image: {image_path}')
        return -1

    # Add N dimension: HWC image -> NHWC tensor expected by the preprocessor below.
    input_tensor = np.expand_dims(image, 0)

    # --------------------------- Step 4. Apply preprocessing -------------------------------------------------------------
    ppp = PrePostProcessor(model)

    # 1) Set input tensor information:
    #    - input() provides information about a single model input
    #    - reuse precision and shape from already available `input_tensor`
    #    - layout of data is 'NHWC'
    ppp.input().tensor() \
        .set_shape(input_tensor.shape) \
        .set_element_type(Type.u8) \
        .set_layout(Layout('NHWC'))  # noqa: ECE001, N400

    # 2) Adding explicit preprocessing steps:
    #    - apply linear resize from tensor spatial dims to model spatial dims
    ppp.input().preprocess().resize(ResizeAlgorithm.RESIZE_LINEAR)

    # 3) Here we suppose model has 'NCHW' layout for input
    ppp.input().model().set_layout(Layout('NCHW'))

    # 4) Set output tensor information:
    #    - precision of tensor is supposed to be 'f32'
    ppp.output().tensor().set_element_type(Type.f32)

    # 5) Apply preprocessing modifying the original 'model'
    model = ppp.build()

    # --------------------------- Step 5. Loading model to the device -----------------------------------------------------
    log.info('Loading the model to the plugin')
    compiled_model = core.compile_model(model, device_name)

    # --------------------------- Step 6. Create infer request and do inference synchronously -----------------------------
    log.info('Starting inference in synchronous mode')
    results = compiled_model.infer_new_request({0: input_tensor})

    # --------------------------- Step 7. Process output ------------------------------------------------------------------
    predictions = next(iter(results.values()))

    # Change a shape of a numpy.ndarray with results to get another one with one dimension
    probs = predictions.reshape(-1)

    # Get an array of 10 class IDs in descending order of probability
    top_10 = np.argsort(probs)[-10:][::-1]

    header = 'class_id probability'

    log.info(f'Image path: {image_path}')
    log.info('Top 10 results: ')
    log.info(header)
    log.info('-' * len(header))

    for class_id in top_10:
        # Pad so the probability column lines up under the 'probability' header.
        probability_indent = ' ' * (len('class_id') - len(str(class_id)) + 1)
        log.info(f'{class_id}{probability_indent}{probs[class_id]:.7f}')

    log.info('')

    # ----------------------------------------------------------------------------------------------------------------------
    log.info('This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n')
    return 0
# Script entry point: the process exit code is main()'s return value.
if __name__ == '__main__':
    sys.exit(main())