feat: linters for IE Py API, wheel, samples (#5352)
* feat: linters for IE Py API, wheel, samples
* fix indent after auto-formatter
* ignore formatting for argument parsing
This commit is contained in: parent 4daa74303a, commit 10f3d7e065
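The last bullet refers to the `# fmt: off` / `# fmt: on` markers added around the samples' argument parsing (see the parse_args hunks below): they keep `black -l 160 -S` from reflowing the manually aligned add_argument calls. A minimal standalone sketch of the pattern, reconstructed from those hunks:

import argparse


def parse_args() -> argparse.Namespace:
    """Parse and return command line arguments"""
    parser = argparse.ArgumentParser(add_help=False)
    args = parser.add_argument_group('Options')
    # fmt: off  -- black leaves this region exactly as written
    args.add_argument('-h', '--help', action='help', help='Show this help message and exit.')
    args.add_argument('-m', '--model', required=True, type=str,
                      help='Required. Path to an .xml or .onnx file with a trained model.')
    # fmt: on
    return parser.parse_args()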
60 .github/workflows/py_checks.yml (vendored)
@@ -1,13 +1,13 @@
name: IE Python Checks
# TODO: add for IE API, wheels

on:
workflow_dispatch:
push:
paths:
- 'inference-engine/ie_bridges/python/sample/**'
- 'inference-engine/ie_bridges/python/**'
pull_request:
paths:
- 'inference-engine/ie_bridges/python/sample/**'
- 'inference-engine/ie_bridges/python/**'
jobs:
linters:
runs-on: ubuntu-18.04
@@ -21,13 +21,55 @@ jobs:
with:
python-version: '3.6'
- name: Install dependencies
run: python -m pip install -r inference-engine/ie_bridges/python/sample/requirements_dev.txt
- name: Run Flake
run: python -m flake8 ./ --config=setup.cfg --show-source
run: python -m pip install -r inference-engine/ie_bridges/python/requirements_dev.txt
- name: Run Flake on samples
run: python -m flake8 ./ --config=../setup.cfg
working-directory: inference-engine/ie_bridges/python/sample
- name: Create code style diff for samples
if: failure()
run: |
python -m black -l 160 -S ./
git diff > samples_diff.diff
working-directory: inference-engine/ie_bridges/python/sample
- uses: actions/upload-artifact@v2
if: failure()
with:
name: samples_diff
path: samples_diff.diff
- name: Run Flake on src
run: python -m flake8 ./ --config=../setup.cfg
working-directory: inference-engine/ie_bridges/python/src
- name: Create code style diff for Python src
if: failure()
run: |
python -m black -l 160 -S ./
git diff > src_diff.diff
working-directory: inference-engine/ie_bridges/python/src
- uses: actions/upload-artifact@v2
if: failure()
with:
name: src_diff
path: src_diff.diff
- name: Run Flake on wheel
run: python -m flake8 ./ --config=../setup.cfg
working-directory: inference-engine/ie_bridges/python/wheel
- name: Create code style diff for wheel
if: failure()
run: |
python -m black -l 160 -S ./
git diff > wheel_diff.diff
working-directory: inference-engine/ie_bridges/python/wheel
- uses: actions/upload-artifact@v2
if: failure()
with:
name: wheel_diff
path: wheel_diff.diff

- name: Run MyPy
run: python -m mypy ./ --config-file ./setup.cfg --show-error-context --show-column-numbers --pretty
working-directory: inference-engine/ie_bridges/python/sample
run: python -m mypy ./ --config-file ./setup.cfg
working-directory: inference-engine/ie_bridges/python
- name: Run Bandit
run: python -m bandit -r ./ -f screen
working-directory: inference-engine/ie_bridges/python/sample
working-directory: inference-engine/ie_bridges/python
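For reference, the checks this workflow adds can be reproduced locally with the same commands. A hedged sketch (assumes the repository root as the current directory and the packages from requirements_dev.txt installed; commands, flags, and working directories are copied from the steps above):

import subprocess

PY_ROOT = 'inference-engine/ie_bridges/python'

# Same tools the workflow invokes: flake8 for style, mypy for typing, bandit for security.
checks = [
    (['python', '-m', 'flake8', './', '--config=../setup.cfg'], f'{PY_ROOT}/sample'),
    (['python', '-m', 'flake8', './', '--config=../setup.cfg'], f'{PY_ROOT}/src'),
    (['python', '-m', 'flake8', './', '--config=../setup.cfg'], f'{PY_ROOT}/wheel'),
    (['python', '-m', 'mypy', './', '--config-file', './setup.cfg'], PY_ROOT),
    (['python', '-m', 'bandit', '-r', './', '-f', 'screen'], PY_ROOT),
]

for cmd, cwd in checks:
    # check=True stops at the first failing linter, like a failing CI step would.
    subprocess.run(cmd, check=True, cwd=cwd)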
2 inference-engine/ie_bridges/python/.bandit (new file)
@@ -0,0 +1,2 @@
[bandit]
skips: B101
@@ -1,7 +1,7 @@
bandit
black
flake8
flake8-annotations-complexity
flake8-bandit
flake8-broken-line
flake8-bugbear
flake8-class-attributes-order
@@ -15,6 +15,7 @@ def parse_args() -> argparse.Namespace:
"""Parse and return command line arguments"""
parser = argparse.ArgumentParser(add_help=False)
args = parser.add_argument_group('Options')
# fmt: off
args.add_argument('-h', '--help', action='help', help='Show this help message and exit.')
args.add_argument('-m', '--model', required=True, type=str,
help='Required. Path to an .xml or .onnx file with a trained model.')
@@ -31,7 +32,7 @@ def parse_args() -> argparse.Namespace:
'Default value is CPU.')
args.add_argument('--labels', default=None, type=str, help='Optional. Path to a labels mapping file.')
args.add_argument('-nt', '--number_top', default=10, type=int, help='Optional. Number of top results.')

# fmt: on
return parser.parse_args()

@@ -159,8 +160,7 @@ def main():
break

# ----------------------------------------------------------------------------------------------------------------------
log.info('This sample is an API example, '
'for any performance measurements please use the dedicated benchmark_app tool\n')
log.info('This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n')
return 0
@@ -15,6 +15,7 @@ def parse_args() -> argparse.Namespace:
"""Parse and return command line arguments"""
parser = argparse.ArgumentParser(add_help=False)
args = parser.add_argument_group('Options')
# fmt: off
args.add_argument('-h', '--help', action='help', help='Show this help message and exit.')
args.add_argument('-m', '--model', required=True, type=str,
help='Required. Path to an .xml or .onnx file with a trained model.')
@@ -25,7 +26,7 @@ def parse_args() -> argparse.Namespace:
'Default value is CPU.')
args.add_argument('--labels', default=None, type=str, help='Optional. Path to a labels mapping file.')
args.add_argument('-nt', '--number_top', default=10, type=int, help='Optional. Number of top results.')

# fmt: on
return parser.parse_args()

@@ -116,8 +117,7 @@ def main():
log.info('')

# ----------------------------------------------------------------------------------------------------------------------
log.info('This sample is an API example, '
'for any performance measurements please use the dedicated benchmark_app tool\n')
log.info('This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n')
return 0
@@ -15,6 +15,7 @@ def parse_args() -> argparse.Namespace:
"""Parse and return command line arguments"""
parser = argparse.ArgumentParser(add_help=False)
args = parser.add_argument_group('Options')
# fmt: off
args.add_argument('-h', '--help', action='help', help='Show this help message and exit.')
args.add_argument('-m', '--model', required=True, type=str,
help='Required. Path to an .xml or .onnx file with a trained model.')
@@ -30,7 +31,7 @@ def parse_args() -> argparse.Namespace:
'is acceptable. The sample will look for a suitable plugin for device specified. '
'Default value is CPU.')
args.add_argument('--labels', default=None, type=str, help='Optional. Path to a labels mapping file.')

# fmt: on
return parser.parse_args()

@@ -126,8 +127,7 @@ def main():
xmax = int(detection[5] * w)
ymax = int(detection[6] * h)

log.info(f'Found: label = {label}, confidence = {confidence:.2f}, '
f'coords = ({xmin}, {ymin}), ({xmax}, {ymax})')
log.info(f'Found: label = {label}, confidence = {confidence:.2f}, ' f'coords = ({xmin}, {ymin}), ({xmax}, {ymax})')

# Draw a bounding box on a output image
cv2.rectangle(output_image, (xmin, ymin), (xmax, ymax), (0, 255, 0), 2)
@@ -136,8 +136,7 @@ def main():
log.info('Image out.bmp was created!')

# ----------------------------------------------------------------------------------------------------------------------
log.info('This sample is an API example, '
'for any performance measurements please use the dedicated benchmark_app tool\n')
log.info('This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n')
return 0
@@ -19,6 +19,7 @@ def parse_args() -> argparse.Namespace:
"""Parse and return command line arguments"""
parser = argparse.ArgumentParser(add_help=False)
args = parser.add_argument_group('Options')
# fmt: off
args.add_argument('-h', '--help', action='help', help='Show this help message and exit.')
args.add_argument('-m', '--model', required=True, type=str,
help='Required. Path to a file with network weights.')
@@ -29,7 +30,7 @@ def parse_args() -> argparse.Namespace:
'Default value is CPU.')
args.add_argument('--labels', default=None, type=str, help='Optional. Path to a labels mapping file.')
args.add_argument('-nt', '--number_top', default=10, type=int, help='Optional. Number of top results.')

# fmt: on
return parser.parse_args()

@@ -109,7 +110,8 @@ def create_ngraph_function(args: argparse.Namespace) -> ngraph.impl.Function:
reshape_1_dims, reshape_1_length = shape_and_length([2])
# workaround to get int64 weights from float32 ndarray w/o unnecessary copying
dtype_weights = np.frombuffer(
weights[weights_offset:weights_offset + 2 * reshape_1_length], dtype=np.int64,
weights[weights_offset : weights_offset + 2 * reshape_1_length],
dtype=np.int64,
)
reshape_1_kernel = ngraph.constant(dtype_weights)
weights_offset += 2 * reshape_1_length
@@ -251,8 +253,7 @@ def main():
log.info('')

# ----------------------------------------------------------------------------------------------------------------------
log.info('This sample is an API example, '
'for any performance measurements please use the dedicated benchmark_app tool\n')
log.info('This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n')
return 0
@@ -15,6 +15,7 @@ def parse_args() -> argparse.Namespace:
"""Parse and return command line arguments"""
parser = argparse.ArgumentParser(add_help=False)
args = parser.add_argument_group('Options')
# fmt: off
args.add_argument('-h', '--help', action='help', help='Show this help message and exit.')
args.add_argument('-m', '--model', required=True, type=str,
help='Required. Path to an .xml or .onnx file with a trained model.')
@@ -30,7 +31,7 @@ def parse_args() -> argparse.Namespace:
'is acceptable. The sample will look for a suitable plugin for device specified. '
'Default value is CPU.')
args.add_argument('--labels', default=None, type=str, help='Optional. Path to a labels mapping file.')

# fmt: on
return parser.parse_args()

@@ -139,8 +140,7 @@ def main(): # noqa
xmax = int(xmax * w)
ymax = int(ymax * h)

log.info(f'Found: label = {label}, confidence = {confidence:.2f}, '
f'coords = ({xmin}, {ymin}), ({xmax}, {ymax})')
log.info(f'Found: label = {label}, confidence = {confidence:.2f}, ' f'coords = ({xmin}, {ymin}), ({xmax}, {ymax})')

# Draw a bounding box on a output image
cv2.rectangle(output_image, (xmin, ymin), (xmax, ymax), (0, 255, 0), 2)
@@ -149,8 +149,7 @@ def main(): # noqa
log.info('Image out.bmp created!')

# ----------------------------------------------------------------------------------------------------------------------
log.info('This sample is an API example, '
'for any performance measurements please use the dedicated benchmark_app tool\n')
log.info('This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n')
return 0
@@ -1,12 +0,0 @@
[flake8]
max-line-length = 120
max-parameters-amount = 8
show_source = True
docstring-convention = google
enable-extensions=G

[pydocstyle]
convention = google

[mypy]
ignore_missing_imports = True
@@ -15,6 +15,7 @@ def parse_args() -> argparse.Namespace:
"""Parse and return command line arguments"""
parser = argparse.ArgumentParser(add_help=False)
args = parser.add_argument_group('Options')
# fmt: off
args.add_argument('-h', '--help', action='help', help='Show this help message and exit.')
args.add_argument('-m', '--model', required=True, type=str,
help='Required. Path to an .xml or .onnx file with a trained model.')
@@ -37,7 +38,7 @@ def parse_args() -> argparse.Namespace:
help='Optional. Mean value of green channel for mean value subtraction in postprocessing.')
args.add_argument('--mean_val_b', default=0, type=float,
help='Optional. Mean value of blue channel for mean value subtraction in postprocessing.')

# fmt: on
return parser.parse_args()

@@ -137,8 +138,7 @@ def main():
log.info(f'Image out_{i}.bmp created!')

# ----------------------------------------------------------------------------------------------------------------------
log.info('This sample is an API example, '
'for any performance measurements please use the dedicated benchmark_app tool\n')
log.info('This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n')
return 0
23 inference-engine/ie_bridges/python/setup.cfg (new file)
@@ -0,0 +1,23 @@
[flake8]
filename = *.py, *.pyx
max-line-length = 160
ignore = E203
max-parameters-amount = 8
show_source = True
docstring-convention = google
enable-extensions = G
per-file-ignores =
*.pyx: E225, E226, E251, E999, E800, E265, E203, E266, E227, E211
tests/*: S101, T001
*__init__.py: F403, F405, F405

[pydocstyle]
convention = google

[mypy]
ignore_missing_imports = True
disable_error_code = attr-defined
show_column_numbers = True
show_error_context = True
show_absolute_path = True
pretty = True
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
@@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import os
import sys

if sys.platform == "win32":
if sys.platform == 'win32':
# Installer, yum, pip installs openvino dlls to the different directories
# and those paths need to be visible to the openvino modules
#
@@ -24,9 +25,9 @@ if sys.platform == "win32":
if (3, 8) <= sys.version_info:
os.add_dll_directory(os.path.abspath(lib_path))
else:
os.environ["PATH"] = os.path.abspath(lib_path) + ";" + os.environ["PATH"]
os.environ['PATH'] = os.path.abspath(lib_path) + ';' + os.environ['PATH']

from .ie_api import *
__all__ = ['IENetwork', "TensorDesc", "IECore", "Blob", "PreProcessInfo", "get_version"]
__version__ = get_version()

__all__ = ['IENetwork', 'TensorDesc', 'IECore', 'Blob', 'PreProcessInfo', 'get_version']
__version__ = get_version()  # type: ignore
@@ -6,16 +6,15 @@ from .cimport ie_api_impl_defs as C
import numpy as np
from enum import Enum

supported_precisions = ["FP32", "FP64", "FP16", "I64", "U64", "I32", "U32",
"I16", "I4", "I8", "U16", "U4", "U8", "BOOL", "BIN", "BF16"]
supported_precisions = ['FP32', 'FP64', 'FP16', 'I64', 'U64', 'I32', 'U32',
'I16', 'I4', 'I8', 'U16', 'U4', 'U8', 'BOOL', 'BIN', 'BF16']

known_plugins = ['CPU', 'GPU', 'FPGA', 'MYRIAD', 'HETERO', 'HDDL', 'MULTI']

layout_int_to_str_map = {0: "ANY", 1: "NCHW", 2: "NHWC", 3: "NCDHW", 4: "NDHWC", 64: "OIHW", 95: "SCALAR", 96: "C",
128: "CHW", 192: "HW", 193: "NC", 194: "CN", 200: "BLOCKED"}
layout_int_to_str_map = {0: 'ANY', 1: 'NCHW', 2: 'NHWC', 3: 'NCDHW', 4: 'NDHWC', 64: 'OIHW', 95: 'SCALAR', 96: 'C',
128: 'CHW', 192: 'HW', 193: 'NC', 194: 'CN', 200: 'BLOCKED'}

format_map = {
'FP32' : np.float32,
format_map = {'FP32' : np.float32,
'FP64' : np.float64,
'FP16' : np.float16,
'I64' : np.int64,
@@ -34,21 +33,21 @@ format_map = {
}

layout_str_to_enum = {'ANY': C.Layout.ANY,
"NHWC": C.Layout.NHWC,
"NCHW": C.Layout.NCHW,
"NCDHW": C.Layout.NCDHW,
"NDHWC": C.Layout.NDHWC,
"OIHW": C.Layout.OIHW,
"GOIHW": C.Layout.GOIHW,
"OIDHW": C.Layout.OIDHW,
"GOIDHW": C.Layout.GOIDHW,
"SCALAR": C.Layout.SCALAR,
"C": C.Layout.C,
"CHW": C.Layout.CHW,
"HW": C.Layout.HW,
"NC": C.Layout.NC,
"CN": C.Layout.CN,
"BLOCKED": C.Layout.BLOCKED
'NHWC': C.Layout.NHWC,
'NCHW': C.Layout.NCHW,
'NCDHW': C.Layout.NCDHW,
'NDHWC': C.Layout.NDHWC,
'OIHW': C.Layout.OIHW,
'GOIHW': C.Layout.GOIHW,
'OIDHW': C.Layout.OIDHW,
'GOIDHW': C.Layout.GOIDHW,
'SCALAR': C.Layout.SCALAR,
'C': C.Layout.C,
'CHW': C.Layout.CHW,
'HW': C.Layout.HW,
'NC': C.Layout.NC,
'CN': C.Layout.CN,
'BLOCKED': C.Layout.BLOCKED
}
@@ -60,14 +60,19 @@ cdef c_map_to_dict(map[string, string] c_map):
def get_version():
return C.get_version().decode()

## This class defines Tensor description
cdef class TensorDesc:

def __eq__(self, other : TensorDesc):
return self.layout == other.layout and self.precision == other.precision and self.dims == other.dims

def __ne__(self, other : TensorDesc):
return self.layout != other.layout or self.precision != other.precision or self.dims != other.dims

def __deepcopy__(self, memodict={}):
return TensorDesc(deepcopy(self.precision, memodict), deepcopy(self.dims, memodict), deepcopy(self.layout, memodict))

## Class constructor
# @param precision: target memory precision
# @param dims: target memory dimensions
@@ -77,26 +82,32 @@ cdef class TensorDesc:
if precision not in supported_precisions:
raise ValueError(f"Unsupported precision {precision}! List of supported precisions: {supported_precisions}")
self.impl = C.CTensorDesc(C.Precision.FromStr(precision.encode()), dims, layout_str_to_enum[layout])

## Shape (dimensions) of the TensorDesc object
@property
def dims(self):
return self.impl.getDims()

@dims.setter
def dims(self, dims_array : [list, tuple]):
self.impl.setDims(dims_array)

## Precision of the TensorDesc object
@property
def precision(self):
return self.impl.getPrecision().name().decode()

@precision.setter
def precision(self, precision : str):
if precision not in supported_precisions:
raise ValueError(f"Unsupported precision {precision}! List of supported precisions: {supported_precisions}")
self.impl.setPrecision(C.Precision.FromStr(precision.encode()))

## Layout of the TensorDesc object
@property
def layout(self):
return layout_int_to_str_map[self.impl.getLayout()]

@layout.setter
def layout(self, layout : str):
if layout not in layout_str_to_enum.keys():
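Usage note (not part of the diff): per the constructor doc-comment above, TensorDesc takes a precision string, a dims list, and a layout string, and the __eq__/property accessors make round-tripping straightforward. A hedged sketch, assuming the openvino.inference_engine package built from this bridge is importable:

from openvino.inference_engine import TensorDesc

# Precision and layout strings must come from supported_precisions / layout_str_to_enum above.
desc = TensorDesc('FP32', [1, 3, 224, 224], 'NCHW')
print(desc.dims, desc.precision, desc.layout)
assert desc == TensorDesc('FP32', [1, 3, 224, 224], 'NCHW')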
@@ -500,11 +511,13 @@ cdef class PreProcessChannel:
property mean_value:
def __get__(self):
return deref(self._ptr).meanValue

def __set__(self, float mean_value):
deref(self._ptr).meanValue = mean_value
property std_scale:
def __get__(self):
return deref(self._ptr).stdScale

def __set__(self, float std_scale):
deref(self._ptr).stdScale = std_scale
property mean_data:
@@ -512,6 +525,7 @@ cdef class PreProcessChannel:
blob = Blob()
blob._ptr = deref(self._ptr).meanData
return blob

def __set__(self, Blob mean_data):
deref(self._ptr).meanData = mean_data._ptr
@@ -757,18 +771,22 @@ cdef class CDataPtr:
@property
def name(self):
return deref(self._ptr).getName().decode()

## Precision of the data object
@property
def precision(self):
return deref(self._ptr).getPrecision().name().decode()

## Shape (dimensions) of the data object
@property
def shape(self):
return deref(self._ptr).getDims()

## Layout of the data object
@property
def layout(self):
return layout_int_to_str_map[deref(self._ptr).getLayout()]

## Checks if the current data object is resolved
@property
def initialized(self):
@@ -810,7 +828,6 @@ cdef class ExecutableNetwork:
res[name] = deepcopy(value.buffer)
return res

## Starts asynchronous inference for specified infer request.
# Wraps `async_infer()` method of the `InferRequest` class.
# @param request_id: Index of infer request to start inference
@@ -877,6 +894,7 @@ cdef class ExecutableNetwork:
data_ptr._ptr = in_.second
inputs[in_.first.decode()] = data_ptr
return inputs

## A dictionary that maps output layer names to CDataPtr objects
@property
def outputs(self):
@@ -888,6 +906,7 @@ cdef class ExecutableNetwork:
data_ptr._ptr = in_.second
outputs[in_.first.decode()] = data_ptr
return outputs

## Gets executable graph information from a device
# @return An instance of `IENetwork`
#
@@ -1180,10 +1199,10 @@ cdef class InferRequest:
cpdef get_perf_counts(self):
cdef map[string, C.ProfileInfo] c_profile = deref(self.impl).getPerformanceCounts()
profile = {}
for l in c_profile:
info = l.second
for line in c_profile:
info = line.second
# TODO: add execution index. Check if unsigned int is properly converted to int in python.
profile[l.first.decode()] = {"status": info.status.decode(), "exec_type": info.exec_type.decode(),
profile[line.first.decode()] = {"status": info.status.decode(), "exec_type": info.exec_type.decode(),
"layer_type": info.layer_type.decode(), "real_time": info.real_time,
"cpu_time": info.cpu_time, "execution_index": info.execution_index}
return profile
@@ -1294,8 +1313,7 @@ cdef class IENetwork:
cdef string weights_
if init_from_buffer:
warnings.warn("Reading network using constructor is deprecated. "
"Please, use IECore.read_network() method instead",
DeprecationWarning)
"Please, use IECore.read_network() method instead", DeprecationWarning)
memcpy(xml_buffer, <char*> model, len(model))
memcpy(bin_buffer, <uint8_t *> weights, len(weights))
xml_buffer[len(model)] = b'\0'
@@ -1304,8 +1322,7 @@ cdef class IENetwork:
else:
if model and weights:
warnings.warn("Reading network using constructor is deprecated. "
"Please, use IECore.read_network() method instead",
DeprecationWarning)
"Please, use IECore.read_network() method instead", DeprecationWarning)
if not os.path.isfile(model):
raise Exception(f"Path to the model {model} doesn't exist or it's a directory")
if not os.path.isfile(weights):
@@ -1389,7 +1406,6 @@ cdef class IENetwork:
raise AttributeError(f"Invalid batch size {batch}! Batch size should be positive integer value")
self.impl.setBatch(batch)

## Marks any intermediate layer as output layer to retrieve the inference results from the specified layers.
# @param outputs: List of layers to be set as model outputs. The list can contain strings with layer names to be set
# as outputs or tuples with layer name as first element and output port id as second element.
@@ -1405,13 +1421,13 @@ cdef class IENetwork:
def add_outputs(self, outputs):
if not isinstance(outputs, list):
outputs = [outputs]
for i, l in enumerate(outputs):
if isinstance(l, str):
self.impl.addOutput(l.encode(), 0)
elif isinstance(l, tuple) and len(l) == 2:
self.impl.addOutput(l[0].encode(), l[1])
for i, line in enumerate(outputs):
if isinstance(line, str):
self.impl.addOutput(line.encode(), 0)
elif isinstance(line, tuple) and len(line) == 2:
self.impl.addOutput(line[0].encode(), line[1])
else:
raise TypeError(f"Incorrect type {type(l)} for layer to add at index {i}. "
raise TypeError(f"Incorrect type {type(line)} for layer to add at index {i}. "
"Expected string with layer name or tuple with two elements: layer name as "
"first element and port id as second")

@@ -1447,7 +1463,7 @@ cdef class IENetwork:
# net.reshape({input_layer: (n, c, h*2, w*2)})
# ```
def reshape(self, input_shapes: dict):
cdef map[string, vector[size_t]] c_input_shapes;
cdef map[string, vector[size_t]] c_input_shapes
cdef vector[size_t] c_shape
net_inputs = self.input_info
for input, shape in input_shapes.items():
@@ -1540,6 +1556,3 @@ cdef class BlobBuffer:
return np.asarray(self).view(dtype=np.float16)
else:
return np.asarray(self)
@@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import os
import sys

if sys.platform == "win32":
if sys.platform == 'win32':
# Installer, yum, pip installs openvino dlls to the different directories
# and those paths need to be visible to the openvino modules
#
@@ -24,7 +25,8 @@ if sys.platform == "win32":
if (3, 8) <= sys.version_info:
os.add_dll_directory(os.path.abspath(lib_path))
else:
os.environ["PATH"] = os.path.abspath(lib_path) + ";" + os.environ["PATH"]
os.environ['PATH'] = os.path.abspath(lib_path) + ';' + os.environ['PATH']

from .offline_transformations_api import *

__all__ = ['ApplyMOCTransformations']
@@ -7,17 +7,22 @@ from ..inference_engine.ie_api cimport IENetwork
from libcpp cimport bool
from libcpp.string cimport string


def ApplyMOCTransformations(IENetwork network, bool cf):
C.ApplyMOCTransformations(network.impl, cf)


def ApplyPOTTransformations(IENetwork network, string device):
C.ApplyPOTTransformations(network.impl, device)


def ApplyLowLatencyTransformation(IENetwork network):
C.ApplyLowLatencyTransformation(network.impl)


def ApplyPruningTransformation(IENetwork network):
C.ApplyPruningTransformation(network.impl)


def CheckAPI():
C.CheckAPI()
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

from .test_utils_api import *

__all__ = ['CompareNetworks']
@@ -8,6 +8,7 @@ from libcpp cimport bool
from libcpp.string cimport string
from libcpp.pair cimport pair


def CompareNetworks(IENetwork lhs, IENetwork rhs):
cdef pair[bool, string] c_pair
c_pair = C.CompareNetworks(lhs.impl, rhs.impl)
@@ -1,10 +1,12 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import os.path
import sys
import errno
import subprocess
import subprocess  # nosec
import typing
from pathlib import Path
from shutil import copyfile
from distutils.command.install import install
@@ -18,94 +20,82 @@ from decouple import config

WHEEL_LIBS_INSTALL_DIR = os.path.join('openvino', 'libs')
WHEEL_LIBS_PACKAGE = 'openvino.libs'
PYTHON_VERSION = f"python{sys.version_info.major}.{sys.version_info.minor}"
PYTHON_VERSION = f'python{sys.version_info.major}.{sys.version_info.minor}'

# The following variables can be defined in environment or .env file
CMAKE_BUILD_DIR = config('CMAKE_BUILD_DIR', ".")
CMAKE_BUILD_DIR = config('CMAKE_BUILD_DIR', '.')
CORE_LIBS_DIR = config('CORE_LIBS_DIR', '')
PLUGINS_LIBS_DIR = config('PLUGINS_LIBS_DIR', '')
NGRAPH_LIBS_DIR = config('NGRAPH_LIBS_DIR', '')
TBB_LIBS_DIR = config('TBB_LIBS_DIR', '')
PY_PACKAGES_DIR = config('PY_PACKAGES_DIR', '')
LIBS_RPATH = "$ORIGIN" if sys.platform == "linux" else "@loader_path"
LIBS_RPATH = '$ORIGIN' if sys.platform == 'linux' else '@loader_path'

LIB_INSTALL_CFG = {
"ie_libs": {
'ie_libs': {
'name': 'core',
'prefix': 'libs.core',
'install_dir': CORE_LIBS_DIR,
'rpath': LIBS_RPATH,
},
"hetero_plugin": {
'hetero_plugin': {
'name': 'hetero',
'prefix': 'libs.plugins',
'install_dir': PLUGINS_LIBS_DIR,
'rpath': LIBS_RPATH,
},
"gpu_plugin": {
'gpu_plugin': {
'name': 'gpu',
'prefix': 'libs.plugins',
'install_dir': PLUGINS_LIBS_DIR,
'rpath': LIBS_RPATH,
},
"cpu_plugin": {
'cpu_plugin': {
'name': 'cpu',
'prefix': 'libs.plugins',
'install_dir': PLUGINS_LIBS_DIR,
'rpath': LIBS_RPATH,
},
"multi_plugin": {
'multi_plugin': {
'name': 'multi',
'prefix': 'libs.plugins',
'install_dir': PLUGINS_LIBS_DIR,
'rpath': LIBS_RPATH,
},
"myriad_plugin": {
'myriad_plugin': {
'name': 'myriad',
'prefix': 'libs.plugins',
'install_dir': PLUGINS_LIBS_DIR,
'rpath': LIBS_RPATH,
},
"ngraph_libs": {
'ngraph_libs': {
'name': 'ngraph',
'prefix': 'libs.ngraph',
'install_dir': NGRAPH_LIBS_DIR,
'rpath': LIBS_RPATH,
},
"tbb_libs": {
'name': 'tbb',
'prefix': 'libs.tbb',
'install_dir': TBB_LIBS_DIR
},
'tbb_libs': {'name': 'tbb', 'prefix': 'libs.tbb', 'install_dir': TBB_LIBS_DIR},
}

PY_INSTALL_CFG = {
"ie_py": {
'name': PYTHON_VERSION,
'prefix': 'site-packages',
'install_dir': PY_PACKAGES_DIR
},
"ngraph_py": {
'name': f"pyngraph_{PYTHON_VERSION}",
'prefix': 'site-packages',
'install_dir': PY_PACKAGES_DIR
},
'ie_py': {'name': PYTHON_VERSION, 'prefix': 'site-packages', 'install_dir': PY_PACKAGES_DIR},
'ngraph_py': {'name': f'pyngraph_{PYTHON_VERSION}', 'prefix': 'site-packages', 'install_dir': PY_PACKAGES_DIR},
}


class PrebuiltExtension(Extension):
"""Initialize Extension"""

def __init__(self, name, sources, *args, **kwargs):
if len(sources) != 1:
nln = '\n'
raise DistutilsSetupError(
f"PrebuiltExtension can accept only one source, but got: {nln}{nln.join(sources)}"
)
raise DistutilsSetupError(f'PrebuiltExtension can accept only one source, but got: {nln}{nln.join(sources)}')
super().__init__(name, sources, *args, **kwargs)


class CustomBuild(build):
"""Custom implementation of build_clib"""

def run(self):
self.run_command('build_clib')
build.run(self)
@@ -113,6 +103,7 @@ class CustomBuild(build):

class CustomInstall(install):
"""Enable build_clib during the installation"""

def run(self):
self.run_command('build_clib')
install.run(self)
@@ -120,6 +111,7 @@ class CustomInstall(install):

class PrepareLibs(build_clib):
"""Prepare prebuilt libraries"""

def run(self):
self.configure(LIB_INSTALL_CFG)
self.configure(PY_INSTALL_CFG)
@@ -132,16 +124,13 @@ class PrepareLibs(build_clib):
install_dir = comp_data.get('install_dir')
if install_dir and not os.path.isabs(install_dir):
install_dir = os.path.join(install_prefix, install_dir)
self.announce(f"Installing {comp}", level=3)
self.spawn(["cmake",
"--install", CMAKE_BUILD_DIR,
"--prefix", install_prefix,
"--component", comp_data.get('name')])
self.announce(f'Installing {comp}', level=3)
self.spawn(['cmake', '--install', CMAKE_BUILD_DIR, '--prefix', install_prefix, '--component', comp_data.get('name')])
# set rpath if applicable
if sys.platform != "win32" and comp_data.get('rpath'):
file_types = ["*.so"] if sys.platform == "linux" else ["*.dylib", "*.so"]
for file in file_types:
for path in Path(install_dir).glob(file):
if sys.platform != 'win32' and comp_data.get('rpath'):
file_types = ['*.so'] if sys.platform == 'linux' else ['*.dylib', '*.so']
for file_type in file_types:
for path in Path(install_dir).glob(file_type):
set_rpath(comp_data['rpath'], path)

def generate_package(self, src_dirs):
@@ -162,28 +151,28 @@ class PrepareLibs(build_clib):
copyfile(file_path, dst_file)

if Path(package_dir).exists():
self.announce(f"Adding {WHEEL_LIBS_PACKAGE} package", level=3)
self.announce(f'Adding {WHEEL_LIBS_PACKAGE} package', level=3)
packages.append(WHEEL_LIBS_PACKAGE)
package_data.update({WHEEL_LIBS_PACKAGE: ['*']})


class CopyExt(build_ext):
"""Copy extension files to the build directory"""

def run(self):
for extension in self.extensions:
if not isinstance(extension, PrebuiltExtension):
raise DistutilsSetupError(
f"copy_ext can accept PrebuiltExtension only, but got {extension.name}")
raise DistutilsSetupError(f'copy_ext can accept PrebuiltExtension only, but got {extension.name}')
src = extension.sources[0]
dst = self.get_ext_fullpath(extension.name)
os.makedirs(os.path.dirname(dst), exist_ok=True)
# setting relative path to find dlls
if sys.platform != "win32":
if sys.platform != 'win32':
rpath = os.path.relpath(get_package_dir(PY_INSTALL_CFG), os.path.dirname(src))
if sys.platform == "linux":
rpath = os.path.join("$ORIGIN", rpath, WHEEL_LIBS_INSTALL_DIR)
elif sys.platform == "darwin":
rpath = os.path.join("@loader_path", rpath, WHEEL_LIBS_INSTALL_DIR)
if sys.platform == 'linux':
rpath = os.path.join('$ORIGIN', rpath, WHEEL_LIBS_INSTALL_DIR)
elif sys.platform == 'darwin':
rpath = os.path.join('@loader_path', rpath, WHEEL_LIBS_INSTALL_DIR)
set_rpath(rpath, src)

copy_file(src, dst, verbose=self.verbose, dry_run=self.dry_run)
@@ -193,7 +182,7 @@ def is_tool(name):
"""Check if the command-line tool is available"""
try:
devnull = subprocess.DEVNULL
subprocess.Popen([name], stdout=devnull, stderr=devnull).communicate()
subprocess.Popen([name], stdout=devnull, stderr=devnull).communicate()  # nosec
except OSError as error:
if error.errno == errno.ENOENT:
return False
@@ -206,76 +195,77 @@ def remove_rpath(file_path):
:param file_path: binary path
:type file_path: pathlib.Path
"""
if sys.platform == "darwin":
cmd = f'otool -l {file_path} ' \
f'| grep LC_RPATH -A3 ' \
f'| grep -o "path.*" ' \
f'| cut -d " " -f2 ' \
if sys.platform == 'darwin':
cmd = (
f'otool -l {file_path} '  # noqa: P103
f'| grep LC_RPATH -A3 '
f'| grep -o "path.*" '
f'| cut -d " " -f2 '
f'| xargs -I{{}} install_name_tool -delete_rpath {{}} {file_path}'
if os.WEXITSTATUS(os.system(cmd)) != 0:
sys.exit(f"Could not remove rpath for {file_path}")
)
if os.WEXITSTATUS(os.system(cmd)) != 0:  # nosec
sys.exit(f'Could not remove rpath for {file_path}')
else:
sys.exit(f"Unsupported platform: {sys.platform}")
sys.exit(f'Unsupported platform: {sys.platform}')


def set_rpath(rpath, executable):
"""Setting rpath for linux and macOS libraries"""
print(f"Setting rpath {rpath} for {executable}")
cmd = ""
rpath_tool = ""
if sys.platform == "linux":
rpath_tool = "patchelf"
cmd = [rpath_tool, "--set-rpath", rpath, executable]
elif sys.platform == "darwin":
rpath_tool = "install_name_tool"
cmd = [rpath_tool, "-add_rpath", rpath, executable]
print(f'Setting rpath {rpath} for {executable}')  # noqa: T001
cmd = []
rpath_tool = ''
if sys.platform == 'linux':
rpath_tool = 'patchelf'
cmd = [rpath_tool, '--set-rpath', rpath, executable]
elif sys.platform == 'darwin':
rpath_tool = 'install_name_tool'
cmd = [rpath_tool, '-add_rpath', rpath, executable]
else:
sys.exit(f"Unsupported platform: {sys.platform}")
sys.exit(f'Unsupported platform: {sys.platform}')

if is_tool(rpath_tool):
if sys.platform == "darwin":
if sys.platform == 'darwin':
remove_rpath(executable)
ret_info = subprocess.run(cmd, check=True)
ret_info = subprocess.run(cmd, check=True, shell=False)  # nosec
if ret_info.returncode != 0:
sys.exit(f"Could not set rpath: {rpath} for {executable}")
sys.exit(f'Could not set rpath: {rpath} for {executable}')
else:
sys.exit(f"Could not found {rpath_tool} on the system, "
f"please make sure that this tool is installed")
sys.exit(f'Could not found {rpath_tool} on the system, ' f'please make sure that this tool is installed')


def find_prebuilt_extensions(search_dirs):
"""collect prebuilt python extensions"""
extensions = []
ext_pattern = ""
if sys.platform == "linux":
ext_pattern = "**/*.so"
elif sys.platform == "win32":
ext_pattern = "**/*.pyd"
elif sys.platform == "darwin":
ext_pattern = "**/*.so"
ext_pattern = ''
if sys.platform == 'linux':
ext_pattern = '**/*.so'
elif sys.platform == 'win32':
ext_pattern = '**/*.pyd'
elif sys.platform == 'darwin':
ext_pattern = '**/*.so'
for base_dir in search_dirs:
for path in Path(base_dir).glob(ext_pattern):
relpath = path.relative_to(base_dir)
if relpath.parent != ".":
if relpath.parent != '.':
package_names = str(relpath.parent).split(os.path.sep)
else:
package_names = []
package_names.append(path.name.split(".", 1)[0])
name = ".".join(package_names)
package_names.append(path.name.split('.', 1)[0])
name = '.'.join(package_names)
extensions.append(PrebuiltExtension(name, sources=[str(path)]))
return extensions


def get_description(desc_file_path):
"""read description from README.md"""
with open(desc_file_path, "r", encoding="utf-8") as fstream:
with open(desc_file_path, 'r', encoding='utf-8') as fstream:
description = fstream.read()
return description


def get_dependencies(requirements_file_path):
"""read dependencies from requirements.txt"""
with open(requirements_file_path, "r", encoding="utf-8") as fstream:
with open(requirements_file_path, 'r', encoding='utf-8') as fstream:
dependencies = fstream.read()
return dependencies
@@ -299,7 +289,7 @@ def get_package_dir(install_cfg):
Get python package path based on config
All the packages should be located in one directory
"""
py_package_path = ""
py_package_path = ''
dirs = get_dir_list(install_cfg)
if len(dirs) != 0:
# setup.py support only one package directory, all modules should be located there
@@ -307,17 +297,17 @@ def get_package_dir(install_cfg):
return py_package_path


platforms = ["linux", "win32", "darwin"]
platforms = ['linux', 'win32', 'darwin']
if not any(pl in sys.platform for pl in platforms):
sys.exit("Unsupported platform: {}, expected: {}".format(sys.platform, "linux, win32, darwin"))
sys.exit(f'Unsupported platform: {sys.platform}, expected: linux, win32, darwin')

# copy license file into the build directory
package_license = config('WHEEL_LICENSE', '')
if os.path.exists(package_license):
copyfile(package_license, "LICENSE")
copyfile(package_license, 'LICENSE')

packages = find_namespace_packages(','.join(get_dir_list(PY_INSTALL_CFG)))
package_data = {}
package_data: typing.Dict[str, list] = {}

setup(
version=config('WHEEL_VERSION', '0.0.0'),
@@ -326,16 +316,16 @@ setup(
license=config('WHEEL_LICENCE_TYPE', 'OSI Approved :: Apache Software License'),
author=config('WHEEL_AUTHOR', 'Intel Corporation'),
description=config('WHEEL_DESC', 'Inference Engine Python* API'),
install_requires=get_dependencies(config('WHEEL_REQUIREMENTS', "requirements.txt")),
install_requires=get_dependencies(config('WHEEL_REQUIREMENTS', 'requirements.txt')),
long_description=get_description(config('WHEEL_OVERVIEW', 'pypi_overview.md')),
long_description_content_type="text/markdown",
long_description_content_type='text/markdown',
download_url=config('WHEEL_DOWNLOAD_URL', 'https://github.com/openvinotoolkit/openvino/tags'),
url=config('WHEEL_URL', 'https://docs.openvinotoolkit.org/latest/index.html'),
cmdclass={
"build": CustomBuild,
"install": CustomInstall,
"build_clib": PrepareLibs,
"build_ext": CopyExt,
'build': CustomBuild,
'install': CustomInstall,
'build_clib': PrepareLibs,
'build_ext': CopyExt,
},
ext_modules=find_prebuilt_extensions(get_dir_list(PY_INSTALL_CFG)),
packages=packages,