Compare commits
335 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
023e7c2c3f | ||
|
|
34ddb70f7d | ||
|
|
21e092122f | ||
|
|
92c1333653 | ||
|
|
c26ec8b312 | ||
|
|
32054ff180 | ||
|
|
7cff005ada | ||
|
|
06707cc53f | ||
|
|
fff93d8f05 | ||
|
|
637ddd5dfb | ||
|
|
fa4c5e8e38 | ||
|
|
c9fc6f0531 | ||
|
|
c9eb6ae62b | ||
|
|
eef56ca80c | ||
|
|
36f1c00e02 | ||
|
|
5c43765011 | ||
|
|
bbfc9bbc14 | ||
|
|
9c607528ef | ||
|
|
ae9e0510f0 | ||
|
|
76af547c17 | ||
|
|
5e97a3123f | ||
|
|
532dec140b | ||
|
|
c41c6294f9 | ||
|
|
3bbe88e659 | ||
|
|
2f3d5f68cd | ||
|
|
843f81a1cc | ||
|
|
c596707a09 | ||
|
|
cf60baf2f0 | ||
|
|
aeb70036d7 | ||
|
|
dea04dae8c | ||
|
|
14b44803ba | ||
|
|
06286f2aae | ||
|
|
97e5fc4bae | ||
|
|
47218284b2 | ||
|
|
6079a35b81 | ||
|
|
4f4352f301 | ||
|
|
a67d74c41f | ||
|
|
26c563132d | ||
|
|
dc1ca195dd | ||
|
|
f5ad3e6f89 | ||
|
|
6c736ce001 | ||
|
|
30ab6534e1 | ||
|
|
259a4c25ce | ||
|
|
347930008c | ||
|
|
4fa251483a | ||
|
|
30f8af70fc | ||
|
|
3fc6d8a188 | ||
|
|
66c8df6a87 | ||
|
|
e53eb86334 | ||
|
|
2df99d4263 | ||
|
|
deab4d38b0 | ||
|
|
412428f1dd | ||
|
|
167c96a8af | ||
|
|
b7363ba711 | ||
|
|
5cef9f3734 | ||
|
|
0bf1f53356 | ||
|
|
18004bdb5e | ||
|
|
9fc818478a | ||
|
|
ef8a8dd309 | ||
|
|
d4e880de3d | ||
|
|
fe198dd544 | ||
|
|
b0eb3e67ee | ||
|
|
434361cea9 | ||
|
|
aa30580109 | ||
|
|
051a429c31 | ||
|
|
8eb88d51f2 | ||
|
|
971811c8c8 | ||
|
|
629ca3a5d8 | ||
|
|
92e5e010b9 | ||
|
|
c7313bab7f | ||
|
|
d798831c95 | ||
|
|
4d01adbe01 | ||
|
|
1d51d2185a | ||
|
|
65b00c1dfb | ||
|
|
9758305b32 | ||
|
|
d7c77212b8 | ||
|
|
e544dd1e28 | ||
|
|
bc98d17121 | ||
|
|
bcd38100db | ||
|
|
b6f2c06b26 | ||
|
|
b4546ad1e0 | ||
|
|
d02b9a9b81 | ||
|
|
d8e82d56d2 | ||
|
|
e91453e006 | ||
|
|
6a60f93af0 | ||
|
|
ca643edb1b | ||
|
|
7a11e36eeb | ||
|
|
155916acde | ||
|
|
ac65ea30fd | ||
|
|
3b5de94a09 | ||
|
|
ff00817bb7 | ||
|
|
eefaf56075 | ||
|
|
f1811ad060 | ||
|
|
d155483573 | ||
|
|
626bc4f3d4 | ||
|
|
60d4d62536 | ||
|
|
e7f5f53f92 | ||
|
|
a224078c5c | ||
|
|
9a968b12db | ||
|
|
f0498ad011 | ||
|
|
63ee9f8916 | ||
|
|
93b60cacfa | ||
|
|
0022eebd71 | ||
|
|
807f85f93f | ||
|
|
7f09b54af8 | ||
|
|
cad3ccd8a3 | ||
|
|
a0d1dae91d | ||
|
|
4d3ddc1684 | ||
|
|
70c2058b61 | ||
|
|
3571d44896 | ||
|
|
20d812d959 | ||
|
|
b485e829d6 | ||
|
|
f51c533ea8 | ||
|
|
67e3e06bee | ||
|
|
7a5d447e9f | ||
|
|
f80bd537bf | ||
|
|
f9ac555857 | ||
|
|
6e491a89ad | ||
|
|
2100521a14 | ||
|
|
a705f0c358 | ||
|
|
c7d130efbe | ||
|
|
c10ff28f12 | ||
|
|
698dfc4bf6 | ||
|
|
85aa23ec8a | ||
|
|
1001caf04e | ||
|
|
0e60aed97a | ||
|
|
3183c116d9 | ||
|
|
01e60d057d | ||
|
|
d7fad0109a | ||
|
|
28ffbf0857 | ||
|
|
546377dc8e | ||
|
|
e53b1b7fbc | ||
|
|
158d32139f | ||
|
|
2bb7010193 | ||
|
|
1ffada0b23 | ||
|
|
023344a317 | ||
|
|
e2d1ae7055 | ||
|
|
cfb5f27899 | ||
|
|
53927034da | ||
|
|
63a77bb4a1 | ||
|
|
7edebd8d87 | ||
|
|
da230131d0 | ||
|
|
72d9a9fae7 | ||
|
|
20ef9a9423 | ||
|
|
b457553593 | ||
|
|
ed85690136 | ||
|
|
3a80f0476b | ||
|
|
4e0c7a217f | ||
|
|
447dd3570d | ||
|
|
3ea1657e4f | ||
|
|
cdd31da1c7 | ||
|
|
9f6fde9af2 | ||
|
|
99a2423ec0 | ||
|
|
4f6c976add | ||
|
|
4c44ce9795 | ||
|
|
6f69ba04c8 | ||
|
|
a79cd75596 | ||
|
|
b2816dc1ec | ||
|
|
638c7b891c | ||
|
|
cbe45b7d0a | ||
|
|
1d179fdb39 | ||
|
|
be3b4a3362 | ||
|
|
5776b66fb2 | ||
|
|
8377c714aa | ||
|
|
f15096e101 | ||
|
|
265e3c7cba | ||
|
|
daaeaa5881 | ||
|
|
278868b7a1 | ||
|
|
dbdaaa93dd | ||
|
|
3863656f44 | ||
|
|
0583b37a14 | ||
|
|
d48e0ef5a6 | ||
|
|
41ed6f0891 | ||
|
|
69e3af4c99 | ||
|
|
935b48b978 | ||
|
|
88264b895a | ||
|
|
65f62945dd | ||
|
|
004f414b89 | ||
|
|
4001d0d99f | ||
|
|
d970d0494e | ||
|
|
cd01ccd449 | ||
|
|
b4893945c7 | ||
|
|
7ec63cafe3 | ||
|
|
3bf7a69df1 | ||
|
|
bad5bb30a3 | ||
|
|
3d42871523 | ||
|
|
9af51a165f | ||
|
|
e2729b87f3 | ||
|
|
3ef1a26174 | ||
|
|
cbad43f3a5 | ||
|
|
3a24eb6a62 | ||
|
|
963f55a189 | ||
|
|
f7052a107d | ||
|
|
6cfa77223e | ||
|
|
11bd4f8a42 | ||
|
|
be3b711972 | ||
|
|
011128cb54 | ||
|
|
5f8f9ec108 | ||
|
|
a4f13ae9fe | ||
|
|
09192b804e | ||
|
|
67d733d5a8 | ||
|
|
e290b14ab1 | ||
|
|
5cc8114322 | ||
|
|
e51e1682ca | ||
|
|
5f6999ed7e | ||
|
|
bb41994f56 | ||
|
|
33aca7d2c4 | ||
|
|
77162bf8ee | ||
|
|
23f41213bb | ||
|
|
b731ce13d8 | ||
|
|
0efe474342 | ||
|
|
ec5c9db932 | ||
|
|
00b53d6c33 | ||
|
|
25d36568f8 | ||
|
|
246790f264 | ||
|
|
958e425775 | ||
|
|
e025f1464b | ||
|
|
125dd89c01 | ||
|
|
f276b5fbb4 | ||
|
|
57da5ddab8 | ||
|
|
e734377590 | ||
|
|
c75fd4db92 | ||
|
|
79b780413f | ||
|
|
93333780c7 | ||
|
|
3c718809d3 | ||
|
|
d5434a036e | ||
|
|
7fcb12603e | ||
|
|
c5124763db | ||
|
|
27e8580b7d | ||
|
|
fd1cc08cd8 | ||
|
|
e337350cc1 | ||
|
|
d24132912e | ||
|
|
946ed119c8 | ||
|
|
dcdeb34c8f | ||
|
|
bfe416704d | ||
|
|
44cd77f54b | ||
|
|
31fe146539 | ||
|
|
2012d084f2 | ||
|
|
d337b4ed97 | ||
|
|
5c2eb05990 | ||
|
|
d3ea03bbfc | ||
|
|
6788153ba9 | ||
|
|
851f64946a | ||
|
|
c1625743df | ||
|
|
73f3b7c8fc | ||
|
|
4a44f84dab | ||
|
|
bb039adef8 | ||
|
|
4943a954c7 | ||
|
|
7595512d1f | ||
|
|
c3aa866a33 | ||
|
|
42a8364cb6 | ||
|
|
d3764a7563 | ||
|
|
e835a4cf58 | ||
|
|
d3923f2ce0 | ||
|
|
c6e03d73d8 | ||
|
|
0b23215b72 | ||
|
|
2f9fd74151 | ||
|
|
8c8629a4af | ||
|
|
04bb8ab51d | ||
|
|
74e8b54ce3 | ||
|
|
b6a05c232e | ||
|
|
507c06c8bc | ||
|
|
244f4e9fe7 | ||
|
|
43fdf32729 | ||
|
|
20c1755efc | ||
|
|
0064c299c3 | ||
|
|
2e3928071f | ||
|
|
f1aa573b79 | ||
|
|
acc311e6f9 | ||
|
|
d006030ad3 | ||
|
|
fc899e6ceb | ||
|
|
a3d482035e | ||
|
|
ca9a78874a | ||
|
|
b8611139ca | ||
|
|
6e2cfbca0c | ||
|
|
b36d0df477 | ||
|
|
ccb7438803 | ||
|
|
deb008a26f | ||
|
|
eab7ef4895 | ||
|
|
778063e5cb | ||
|
|
d222c99ca7 | ||
|
|
29d24c613a | ||
|
|
f1b7a7292b | ||
|
|
ec2ca3e54f | ||
|
|
6d56e824d2 | ||
|
|
7566e8202f | ||
|
|
f30dcc218c | ||
|
|
3ad0e4e434 | ||
|
|
4893f27fb9 | ||
|
|
12f0fc72db | ||
|
|
6dd7ce89af | ||
|
|
ed9cd78421 | ||
|
|
eb57da7605 | ||
|
|
5bc6a1e723 | ||
|
|
76b3d2d47b | ||
|
|
dd0a195f2d | ||
|
|
3248c3002a | ||
|
|
c78a575d23 | ||
|
|
d22e5e8260 | ||
|
|
ba0a339888 | ||
|
|
0a5a63bc0c | ||
|
|
32488a5c26 | ||
|
|
8081638bb5 | ||
|
|
c50d41826d | ||
|
|
7b5887afba | ||
|
|
54bb6b057f | ||
|
|
645641e87d | ||
|
|
3d63b13ba5 | ||
|
|
5b428f0655 | ||
|
|
9d6501e9a6 | ||
|
|
5b07298559 | ||
|
|
3a0a7e79ff | ||
|
|
11b84926d4 | ||
|
|
112c58cc40 | ||
|
|
2430d96a3e | ||
|
|
64df940035 | ||
|
|
67077e4aa7 | ||
|
|
5b009e9a38 | ||
|
|
1bb752f1b8 | ||
|
|
5176df56dd | ||
|
|
107d67e44d | ||
|
|
833ff8b591 | ||
|
|
9314daeb3c | ||
|
|
aa2cb40f17 | ||
|
|
079e16c4d1 | ||
|
|
357cc7eb4c | ||
|
|
822692f526 | ||
|
|
4ea5ac39fc | ||
|
|
67ac796715 | ||
|
|
6300b1490d | ||
|
|
165c00fe6d | ||
|
|
68bdd184ef | ||
|
|
56b67d7d1c | ||
|
|
ae03bda480 | ||
|
|
127cbac5bc |
@@ -1,19 +0,0 @@
|
||||
BasedOnStyle: Google
|
||||
IndentWidth: 4
|
||||
UseTab: Never
|
||||
---
|
||||
Language: Cpp
|
||||
Standard: Cpp11
|
||||
|
||||
AccessModifierOffset: -4
|
||||
AllowAllArgumentsOnNextLine: false
|
||||
AllowShortFunctionsOnASingleLine: Empty
|
||||
AllowShortLambdasOnASingleLine: Empty
|
||||
AlwaysBreakBeforeMultilineStrings: false
|
||||
ColumnLimit: 120
|
||||
DerivePointerAlignment: false
|
||||
FixNamespaceComments: true
|
||||
IndentCaseLabels: false
|
||||
SpaceBeforeCpp11BracedList: true
|
||||
SpaceBeforeCtorInitializerColon: false
|
||||
---
|
||||
55
.github/workflows/mo.yml
vendored
Normal file
55
.github/workflows/mo.yml
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
name: MO
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- 'model-optimizer/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'model-optimizer/**'
|
||||
|
||||
jobs:
|
||||
Pylint-UT:
|
||||
runs-on: ubuntu-18.04
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v1
|
||||
with:
|
||||
python-version: 3.6
|
||||
|
||||
- name: Cache pip
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('model-optimizer/requirements*.txt') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pip-
|
||||
${{ runner.os }}-
|
||||
|
||||
# tensorflow 1.15 causes modules import
|
||||
# errors, most likely due to https://github.com/PyCQA/pylint/issues/2603
|
||||
# for tensorflow.core.framework and tensorflow.contrib
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip setuptools
|
||||
# For Pylint
|
||||
pip install tensorflow==1.14.0 tensorboard==1.14.0 tensorflow-estimator==1.14.0
|
||||
# For UT
|
||||
pip install unittest-xml-reporting==3.0.2
|
||||
# MO requirements
|
||||
pip install -r requirements.txt
|
||||
pip install -r requirements_dev.txt
|
||||
working-directory: model-optimizer
|
||||
|
||||
- name: Pylint
|
||||
run: pylint -d C,R,W mo/ mo.py extensions/
|
||||
working-directory: model-optimizer
|
||||
|
||||
- name: UT
|
||||
run: |
|
||||
export PYTHONPATH=$PYTHONPATH:`pwd`
|
||||
export MO_ROOT=`pwd`
|
||||
env
|
||||
mkdir ../mo-ut-logs
|
||||
python3 -m xmlrunner discover -p *_test.py --output=../mo-ut-logs
|
||||
working-directory: model-optimizer
|
||||
383
.gitignore
vendored
383
.gitignore
vendored
@@ -1,342 +1,71 @@
|
||||
## Ignore Visual Studio temporary files, build results, and
|
||||
## files generated by popular Visual Studio add-ons.
|
||||
# build/artifact dirs
|
||||
_*
|
||||
# but ensure we don't skip __init__.py
|
||||
!__init__.py
|
||||
|
||||
# User-specific files
|
||||
*.suo
|
||||
*.user
|
||||
*.userosscache
|
||||
*.sln.docstates
|
||||
|
||||
# User-specific files (MonoDevelop/Xamarin Studio)
|
||||
*.userprefs
|
||||
|
||||
# Build results
|
||||
[Dd]ebug/
|
||||
[Dd]ebugPublic/
|
||||
[Rr]elease/
|
||||
[Rr]eleases/
|
||||
[Xx]64/
|
||||
[Xx]86/
|
||||
[Bb]uild/
|
||||
bld/
|
||||
[Bb]in/
|
||||
[Oo]bj/
|
||||
|
||||
# PY.TEST
|
||||
*.pyc
|
||||
tests/integration/report.html
|
||||
tests/integration/report.xml
|
||||
tests/integration/assets/
|
||||
tests/integration/__pycache__/
|
||||
|
||||
# Visual Studio 2015 cache/options directory
|
||||
.vs/
|
||||
# Uncomment if you have tasks that create the project's static files in wwwroot
|
||||
#wwwroot/
|
||||
|
||||
# MSTest test Results
|
||||
[Tt]est[Rr]esult*/
|
||||
[Bb]uild[Ll]og.*
|
||||
|
||||
# NUNIT
|
||||
*.VisualState.xml
|
||||
TestResult.xml
|
||||
|
||||
# Build Results of an ATL Project
|
||||
[Dd]ebugPS/
|
||||
[Rr]eleasePS/
|
||||
dlldata.c
|
||||
|
||||
# DNX
|
||||
project.lock.json
|
||||
artifacts/
|
||||
|
||||
*_i.c
|
||||
*_p.c
|
||||
*_i.h
|
||||
*.ilk
|
||||
*.meta
|
||||
*.obj
|
||||
*.pch
|
||||
*.pdb
|
||||
*.pgc
|
||||
*.pgd
|
||||
*.rsp
|
||||
*.sbr
|
||||
*.tlb
|
||||
*.tli
|
||||
*.tlh
|
||||
*.tmp
|
||||
*.tmp_proj
|
||||
*.log
|
||||
*.vspscc
|
||||
*.vssscc
|
||||
.builds
|
||||
*.pidb
|
||||
*.svclog
|
||||
*.scc
|
||||
|
||||
# Chutzpah Test files
|
||||
_Chutzpah*
|
||||
|
||||
# Visual C++ cache files
|
||||
ipch/
|
||||
*.aps
|
||||
*.ncb
|
||||
*.opendb
|
||||
*.opensdf
|
||||
*.sdf
|
||||
*.cachefile
|
||||
*.VC.db
|
||||
|
||||
# Visual Studio profiler
|
||||
*.psess
|
||||
*.vsp
|
||||
*.vspx
|
||||
*.sap
|
||||
|
||||
# TFS 2012 Local Workspace
|
||||
$tf/
|
||||
|
||||
# Guidance Automation Toolkit
|
||||
*.gpState
|
||||
|
||||
# ReSharper is a .NET coding add-in
|
||||
_ReSharper*/
|
||||
*.[Rr]e[Ss]harper
|
||||
*.DotSettings.user
|
||||
|
||||
# JustCode is a .NET coding add-in
|
||||
.JustCode
|
||||
|
||||
# TeamCity is a build add-in
|
||||
_TeamCity*
|
||||
|
||||
# DotCover is a Code Coverage Tool
|
||||
*.dotCover
|
||||
|
||||
# NCrunch
|
||||
_NCrunch_*
|
||||
.*crunch*.local.xml
|
||||
nCrunchTemp_*
|
||||
|
||||
# MightyMoose
|
||||
*.mm.*
|
||||
AutoTest.Net/
|
||||
|
||||
# Web workbench (sass)
|
||||
.sass-cache/
|
||||
|
||||
# Installshield output folder
|
||||
[Ee]xpress/
|
||||
|
||||
# DocProject is a documentation generator add-in
|
||||
DocProject/buildhelp/
|
||||
DocProject/Help/*.HxT
|
||||
DocProject/Help/*.HxC
|
||||
DocProject/Help/*.hhc
|
||||
DocProject/Help/*.hhk
|
||||
DocProject/Help/*.hhp
|
||||
DocProject/Help/Html2
|
||||
DocProject/Help/html
|
||||
|
||||
# Click-Once directory
|
||||
publish/
|
||||
|
||||
# Publish Web Output
|
||||
*.[Pp]ublish.xml
|
||||
*.azurePubxml
|
||||
|
||||
# TODO: Un-comment the next line if you do not want to checkin
|
||||
# your web deploy settings because they may include unencrypted
|
||||
# passwords
|
||||
#*.pubxml
|
||||
*.publishproj
|
||||
|
||||
# NuGet Packages
|
||||
*.nupkg
|
||||
# The packages folder can be ignored because of Package Restore
|
||||
**/packages/*
|
||||
# except build/, which is used as an MSBuild target.
|
||||
!**/packages/build/
|
||||
# Uncomment if necessary however generally it will be regenerated when needed
|
||||
#!**/packages/repositories.config
|
||||
# NuGet v3's project.json files produces more ignoreable files
|
||||
*.nuget.props
|
||||
*.nuget.targets
|
||||
|
||||
# Microsoft Azure Build Output
|
||||
csx/
|
||||
*.build.csdef
|
||||
|
||||
# Microsoft Azure Emulator
|
||||
ecf/
|
||||
rcf/
|
||||
|
||||
# Microsoft Azure ApplicationInsights config file
|
||||
ApplicationInsights.config
|
||||
|
||||
# Windows Store app package directory
|
||||
AppPackages/
|
||||
BundleArtifacts/
|
||||
|
||||
# Visual Studio cache files
|
||||
# files ending in .cache can be ignored
|
||||
*.[Cc]ache
|
||||
# but keep track of directories ending in .cache
|
||||
!*.[Cc]ache/
|
||||
|
||||
# Others
|
||||
ClientBin/
|
||||
[Ss]tyle[Cc]op.*
|
||||
~$*
|
||||
*~
|
||||
*.dbmdl
|
||||
*.dbproj.schemaview
|
||||
*.pfx
|
||||
*.publishsettings
|
||||
node_modules/
|
||||
orleans.codegen.cs
|
||||
|
||||
# RIA/Silverlight projects
|
||||
Generated_Code/
|
||||
|
||||
# Backup & report files from converting an old project file
|
||||
# to a newer Visual Studio version. Backup files are not needed,
|
||||
# because we have git ;-)
|
||||
_UpgradeReport_Files/
|
||||
Backup*/
|
||||
UpgradeLog*.XML
|
||||
UpgradeLog*.htm
|
||||
|
||||
# SQL Server files
|
||||
*.mdf
|
||||
*.ldf
|
||||
|
||||
# Business Intelligence projects
|
||||
*.rdl.data
|
||||
*.bim.layout
|
||||
*.bim_*.settings
|
||||
|
||||
# Microsoft Fakes
|
||||
FakesAssemblies/
|
||||
|
||||
# GhostDoc plugin setting file
|
||||
*.GhostDoc.xml
|
||||
|
||||
# Target VS files:
|
||||
vsx64
|
||||
|
||||
# Node.js Tools for Visual Studio
|
||||
.ntvs_analysis.dat
|
||||
|
||||
# Visual Studio 6 build log
|
||||
*.plg
|
||||
|
||||
# Visual Studio 6 workspace options file
|
||||
*.opt
|
||||
|
||||
# Visual Studio LightSwitch build output
|
||||
**/*.HTMLClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/ModelManifest.xml
|
||||
**/*.Server/GeneratedArtifacts
|
||||
**/*.Server/ModelManifest.xml
|
||||
_Pvt_Extensions
|
||||
|
||||
# LightSwitch generated files
|
||||
GeneratedArtifacts/
|
||||
ModelManifest.xml
|
||||
|
||||
# Paket dependency manager
|
||||
.paket/paket.exe
|
||||
|
||||
# FAKE - F# Make
|
||||
.fake/
|
||||
*.filters
|
||||
/External
|
||||
/Output
|
||||
/InferenceEngineMain/models
|
||||
/Test
|
||||
/HTTPClient/*.a
|
||||
/InferenceEngineMain/newModels
|
||||
# developer tools
|
||||
*.idea
|
||||
.vscode
|
||||
cmake-build-*
|
||||
.DS_Store
|
||||
|
||||
# For IDEA
|
||||
.idea/
|
||||
VS/
|
||||
Xcode/
|
||||
temp/
|
||||
report/
|
||||
.kdev4/
|
||||
*.kdev4
|
||||
*.kate-swp
|
||||
|
||||
/lin-build
|
||||
/win-build
|
||||
/CMakeFiles
|
||||
*.stamp
|
||||
*.depend
|
||||
*.vcxproj
|
||||
*.sln
|
||||
/CMakeCache.txt
|
||||
.vimprj/
|
||||
build_IA32/
|
||||
.dir-locals.el
|
||||
GTAGS
|
||||
GPATH
|
||||
GRTAGS
|
||||
GSYMS
|
||||
**/tags
|
||||
compile_commands.json
|
||||
service/dot-net-service/Output
|
||||
**/sublime_build
|
||||
/.project
|
||||
.vscode/
|
||||
/vsx32
|
||||
/service/dot-net-service/.klocwork/DotNetService
|
||||
cmake-build-*/
|
||||
/lin64
|
||||
|
||||
.gdb_history
|
||||
bin/
|
||||
build/
|
||||
.local_vimrc
|
||||
.ycm_extra_conf.py
|
||||
tags
|
||||
.gdb_history
|
||||
.vimspector.json
|
||||
doc/
|
||||
!ngraph/doc
|
||||
docs/build_documentation/work_dir/
|
||||
inference-engine/plugins/
|
||||
inference-engine/temp
|
||||
inference-engine/report
|
||||
.repo/
|
||||
docs/template_plugin/html/
|
||||
CMakeLists.txt.user
|
||||
docs/IE_PLUGIN_DG/html/
|
||||
|
||||
|
||||
# from Model Optimizer repo
|
||||
.idea
|
||||
.project
|
||||
.cproject
|
||||
.pydevproject
|
||||
.settings
|
||||
/bin/
|
||||
/gen/
|
||||
*.project
|
||||
*.cproject
|
||||
*.pydevproject
|
||||
*.settings
|
||||
*/gen/
|
||||
__pycache__
|
||||
*.swp
|
||||
/config.xml
|
||||
|
||||
# Python-specific
|
||||
.env3
|
||||
*.env3
|
||||
*.pyc
|
||||
|
||||
# Tests-specific
|
||||
.coverage
|
||||
htmlcov
|
||||
pylint_report.txt
|
||||
pylint_report_comments.txt
|
||||
|
||||
# Documentation-generated
|
||||
docs/build
|
||||
docs/source/_static
|
||||
docs/source/_templates
|
||||
docs/source/generated/
|
||||
*.coverage
|
||||
*htmlcov
|
||||
*pylint_report.txt
|
||||
*pylint_report_comments.txt
|
||||
|
||||
# Artifacts
|
||||
/*.bin
|
||||
/*.xml
|
||||
/*.json
|
||||
/*.so
|
||||
/*.txt
|
||||
/*.mapping
|
||||
/*.dat
|
||||
/*.svg
|
||||
/model-optimizer/*.bin
|
||||
/model-optimizer/*.xml
|
||||
/model-optimizer/*.json
|
||||
/model-optimizer/*.so
|
||||
/model-optimizer/*.txt
|
||||
/model-optimizer/*.pb
|
||||
/model-optimizer/*.pbtxt
|
||||
/model-optimizer/!CMakeLists.txt
|
||||
/model-optimizer/*.mapping
|
||||
/model-optimizer/*.dat
|
||||
/model-optimizer/*.svg
|
||||
|
||||
# ngraph
|
||||
ngraph/src/CPackConfig.cmake
|
||||
ngraph/src/CPackSourceConfig.cmake
|
||||
ngraph/src/VERSION
|
||||
ngraph/src/gtest/
|
||||
ngraph/src/json/
|
||||
ngraph/src/ngraphConfig.cmake
|
||||
ngraph/src/ngraphConfigVersion.cmake
|
||||
ngraph/src/protobuf/
|
||||
ngraph/src/src/
|
||||
ngraph/src/test/
|
||||
|
||||
14
.gitmodules
vendored
14
.gitmodules
vendored
@@ -2,7 +2,15 @@
|
||||
path = inference-engine/thirdparty/ade
|
||||
url = https://github.com/opencv/ade.git
|
||||
ignore = dirty
|
||||
[submodule "ngraph"]
|
||||
path = ngraph
|
||||
url = https://github.com/NervanaSystems/ngraph.git
|
||||
[submodule "inference-engine/thirdparty/mkl-dnn"]
|
||||
path = inference-engine/thirdparty/mkl-dnn
|
||||
url = https://github.com/openvinotoolkit/oneDNN.git
|
||||
ignore = dirty
|
||||
[submodule "inference-engine/tests/ie_test_utils/common_test_utils/gtest"]
|
||||
path = inference-engine/tests/ie_test_utils/common_test_utils/gtest
|
||||
url = https://github.com/openvinotoolkit/googletest.git
|
||||
ignore = dirty
|
||||
[submodule "inference-engine/samples/thirdparty/gflags"]
|
||||
path = inference-engine/samples/thirdparty/gflags
|
||||
url = https://github.com/gflags/gflags.git
|
||||
ignore = dirty
|
||||
@@ -9,24 +9,28 @@ cmake_policy(SET CMP0054 NEW)
|
||||
# See https://blog.kitware.com/cmake-3-13-0-available-for-download/
|
||||
|
||||
if (APPLE)
|
||||
# due to https://cmake.org/cmake/help/v3.12/policy/CMP0068.html
|
||||
cmake_minimum_required(VERSION 3.9 FATAL_ERROR)
|
||||
if(CMAKE_GENERATOR STREQUAL "Xcode")
|
||||
# due to https://gitlab.kitware.com/cmake/cmake/issues/14254
|
||||
cmake_minimum_required(VERSION 3.12.0 FATAL_ERROR)
|
||||
else()
|
||||
# due to https://cmake.org/cmake/help/v3.12/policy/CMP0068.html
|
||||
cmake_minimum_required(VERSION 3.9 FATAL_ERROR)
|
||||
endif()
|
||||
else()
|
||||
cmake_minimum_required(VERSION 3.7.2 FATAL_ERROR)
|
||||
endif()
|
||||
|
||||
|
||||
project(OpenVINO)
|
||||
|
||||
set(OpenVINO_MAIN_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
set(IE_MAIN_SOURCE_DIR ${OpenVINO_MAIN_SOURCE_DIR}/inference-engine)
|
||||
set(CMAKE_MODULE_PATH "${OpenVINO_MAIN_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH})
|
||||
list(APPEND CMAKE_MODULE_PATH "${OpenVINO_MAIN_SOURCE_DIR}/cmake")
|
||||
|
||||
include(CTest)
|
||||
include(features)
|
||||
|
||||
# include developer package
|
||||
include(developer_package NO_POLICY_SCOPE)
|
||||
include(developer_package)
|
||||
|
||||
# These options are shared with 3rdparty plugins
|
||||
# by means of developer package
|
||||
@@ -37,7 +41,7 @@ include(dependencies)
|
||||
message (STATUS "PROJECT ............................... " ${PROJECT_NAME})
|
||||
message (STATUS "CMAKE_BINARY_DIR ...................... " ${CMAKE_BINARY_DIR})
|
||||
message (STATUS "OpenVINO_MAIN_SOURCE_DIR .............. " ${OpenVINO_MAIN_SOURCE_DIR})
|
||||
message (STATUS "IE_MAIN_SOURCE_DIR .............. " ${IE_MAIN_SOURCE_DIR})
|
||||
message (STATUS "IE_MAIN_SOURCE_DIR .................... " ${IE_MAIN_SOURCE_DIR})
|
||||
message (STATUS "CMAKE_GENERATOR ....................... " ${CMAKE_GENERATOR})
|
||||
message (STATUS "CMAKE_C_COMPILER_ID ................... " ${CMAKE_C_COMPILER_ID})
|
||||
message (STATUS "CMAKE_BUILD_TYPE ...................... " ${CMAKE_BUILD_TYPE})
|
||||
@@ -61,33 +65,25 @@ function(build_ngraph)
|
||||
else ()
|
||||
ngraph_set(NGRAPH_ADDRESS_SANITIZER FALSE)
|
||||
endif ()
|
||||
ngraph_set(NGRAPH_TOOLS_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_CPU_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_INTERPRETER_ENABLE TRUE)
|
||||
ngraph_set(NGRAPH_NOP_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_GPUH_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_GENERIC_CPU_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_ENABLE_CPU_CONV_AUTO FALSE)
|
||||
ngraph_set(NGRAPH_PYTHON_BUILD_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_PLAIDML_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_FAST_MATH_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_JSON_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_DYNAMIC_COMPONENTS_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_NATIVE_ARCH_ENABLE FALSE)
|
||||
|
||||
if (NOT ANDROID)
|
||||
ngraph_set(NGRAPH_UNIT_TEST_ENABLE TRUE)
|
||||
ngraph_set(NGRAPH_UNIT_TEST_OPENVINO_ENABLE TRUE)
|
||||
# ngraph_set(NGRAPH_ONNX_IMPORT_ENABLE TRUE)
|
||||
set(NGRAPH_ONNX_IMPORT_ENABLE TRUE CACHE BOOL "" FORCE)
|
||||
if(ENABLE_TESTS)
|
||||
ngraph_set(NGRAPH_UNIT_TEST_ENABLE TRUE)
|
||||
ngraph_set(NGRAPH_IE_ENABLE TRUE)
|
||||
else()
|
||||
ngraph_set(NGRAPH_UNIT_TEST_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_IE_ENABLE FALSE)
|
||||
endif()
|
||||
ngraph_set(NGRAPH_ONNX_IMPORT_ENABLE TRUE)
|
||||
else()
|
||||
ngraph_set(NGRAPH_UNIT_TEST_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_TEST_UTIL_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_UNIT_TEST_OPENVINO_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_IE_ENABLE FALSE)
|
||||
ngraph_set(NGRAPH_ONNX_IMPORT_ENABLE FALSE)
|
||||
endif()
|
||||
ngraph_set(NGRAPH_INTERPRETER_ENABLE TRUE)
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "^(Apple)?Clang$")
|
||||
ie_add_compiler_flags(-Wno-error=uninitialized -Wno-error=literal-conversion)
|
||||
elseif(UNIX)
|
||||
ie_add_compiler_flags(-Wno-error=maybe-uninitialized -Wno-error=return-type -fPIC)
|
||||
@@ -100,7 +96,7 @@ function(build_ngraph)
|
||||
if (UNIX)
|
||||
ie_add_compiler_flags(-Wno-error=return-type -Wno-undef)
|
||||
elseif(WIN32)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4308 /wd4146 /wd4703 /wd4244")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4308 /wd4146 /wd4703 /wd4244 /wd4819")
|
||||
endif()
|
||||
|
||||
if(ENABLE_LTO)
|
||||
@@ -112,10 +108,56 @@ function(build_ngraph)
|
||||
set(SDL_cmake_included ON)
|
||||
# set(NGRAPH_COMPONENT_PREFIX "deployment_tools/ngraph/")
|
||||
add_subdirectory(ngraph)
|
||||
set(NGRAPH_LIBRARIES ngraph PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
build_ngraph()
|
||||
|
||||
add_subdirectory(inference-engine)
|
||||
|
||||
add_subdirectory(docs)
|
||||
|
||||
# cpack
|
||||
|
||||
# install setupvars
|
||||
|
||||
ie_cpack_add_component(setupvars REQUIRED)
|
||||
|
||||
if(UNIX)
|
||||
install(PROGRAMS scripts/setupvars/setupvars.sh
|
||||
DESTINATION bin
|
||||
COMPONENT setupvars)
|
||||
elseif(WIN32)
|
||||
install(PROGRAMS scripts/setupvars/setupvars.bat
|
||||
DESTINATION bin
|
||||
COMPONENT setupvars)
|
||||
endif()
|
||||
|
||||
# install install_dependencies
|
||||
|
||||
if(UNIX)
|
||||
ie_cpack_add_component(install_dependencies REQUIRED)
|
||||
install(DIRECTORY scripts/install_dependencies/
|
||||
DESTINATION install_dependencies
|
||||
COMPONENT install_dependencies)
|
||||
endif()
|
||||
|
||||
# install files for demo
|
||||
|
||||
ie_cpack_add_component(demo_scripts REQUIRED DEPENDS core)
|
||||
|
||||
if(UNIX)
|
||||
install(DIRECTORY scripts/demo/
|
||||
DESTINATION deployment_tools/demo
|
||||
COMPONENT demo_scripts
|
||||
USE_SOURCE_PERMISSIONS
|
||||
PATTERN *.bat EXCLUDE)
|
||||
elseif(WIN32)
|
||||
install(DIRECTORY scripts/demo/
|
||||
DESTINATION deployment_tools/demo
|
||||
COMPONENT demo_scripts
|
||||
USE_SOURCE_PERMISSIONS
|
||||
PATTERN *.sh EXCLUDE)
|
||||
endif()
|
||||
|
||||
ie_cpack(${IE_CPACK_COMPONENTS_ALL})
|
||||
|
||||
66
CODEOWNERS
Normal file
66
CODEOWNERS
Normal file
@@ -0,0 +1,66 @@
|
||||
# See help here: https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners
|
||||
|
||||
* @openvinotoolkit/openvino-maintainers
|
||||
|
||||
CODEOWNERS @openvinotoolkit/openvino-admins @openvinotoolkit/openvino-maintainers
|
||||
|
||||
# CI:
|
||||
Jenkinsfile @openvinotoolkit/openvino-admins
|
||||
azure-pipelines.yml @openvinotoolkit/openvino-admins
|
||||
/.github/ @openvinotoolkit/openvino-admins
|
||||
|
||||
# QA Tests:
|
||||
/tests/ @openvinotoolkit/openvino-tests-maintainers
|
||||
|
||||
# IE Core:
|
||||
/inference-engine/ @openvinotoolkit/openvino-ie-maintainers
|
||||
/inference-engine/src/transformations/ @GlebKazantaev @ichuraev
|
||||
/inference-engine/src/legacy_api/ @openvinotoolkit/openvino-ngraph-maintainers
|
||||
/inference-engine/src/readers/ @openvinotoolkit/openvino-ngraph-maintainers
|
||||
|
||||
# IE CPU:
|
||||
/inference-engine/src/mkldnn_plugin/ @openvinotoolkit/openvino-ie-cpu-maintainers @openvinotoolkit/openvino-ie-cpu-developers
|
||||
/inference-engine/src/low_precision_transformations/ @openvinotoolkit/openvino-ie-cpu-maintainers @openvinotoolkit/openvino-ie-cpu-developers
|
||||
/inference-engine/thirdparty/mkl-dnn/ @openvinotoolkit/openvino-ie-cpu-maintainers @openvinotoolkit/openvino-ie-cpu-developers
|
||||
|
||||
# IE GPU:
|
||||
/inference-engine/src/cldnn_engine/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
|
||||
/inference-engine/include/gpu/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
|
||||
/inference-engine/include/cldnn/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
|
||||
/inference-engine/thirdparty/clDNN/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
|
||||
|
||||
# IE VPU:
|
||||
/inference-engine/src/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers
|
||||
/inference-engine/include/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers
|
||||
/inference-engine/thirdparty/movidius/ @openvinotoolkit/openvino-ie-vpu-maintainers
|
||||
/inference-engine/tests_deprecated/unit/engines/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
|
||||
/inference-engine/tests_deprecated/functional/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
|
||||
/inference-engine/tests_deprecated/behavior/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
|
||||
/inference-engine/tests/functional/plugin/myriad/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
|
||||
/inference-engine/tests/unit/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
|
||||
/inference-engine/tests/unit/engines/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
|
||||
/inference-engine/tools/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers
|
||||
/inference-engine/scripts/run_tests_myriad_multistick.sh @openvinotoolkit/openvino-ie-vpu-maintainers
|
||||
|
||||
# IE GNA:
|
||||
/inference-engine/src/gna_plugin/ @openvinotoolkit/openvino-ie-gna-maintainers
|
||||
/inference-engine/include/gna/ @openvinotoolkit/openvino-ie-gna-maintainers
|
||||
|
||||
# IE MULTI:
|
||||
/inference-engine/src/multi_device/ @openvinotoolkit/openvino-ie-multi-maintainers
|
||||
/inference-engine/include/multi-device/ @openvinotoolkit/openvino-ie-multi-maintainers
|
||||
|
||||
# IE Tests:
|
||||
/inference-engine/tests/ @openvinotoolkit/openvino-ie-tests-maintainers
|
||||
/inference-engine/tests_deprecated/ @openvinotoolkit/openvino-ie-tests-maintainers
|
||||
/inference-engine/tests/functional/inference_engine/ngraph_reader/ @openvinotoolkit/openvino-ie-tests-maintainers @openvinotoolkit/openvino-ngraph-maintainers
|
||||
/inference-engine/tests/functional/inference_engine/transformations/ @openvinotoolkit/openvino-ie-tests-maintainers @openvinotoolkit/openvino-ngraph-maintainers
|
||||
|
||||
# MO:
|
||||
/model-optimizer/ @openvinotoolkit/openvino-mo-maintainers
|
||||
|
||||
# nGraph:
|
||||
/ngraph/ @openvinotoolkit/openvino-ngraph-maintainers
|
||||
|
||||
# Tools
|
||||
/tools/ @openvinotoolkit/openvino-tools-maintainers
|
||||
18
CONTRIBUTING.md
Normal file
18
CONTRIBUTING.md
Normal file
@@ -0,0 +1,18 @@
|
||||
# How to Contribute
|
||||
We welcome community contributions to the OpenVINO™ repository.
|
||||
If you have an idea how to improve the product, please share it
|
||||
with us doing the following steps:
|
||||
|
||||
* Make sure you can build the product and run all tests and samples with your patch
|
||||
* In case of a larger feature, provide relevant unit tests and one or more sample
|
||||
* Submit a pull request at https://github.com/openvinotoolkit/openvino/pulls
|
||||
|
||||
## OpenVINO™ Coding Style Guide
|
||||
We basically use the Google style (https://google.github.io/styleguide/cppguide.html) with some exceptions:
|
||||
* 4 spaces instead of 2 spaces for indentations
|
||||
* Limitation of 160 symbols for the line length
|
||||
* Exceptions are allowed
|
||||
* Using namespace are allowed in cpp and prohibited in headers
|
||||
* Underscore symbol before member in classes/structures
|
||||
* thisStyleForFunctions()
|
||||
* theSameStyleForVariables
|
||||
10
Jenkinsfile
vendored
Executable file
10
Jenkinsfile
vendored
Executable file
@@ -0,0 +1,10 @@
|
||||
#!groovy
|
||||
properties([
|
||||
parameters([
|
||||
booleanParam(defaultValue: true,
|
||||
description: 'Cancel the rest of parallel stages if one of them fails and return status immediately',
|
||||
name: 'failFast')
|
||||
])
|
||||
])
|
||||
|
||||
dldtPipelineEntrypoint(this)
|
||||
16
README.md
16
README.md
@@ -1,5 +1,5 @@
|
||||
# [OpenVINO™ Toolkit](https://01.org/openvinotoolkit) - Deep Learning Deployment Toolkit repository
|
||||
[](https://github.com/opencv/dldt/releases/tag/2020.1)
|
||||
[](https://github.com/openvinotoolkit/openvino/releases/tag/2020.4.0)
|
||||
[](LICENSE)
|
||||
|
||||
This toolkit allows developers to deploy pre-trained deep learning models
|
||||
@@ -30,23 +30,13 @@ and release your contribution under these terms.
|
||||
* [Model Optimizer Developer Guide](https://docs.openvinotoolkit.org/latest/_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide.html)
|
||||
|
||||
## How to Contribute
|
||||
We welcome community contributions to the Deep Learning Deployment Toolkit
|
||||
repository. If you have an idea how to improve the product, please share it
|
||||
with us doing the following steps:
|
||||
|
||||
* Make sure you can build the product and run all tests and samples with your patch
|
||||
* In case of a larger feature, provide relevant unit tests and one or more sample
|
||||
* Submit a pull request at https://github.com/opencv/dldt/pulls
|
||||
|
||||
We will review your contribution and, if any additional fixes or modifications
|
||||
are necessary, may give some feedback to guide you. Your pull request will be
|
||||
merged into GitHub* repositories if accepted.
|
||||
See [CONTRIBUTING](./CONTRIBUTING.md) for details. Thank you!
|
||||
|
||||
## Support
|
||||
Please report questions, issues and suggestions using:
|
||||
|
||||
* The `openvino` [tag on StackOverflow]\*
|
||||
* [GitHub* Issues](https://github.com/opencv/dldt/issues)
|
||||
* [GitHub* Issues](https://github.com/openvinotoolkit/openvino/issues)
|
||||
* [Forum](https://software.intel.com/en-us/forums/computer-vision)
|
||||
|
||||
---
|
||||
|
||||
333
azure-pipelines.yml
Normal file
333
azure-pipelines.yml
Normal file
@@ -0,0 +1,333 @@
|
||||
jobs:
|
||||
- job: Lin
|
||||
# About 150% of total time
|
||||
timeoutInMinutes: 75
|
||||
pool:
|
||||
#vmImage: 'ubuntu-18.04'
|
||||
name: LIN_VMSS_VENV_F8S_WU2
|
||||
variables:
|
||||
BUILD_TYPE: Release
|
||||
BIN_DIR: ../bin/intel64/$(BUILD_TYPE)
|
||||
steps:
|
||||
- script: |
|
||||
whoami
|
||||
uname -a
|
||||
which python3
|
||||
gcc --version
|
||||
lsb_release
|
||||
env
|
||||
cat /proc/cpuinfo
|
||||
cat /proc/meminfo
|
||||
vmstat -s
|
||||
df
|
||||
displayName: 'System properties'
|
||||
- script: |
|
||||
sudo apt --assume-yes install libusb-1.0-0-dev
|
||||
python3 -m pip install -r ./inference-engine/ie_bridges/python/requirements.txt
|
||||
# For running Python API tests
|
||||
python3 -m pip install -r ./inference-engine/ie_bridges/python/src/requirements-dev.txt
|
||||
displayName: 'Install dependencies'
|
||||
- script: |
|
||||
wget https://github.com/ninja-build/ninja/releases/download/v1.10.0/ninja-linux.zip
|
||||
unzip ninja-linux.zip
|
||||
sudo cp -v ninja /usr/local/bin/
|
||||
displayName: 'Install Ninja'
|
||||
- script: git submodule update --init --recursive --jobs 8
|
||||
displayName: 'Clone submodules'
|
||||
- script: |
|
||||
mkdir dldt-build
|
||||
cd dldt-build
|
||||
displayName: 'Create build directory'
|
||||
- task: CMake@1
|
||||
inputs:
|
||||
workingDirectory: dldt-build
|
||||
# CMake must get Python 3.x version by default
|
||||
cmakeArgs: .. -GNinja -DVERBOSE_BUILD=ON -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) -DENABLE_PYTHON=ON -DPYTHON_EXECUTABLE=/usr/bin/python3.6 -DENABLE_TESTS=ON
|
||||
- script: ninja
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'Build Lin'
|
||||
- script: ls -alR ../bin/
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'List files'
|
||||
- script: $(BIN_DIR)/unit-test --gtest_print_time=1 --gtest_filter=-backend_api.config_unsupported:*IE_GPU*
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'nGraph UT'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/InferenceEngineUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE UT old'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/ieUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE UT'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/cpuUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'CPU UT'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/gnaUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'GNA UT'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/vpuUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'VPU UT'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/ieFuncTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE FuncTests'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/cpuFuncTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'CPU FuncTests'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/MklDnnBehaviorTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'MklDnnBehaviorTests'
|
||||
continueOnError: false
|
||||
- script: git clone https://github.com/openvinotoolkit/testdata.git
|
||||
displayName: 'Clone testdata'
|
||||
- script: |
|
||||
export DATA_PATH=`pwd`/../testdata
|
||||
export MODELS_PATH=`pwd`/../testdata
|
||||
$(BIN_DIR)/MklDnnFunctionalTests --gtest_filter=*smoke*:-smoke_MobileNet/ModelTransformationsTest.LPT/mobilenet_v2_tf_depthwise_batch1_inPluginDisabled_inTestDisabled_asymmetric*
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'MklDnnFunctionalTests'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
export DATA_PATH=`pwd`/../testdata
|
||||
export MODELS_PATH=`pwd`/../testdata
|
||||
$(BIN_DIR)/InferenceEngineCAPITests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE CAPITests'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
export DATA_PATH=`pwd`/../testdata
|
||||
export MODELS_PATH=`pwd`/../testdata
|
||||
export LD_LIBRARY_PATH=`pwd`/$(BIN_DIR)/lib
|
||||
export PYTHONPATH=`pwd`/$(BIN_DIR)/lib/python_api/python3.6
|
||||
env
|
||||
cd ../inference-engine/ie_bridges/python/tests
|
||||
pytest
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'Python API Tests'
|
||||
continueOnError: false
|
||||
enabled: false
|
||||
|
||||
- job: Mac
|
||||
# About 200% of total time (perfomace of Mac hosts is unstable)
|
||||
timeoutInMinutes: 180
|
||||
pool:
|
||||
vmImage: 'macOS-10.15'
|
||||
variables:
|
||||
BUILD_TYPE: Release
|
||||
BIN_DIR: ../bin/intel64/$(BUILD_TYPE)
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
- script: |
|
||||
whoami
|
||||
uname -a
|
||||
which python3
|
||||
gcc --version
|
||||
xcrun --sdk macosx --show-sdk-version
|
||||
env
|
||||
sysctl -a
|
||||
displayName: 'System properties'
|
||||
- script: |
|
||||
brew install cython
|
||||
brew install automake
|
||||
displayName: 'Install dependencies'
|
||||
- script: brew install ninja
|
||||
displayName: 'Install Ninja'
|
||||
- script: git submodule update --init --recursive --jobs 8
|
||||
displayName: 'Clone submodules'
|
||||
- script: |
|
||||
mkdir dldt-build
|
||||
cd dldt-build
|
||||
displayName: 'Create build directory'
|
||||
- script: |
|
||||
export PATH="/usr/local/opt/cython/bin:$PATH"
|
||||
export CC=gcc
|
||||
export CXX=g++
|
||||
# Disable errors with Ninja
|
||||
export CXXFLAGS="-Wno-error=unused-command-line-argument"
|
||||
export CFLAGS="-Wno-error=unused-command-line-argument"
|
||||
cmake .. -GNinja -DVERBOSE_BUILD=ON -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) -DENABLE_PYTHON=ON -DENABLE_TESTS=ON
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'CMake'
|
||||
- script: ninja
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'Build Mac'
|
||||
- script: ls -alR ../bin/
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'List files'
|
||||
- script: $(BIN_DIR)/unit-test --gtest_print_time=1 --gtest_filter=-backend_api.config_unsupported:*IE_GPU*:IE_CPU.onnx_model_sigmoid
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'nGraph UT'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/InferenceEngineUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE UT old'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/ieUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE UT'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/cpuUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'CPU UT'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/vpuUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'VPU UT'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/ieFuncTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE FuncTests'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/cpuFuncTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'CPU FuncTests'
|
||||
continueOnError: false
|
||||
- script: $(BIN_DIR)/MklDnnBehaviorTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'MklDnnBehaviorTests'
|
||||
continueOnError: false
|
||||
- script: git clone https://github.com/openvinotoolkit/testdata.git
|
||||
displayName: 'Clone testdata'
|
||||
- script: |
|
||||
export DATA_PATH=`pwd`/../testdata
|
||||
export MODELS_PATH=`pwd`/../testdata
|
||||
$(BIN_DIR)/MklDnnFunctionalTests --gtest_filter=*smoke*:-smoke_MobileNet/ModelTransformationsTest.LPT/mobilenet_v2_tf_depthwise_batch1_inPluginDisabled_inTestDisabled_asymmetric*
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'MklDnnFunctionalTests'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
export DATA_PATH=`pwd`/../testdata
|
||||
export MODELS_PATH=`pwd`/../testdata
|
||||
$(BIN_DIR)/InferenceEngineCAPITests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE CAPITests'
|
||||
continueOnError: false
|
||||
|
||||
- job: Win
|
||||
# About 150% of total time
|
||||
timeoutInMinutes: 120
|
||||
pool:
|
||||
#vmImage: 'vs2017-win2016'
|
||||
name: WIN_VMSS_VENV_F8S_WU2
|
||||
variables:
|
||||
BUILD_TYPE: Release
|
||||
BUILD_DIR: D:\dldt-build
|
||||
BIN_DIR: ..\bin\intel64
|
||||
MSVS_VARS_PATH: C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
|
||||
MSVC_COMPILER_PATH: C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Tools\MSVC\14.24.28314\bin\Hostx64\x64\cl.exe
|
||||
steps:
|
||||
- script: |
|
||||
where python3
|
||||
wmic computersystem get TotalPhysicalMemory
|
||||
wmic cpu list
|
||||
wmic logicaldisk get description,name
|
||||
wmic VOLUME list
|
||||
set
|
||||
displayName: 'System properties'
|
||||
- script: |
|
||||
certutil -urlcache -split -f https://github.com/ninja-build/ninja/releases/download/v1.10.0/ninja-win.zip ninja-win.zip
|
||||
powershell -command "Expand-Archive -Force ninja-win.zip"
|
||||
displayName: Install Ninja
|
||||
- script: git submodule update --init --recursive --jobs 8
|
||||
displayName: 'Clone submodules'
|
||||
- script: |
|
||||
rd /Q /S $(BUILD_DIR)
|
||||
mkdir $(BUILD_DIR)\bin
|
||||
rd /Q /S dldt-build
|
||||
mkdir dldt-build
|
||||
displayName: 'Create build directory'
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\ninja-win;%PATH%
|
||||
call "$(MSVS_VARS_PATH)" && cmake -GNinja -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) -DENABLE_TESTS=ON -DCMAKE_C_COMPILER:PATH="$(MSVC_COMPILER_PATH)" -DCMAKE_CXX_COMPILER:PATH="$(MSVC_COMPILER_PATH)" $(Build.Repository.LocalPath)
|
||||
workingDirectory: $(BUILD_DIR)
|
||||
displayName: 'CMake'
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\ninja-win;%PATH%
|
||||
call "$(MSVS_VARS_PATH)" && ninja
|
||||
workingDirectory: $(BUILD_DIR)
|
||||
displayName: 'Build Win'
|
||||
- script: dir ..\bin\ /s /b
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'List files'
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;%PATH%
|
||||
$(BIN_DIR)\unit-test --gtest_print_time=1 --gtest_filter=-backend_api.config_unsupported:*IE_GPU*
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'nGraph UT'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;%PATH%
|
||||
$(BIN_DIR)\InferenceEngineUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE UT old'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;%PATH%
|
||||
$(BIN_DIR)\ieUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE UT'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;%PATH%
|
||||
$(BIN_DIR)\cpuUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'CPU UT'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;%PATH%
|
||||
$(BIN_DIR)\gnaUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'GNA UT'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;%PATH%
|
||||
$(BIN_DIR)\vpuUnitTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'VPU UT'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;%PATH%
|
||||
$(BIN_DIR)\ieFuncTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE FuncTests'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;%PATH%
|
||||
$(BIN_DIR)\cpuFuncTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'CPU FuncTests'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;%PATH%
|
||||
$(BIN_DIR)\MklDnnBehaviorTests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'MklDnnBehaviorTests'
|
||||
continueOnError: false
|
||||
- script: git clone https://github.com/openvinotoolkit/testdata.git
|
||||
workingDirectory: $(BUILD_DIR)
|
||||
displayName: 'Clone testdata'
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;$(Build.Repository.LocalPath)\inference-engine\temp\opencv_4.3.0\opencv\bin;%PATH%
|
||||
set DATA_PATH=$(BUILD_DIR)\testdata
|
||||
set MODELS_PATH=$(BUILD_DIR)\testdata
|
||||
$(BIN_DIR)\MklDnnFunctionalTests --gtest_filter=*smoke*:-smoke_MobileNet/ModelTransformationsTest.LPT/mobilenet_v2_tf_depthwise_batch1_inPluginDisabled_inTestDisabled_asymmetric*
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'MklDnnFunctionalTests'
|
||||
continueOnError: false
|
||||
- script: |
|
||||
set PATH=$(Build.Repository.LocalPath)\inference-engine\temp\tbb\bin;$(Build.Repository.LocalPath)\inference-engine\temp\opencv_4.3.0\opencv\bin;%PATH%
|
||||
set DATA_PATH=$(BUILD_DIR)\testdata
|
||||
set MODELS_PATH=$(BUILD_DIR)\testdata
|
||||
$(BIN_DIR)\InferenceEngineCAPITests
|
||||
workingDirectory: dldt-build
|
||||
displayName: 'IE CAPITests'
|
||||
continueOnError: false
|
||||
@@ -28,7 +28,6 @@
|
||||
- [Add Inference Engine to Your Project](#add-inference-engine-to-your-project)
|
||||
- [(Optional) Additional Installation Steps for the Intel® Movidius™ Neural Compute Stick and Neural Compute Stick 2](#optional-additional-installation-steps-for-the-intel-movidius-neural-compute-stick-and-neural-compute-stick-2)
|
||||
- [For Linux, Raspbian Stretch* OS](#for-linux-raspbian-stretch-os)
|
||||
- [For Windows](#for-windows-1)
|
||||
- [Next Steps](#next-steps)
|
||||
- [Additional Resources](#additional-resources)
|
||||
|
||||
@@ -53,19 +52,20 @@ as a part of [Intel® Distribution of OpenVINO™].
|
||||
## Build on Linux\* Systems
|
||||
|
||||
The software was validated on:
|
||||
- Ubuntu\* 18.04 (64-bit) with default GCC\* 7.5.0
|
||||
- Ubuntu\* 16.04 (64-bit) with default GCC\* 5.4.0
|
||||
- CentOS\* 7.4 (64-bit) with default GCC\* 4.8.5
|
||||
|
||||
### Software Requirements
|
||||
- [CMake]\* 3.11 or higher
|
||||
- GCC\* 4.8 or higher to build the Inference Engine
|
||||
- Python 2.7 or higher for Inference Engine Python API wrapper
|
||||
- Python 3.5 or higher for Inference Engine Python API wrapper
|
||||
- (Optional) [Install Intel® Graphics Compute Runtime for OpenCL™ Driver package 19.41.14441].
|
||||
|
||||
### Build Steps
|
||||
1. Clone submodules:
|
||||
```sh
|
||||
cd dldt
|
||||
cd openvino
|
||||
git submodule update --init --recursive
|
||||
```
|
||||
2. Install build dependencies using the `install_dependencies.sh` script in the
|
||||
@@ -172,10 +172,10 @@ Native compilation of the Inference Engine is the most straightforward solution.
|
||||
sudo apt-get install -y git cmake libusb-1.0-0-dev
|
||||
```
|
||||
|
||||
2. Go to the cloned `dldt` repository:
|
||||
2. Go to the cloned `openvino` repository:
|
||||
|
||||
```bash
|
||||
cd dldt
|
||||
cd openvino
|
||||
```
|
||||
|
||||
3. Initialize submodules:
|
||||
@@ -203,7 +203,7 @@ Native compilation of the Inference Engine is the most straightforward solution.
|
||||
|
||||
This compilation was tested on the following configuration:
|
||||
|
||||
* Host: Ubuntu\* 16.04 (64-bit, Intel® Core™ i7-6700K CPU @ 4.00GHz × 8)
|
||||
* Host: Ubuntu\* 18.04 (64-bit, Intel® Core™ i7-6700K CPU @ 4.00GHz × 8)
|
||||
* Target: Raspbian\* Stretch (32-bit, ARMv7, Raspberry Pi\* 3)
|
||||
|
||||
1. Install Docker\*:
|
||||
@@ -262,15 +262,15 @@ with the following content:
|
||||
5. Run Docker\* container with mounted source code folder from host:
|
||||
|
||||
```bash
|
||||
docker run -it -v /absolute/path/to/dldt:/dldt ie_cross_armhf /bin/bash
|
||||
docker run -it -v /absolute/path/to/openvino:/openvino ie_cross_armhf /bin/bash
|
||||
```
|
||||
|
||||
6. While in the container:
|
||||
|
||||
1. Go to the cloned `dldt` repository:
|
||||
1. Go to the cloned `openvino` repository:
|
||||
|
||||
```bash
|
||||
cd dldt
|
||||
cd openvino
|
||||
```
|
||||
|
||||
2. Create a build folder:
|
||||
@@ -291,8 +291,8 @@ with the following content:
|
||||
```
|
||||
|
||||
7. Press **Ctrl+D** to exit from Docker. You can find the resulting binaries
|
||||
in the `dldt/bin/armv7l/` directory and the OpenCV*
|
||||
installation in the `dldt/inference-engine/temp`.
|
||||
in the `openvino/bin/armv7l/` directory and the OpenCV*
|
||||
installation in the `openvino/inference-engine/temp`.
|
||||
|
||||
>**NOTE**: Native applications that link to cross-compiled Inference Engine
|
||||
library require an extra compilation flag `-march=armv7-a`.
|
||||
@@ -338,7 +338,7 @@ The software was validated on:
|
||||
- [CMake]\*3.11 or higher
|
||||
- Microsoft\* Visual Studio 2017, 2019 or [Intel® C++ Compiler] 18.0
|
||||
- (Optional) Intel® Graphics Driver for Windows* (26.20) [driver package].
|
||||
- Python 3.4 or higher for Inference Engine Python API wrapper
|
||||
- Python 3.5 or higher for Inference Engine Python API wrapper
|
||||
|
||||
### Build Steps
|
||||
|
||||
@@ -381,8 +381,8 @@ cmake -G "Visual Studio 15 2017 Win64" -T "Intel C++ Compiler 18.0" ^
|
||||
|
||||
6. Before running the samples, add paths to the TBB and OpenCV binaries used for
|
||||
the build to the `%PATH%` environment variable. By default, TBB binaries are
|
||||
downloaded by the CMake-based script to the `<dldt_repo>/inference-engine/temp/tbb/bin`
|
||||
folder, OpenCV binaries to the `<dldt_repo>/inference-engine/temp/opencv_4.3.0/opencv/bin`
|
||||
downloaded by the CMake-based script to the `<openvino_repo>/inference-engine/temp/tbb/bin`
|
||||
folder, OpenCV binaries to the `<openvino_repo>/inference-engine/temp/opencv_4.3.0/opencv/bin`
|
||||
folder.
|
||||
|
||||
### Additional Build Options
|
||||
@@ -437,7 +437,7 @@ cmake -G "Visual Studio 15 2017 Win64" -T "Intel C++ Compiler 18.0" ^
|
||||
call "C:\Program Files (x86)\IntelSWTools\compilers_and_libraries_2018\windows\bin\ipsxe-comp-vars.bat" intel64 vs2017
|
||||
set CXX=icl
|
||||
set CC=icl
|
||||
:: clean TBBROOT value set by ipsxe-comp-vars.bat, required TBB package will be downloaded by dldt cmake script
|
||||
:: clean TBBROOT value set by ipsxe-comp-vars.bat, required TBB package will be downloaded by openvino cmake script
|
||||
set TBBROOT=
|
||||
cmake -G Ninja -Wno-dev -DCMAKE_BUILD_TYPE=Release ..
|
||||
cmake --build . --config Release
|
||||
@@ -455,13 +455,13 @@ The software was validated on:
|
||||
|
||||
- [CMake]\* 3.11 or higher
|
||||
- Clang\* compiler from Xcode\* 10.1 or higher
|
||||
- Python\* 3.4 or higher for the Inference Engine Python API wrapper
|
||||
- Python\* 3.5 or higher for the Inference Engine Python API wrapper
|
||||
|
||||
### Build Steps
|
||||
|
||||
1. Clone submodules:
|
||||
```sh
|
||||
cd dldt
|
||||
cd openvino
|
||||
git submodule update --init --recursive
|
||||
```
|
||||
2. Install build dependencies using the `install_dependencies.sh` script in the
|
||||
@@ -545,7 +545,7 @@ This section describes how to build Inference Engine for Android x86 (64-bit) op
|
||||
|
||||
2. Clone submodules
|
||||
```sh
|
||||
cd dldt
|
||||
cd openvino
|
||||
git submodule update --init --recursive
|
||||
```
|
||||
|
||||
@@ -575,8 +575,7 @@ This section describes how to build Inference Engine for Android x86 (64-bit) op
|
||||
|
||||
## Use Custom OpenCV Builds for Inference Engine
|
||||
|
||||
> **NOTE**: The recommended and tested version of OpenCV is 4.3. The minimum
|
||||
supported version is 3.4.0.
|
||||
> **NOTE**: The recommended and tested version of OpenCV is 4.4.0.
|
||||
|
||||
Required versions of OpenCV packages are downloaded automatically during the
|
||||
building Inference Engine library. If the build script can not find and download
|
||||
@@ -610,7 +609,7 @@ before running the Inference Engine build:
|
||||
For CMake projects, set the `InferenceEngine_DIR` environment variable:
|
||||
|
||||
```sh
|
||||
export InferenceEngine_DIR=/path/to/dldt/build/
|
||||
export InferenceEngine_DIR=/path/to/openvino/build/
|
||||
```
|
||||
|
||||
Then you can find Inference Engine by `find_package`:
|
||||
@@ -660,26 +659,12 @@ sudo ldconfig
|
||||
rm 97-myriad-usbboot.rules
|
||||
```
|
||||
|
||||
### For Windows
|
||||
|
||||
For Intel® Movidius™ Neural Compute Stick and Intel® Neural Compute Stick 2,
|
||||
install the Movidius™ VSC driver:
|
||||
|
||||
1. Go to the `<DLDT_ROOT_DIR>/inference-engine/thirdparty/movidius/MovidiusDriver`
|
||||
directory, where the `DLDT_ROOT_DIR` is the directory to which the DLDT
|
||||
repository was cloned.
|
||||
2. Right click on the `Movidius_VSC_Device.inf` file and choose **Install** from
|
||||
the pop-up menu.
|
||||
|
||||
You have installed the driver for your Intel® Movidius™ Neural Compute Stick
|
||||
or Intel® Neural Compute Stick 2.
|
||||
|
||||
## Next Steps
|
||||
|
||||
Congratulations, you have built the Inference Engine. To get started with the
|
||||
OpenVINO™, proceed to the Get Started guides:
|
||||
|
||||
* [Get Started with Deep Learning Deployment Toolkit on Linux*](../get-started-linux.md)
|
||||
* [Get Started with Deep Learning Deployment Toolkit on Linux*](get-started-linux.md)
|
||||
|
||||
## Notice
|
||||
|
||||
|
||||
@@ -20,14 +20,18 @@ if (NOT ENABLE_MKL_DNN)
|
||||
endif()
|
||||
|
||||
if(ENABLE_AVX512F)
|
||||
if ((CMAKE_CXX_COMPILER_ID MATCHES MSVC) AND (MSVC_VERSION VERSION_LESS 1920))
|
||||
if ((CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") AND (MSVC_VERSION VERSION_LESS 1920))
|
||||
# 1920 version of MSVC 2019. In MSVC 2017 AVX512F not work
|
||||
set(ENABLE_AVX512F OFF CACHE BOOL "" FORCE)
|
||||
endif()
|
||||
if (CMAKE_CXX_COMPILER_ID MATCHES Clang)
|
||||
if ((CMAKE_CXX_COMPILER_ID STREQUAL "Clang") AND (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 6))
|
||||
set(ENABLE_AVX512F OFF CACHE BOOL "" FORCE)
|
||||
endif()
|
||||
if ((CMAKE_CXX_COMPILER_ID STREQUAL GNU) AND (NOT (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 4.9)))
|
||||
if ((CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang") AND (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 10))
|
||||
# TBD: clarify which AppleClang version supports avx512
|
||||
set(ENABLE_AVX512F OFF CACHE BOOL "" FORCE)
|
||||
endif()
|
||||
if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.9))
|
||||
set(ENABLE_AVX512F OFF CACHE BOOL "" FORCE)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
@@ -4,14 +4,17 @@
|
||||
|
||||
if(NOT TARGET ie_coverage_clean)
|
||||
add_custom_target(ie_coverage_clean)
|
||||
set_target_properties(ie_coverage_clean PROPERTIES FOLDER coverage)
|
||||
endif()
|
||||
|
||||
if(NOT TARGET ie_coverage_init)
|
||||
add_custom_target(ie_coverage_init)
|
||||
set_target_properties(ie_coverage_init PROPERTIES FOLDER coverage)
|
||||
endif()
|
||||
|
||||
if(NOT TARGET ie_coverage)
|
||||
add_custom_target(ie_coverage)
|
||||
set_target_properties(ie_coverage PROPERTIES FOLDER coverage)
|
||||
endif()
|
||||
|
||||
set(IE_COVERAGE_REPORTS "${CMAKE_BINARY_DIR}/coverage")
|
||||
@@ -26,10 +29,10 @@ function(ie_coverage_clean)
|
||||
cmake_parse_arguments(IE_COVERAGE "" "REPOSITORY;DIRECTORY" "" ${ARGN})
|
||||
|
||||
add_custom_target(ie_coverage_zerocounters_${IE_COVERAGE_REPOSITORY}
|
||||
COMMAND lcov --zerocounters --quiet
|
||||
--directory "${IE_COVERAGE_DIRECTORY}"
|
||||
COMMENT "Add zero counters for coverage for ${IE_COVERAGE_REPOSITORY}"
|
||||
VERBATIM)
|
||||
COMMAND lcov --zerocounters --quiet
|
||||
--directory "${IE_COVERAGE_DIRECTORY}"
|
||||
COMMENT "Add zero counters for coverage for ${IE_COVERAGE_REPOSITORY}"
|
||||
VERBATIM)
|
||||
|
||||
add_custom_target(ie_coverage_clean_${IE_COVERAGE_REPOSITORY}
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
@@ -42,6 +45,10 @@ function(ie_coverage_clean)
|
||||
DEPENDS "${IE_COVERAGE_SCRIPT_DIR}/coverage_clean.cmake"
|
||||
VERBATIM)
|
||||
|
||||
set_target_properties(ie_coverage_zerocounters_${IE_COVERAGE_REPOSITORY}
|
||||
ie_coverage_clean_${IE_COVERAGE_REPOSITORY}
|
||||
PROPERTIES FOLDER coverage)
|
||||
|
||||
add_dependencies(ie_coverage_clean ie_coverage_zerocounters_${IE_COVERAGE_REPOSITORY}
|
||||
ie_coverage_clean_${IE_COVERAGE_REPOSITORY})
|
||||
endfunction()
|
||||
@@ -87,6 +94,8 @@ function(ie_coverage_capture)
|
||||
|
||||
add_custom_target(ie_coverage_${IE_COVERAGE_INFO_FILE}_info
|
||||
DEPENDS ${output_file})
|
||||
set_target_properties(ie_coverage_${IE_COVERAGE_INFO_FILE}_info
|
||||
PROPERTIES FOLDER coverage)
|
||||
endfunction()
|
||||
|
||||
#
|
||||
@@ -111,6 +120,8 @@ function(ie_coverage_extract)
|
||||
VERBATIM)
|
||||
add_custom_target(ie_coverage_${IE_COVERAGE_OUTPUT}_info
|
||||
DEPENDS ${output_file})
|
||||
set_target_properties(ie_coverage_${IE_COVERAGE_OUTPUT}_info
|
||||
PROPERTIES FOLDER coverage)
|
||||
|
||||
add_dependencies(ie_coverage_${IE_COVERAGE_OUTPUT}_info ie_coverage_${IE_COVERAGE_INPUT}_info)
|
||||
endfunction()
|
||||
@@ -137,6 +148,8 @@ function(ie_coverage_remove)
|
||||
VERBATIM)
|
||||
add_custom_target(ie_coverage_${IE_COVERAGE_OUTPUT}_info
|
||||
DEPENDS ${output_file})
|
||||
set_target_properties(ie_coverage_${IE_COVERAGE_OUTPUT}_info
|
||||
PROPERTIES FOLDER coverage)
|
||||
|
||||
add_dependencies(ie_coverage_${IE_COVERAGE_OUTPUT}_info ie_coverage_${IE_COVERAGE_INPUT}_info)
|
||||
endfunction()
|
||||
@@ -164,6 +177,8 @@ function(ie_coverage_merge)
|
||||
VERBATIM)
|
||||
add_custom_target(ie_coverage_${IE_COVERAGE_OUTPUT}_info
|
||||
DEPENDS ${output_file})
|
||||
set_target_properties(ie_coverage_${IE_COVERAGE_OUTPUT}_info
|
||||
PROPERTIES FOLDER coverage)
|
||||
|
||||
add_dependencies(ie_coverage_${IE_COVERAGE_OUTPUT}_info ${dependencies})
|
||||
endfunction()
|
||||
@@ -188,6 +203,8 @@ function(ie_coverage_genhtml)
|
||||
VERBATIM)
|
||||
add_custom_target(ie_coverage_${IE_COVERAGE_INFO_FILE}_genhtml
|
||||
DEPENDS "${output_directory}/index.html")
|
||||
set_target_properties(ie_coverage_${IE_COVERAGE_INFO_FILE}_genhtml
|
||||
PROPERTIES FOLDER coverage)
|
||||
|
||||
add_dependencies(ie_coverage_${IE_COVERAGE_INFO_FILE}_genhtml ie_coverage_${IE_COVERAGE_INFO_FILE}_info)
|
||||
add_dependencies(ie_coverage ie_coverage_${IE_COVERAGE_INFO_FILE}_genhtml)
|
||||
|
||||
105
cmake/cross_compile/cross_compiled_disp_gen.cmake
Normal file
105
cmake/cross_compile/cross_compiled_disp_gen.cmake
Normal file
@@ -0,0 +1,105 @@
|
||||
# Copyright (C) 2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
# =================================================================
|
||||
#
|
||||
# Generates cpp file with dispatcher for cross compiled function
|
||||
# Parameters:
|
||||
# XARCH_API_HEADER -- path to header with function declaration
|
||||
# XARCH_FUNC_NAME -- name of function to dispatch
|
||||
# XARCH_NAMESPACES -- full namespace used to keep ODR
|
||||
# XARCH_DISP_FILE -- dispatcher file name to generate
|
||||
# XARCH_SET -- set of ARCH supported by dispatcher. space delimited
|
||||
#
|
||||
# =================================================================
|
||||
|
||||
set(_CPU_CHECK_ANY "true")
|
||||
set(_CPU_CHECK_SSE42 "with_cpu_x86_sse42()")
|
||||
set(_CPU_CHECK_AVX "with_cpu_x86_avx()")
|
||||
set(_CPU_CHECK_AVX2 "with_cpu_x86_avx2()")
|
||||
set(_CPU_CHECK_AVX512F "with_cpu_x86_avx512f()")
|
||||
|
||||
function(_generate_dispatcher)
|
||||
_find_signature_in_file(${XARCH_API_HEADER} ${XARCH_FUNC_NAME} SIGNATURE)
|
||||
_generate_call_line_from_signature("${SIGNATURE}" CALL_LINE)
|
||||
|
||||
string(REPLACE " " ";" XARCH_SET "${XARCH_SET}")
|
||||
string(REPLACE "::" ";" XARCH_NAMESPACES "${XARCH_NAMESPACES}")
|
||||
|
||||
list(GET XARCH_NAMESPACES -1 XARCH_CURRENT_NAMESPACE)
|
||||
set(PARENT_NAMESPACES ${XARCH_NAMESPACES})
|
||||
list(REMOVE_AT PARENT_NAMESPACES -1)
|
||||
|
||||
set(DISP_CONTENT
|
||||
"
|
||||
//
|
||||
// Auto generated file by CMake macros cross_compiled_file()
|
||||
// !! do not modify it !!!
|
||||
//
|
||||
#include \"${XARCH_API_HEADER}\"
|
||||
#include \"ie_system_conf.h\"
|
||||
|
||||
")
|
||||
|
||||
foreach(_namespace ${PARENT_NAMESPACES})
|
||||
string(APPEND DISP_CONTENT
|
||||
"namespace ${_namespace} {\n")
|
||||
endforeach()
|
||||
|
||||
foreach(_arch ${XARCH_SET})
|
||||
string(APPEND DISP_CONTENT
|
||||
"namespace ${_arch} {\n ${SIGNATURE}\; \n}\n")
|
||||
endforeach()
|
||||
|
||||
string(APPEND DISP_CONTENT
|
||||
"namespace ${XARCH_CURRENT_NAMESPACE} {\n\n${SIGNATURE} {\n")
|
||||
|
||||
foreach(_arch ${XARCH_SET})
|
||||
string(APPEND DISP_CONTENT
|
||||
" if (${_CPU_CHECK_${_arch}}) {\n return ${_arch}::${CALL_LINE}\;\n }\n")
|
||||
endforeach()
|
||||
|
||||
string(APPEND DISP_CONTENT "}\n\n}\n")
|
||||
|
||||
foreach(_namespace ${PARENT_NAMESPACES})
|
||||
string(APPEND DISP_CONTENT "} // namespace ${_namespace}\n")
|
||||
endforeach()
|
||||
|
||||
file(WRITE ${XARCH_DISP_FILE} ${DISP_CONTENT})
|
||||
endfunction()
|
||||
|
||||
|
||||
function(_find_signature_in_file FILE FUNCTION RESULT_NAME)
|
||||
file(READ "${FILE}" CONTENT)
|
||||
set(valid_chars "<>:_*& a-zA-Z0-9\n") ## valid chars for type/var specification (including new line /n)
|
||||
string(REGEX MATCH "[${valid_chars}]*${FUNCTION}[ ]*[(][=,${valid_chars}]*[)]" SIGNATURE ${CONTENT})
|
||||
string(STRIP "${SIGNATURE}" SIGNATURE)
|
||||
set (${RESULT_NAME} "${SIGNATURE}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
function(_generate_call_line_from_signature SIGNATURE RESULT_NAME)
|
||||
## extract func name
|
||||
set(_name ${SIGNATURE})
|
||||
string(REGEX REPLACE "[ ]*[(].*[)]" "" _name "${_name}") # remove arguments
|
||||
string(REGEX MATCH "[a-zA-Z0-9_]*[ ]*$" _name "${_name}") # extract func name
|
||||
|
||||
set(nt_chars "[:_*& a-zA-Z0-9\n]*") ## any sequence of chars to describe object type (no template)
|
||||
|
||||
## extract arg names
|
||||
set(_args ${SIGNATURE})
|
||||
string(REGEX MATCH "[(].*[)]" _args "${_args}") # extract args with types, all inside brackets
|
||||
string(REGEX REPLACE "<${nt_chars},${nt_chars}>" "" _args "${_args}") # remove template brackets with ','
|
||||
string(REPLACE "(" "" _args ${_args})
|
||||
string(REPLACE ")" "" _args ${_args})
|
||||
string(REPLACE "," ";" _args ${_args}) # now it's list
|
||||
foreach(_arg_elem ${_args})
|
||||
string(REGEX MATCH "[a-zA-Z0-9_]*[ ]*$" _arg_elem "${_arg_elem}")
|
||||
list(APPEND _arg_names ${_arg_elem})
|
||||
endforeach()
|
||||
string(REPLACE ";" ", " _arg_names "${_arg_names}") # back to comma separated string
|
||||
|
||||
set (${RESULT_NAME} "${_name}(${_arg_names})" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
_generate_dispatcher()
|
||||
16
cmake/cross_compile/cross_compiled_disp_gen_options.in
Normal file
16
cmake/cross_compile/cross_compiled_disp_gen_options.in
Normal file
@@ -0,0 +1,16 @@
|
||||
# Copyright (C) 2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
# =================================================================
|
||||
#
|
||||
# This file is used to add dependency on option value. If the args
|
||||
# were changed, the configure file will be updated, and the dependent
|
||||
# add_custom_command will rerun.
|
||||
#
|
||||
# Otherwise, changing CMake options would not have an effect on the
|
||||
# generated file.
|
||||
#
|
||||
# =================================================================
|
||||
|
||||
@_GEN_ARGS_LIST@
|
||||
227
cmake/cross_compile/cross_compiled_func.cmake
Normal file
227
cmake/cross_compile/cross_compiled_func.cmake
Normal file
@@ -0,0 +1,227 @@
|
||||
# Copyright (C) 2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
## list of available instruction sets (ordered weakest -> strongest)
set(_ARCH_LIST ANY SSE42 AVX AVX2 AVX512F)

## For a given top arch, the regex of archs whose variants may be built
## (each level accepts itself plus everything weaker).
set(_ACCEPTED_ARCHS_ANY "^(ANY)$")
set(_ACCEPTED_ARCHS_SSE42 "^(ANY|SSE42)$")
set(_ACCEPTED_ARCHS_AVX "^(ANY|SSE42|AVX)$")
set(_ACCEPTED_ARCHS_AVX2 "^(ANY|SSE42|AVX|AVX2)$")
set(_ACCEPTED_ARCHS_AVX512F "^(ANY|SSE42|AVX|AVX2|AVX512F)$")

## Arch specific definitions; each level inherits the defines of the
## weaker levels (e.g. AVX2 also gets -DHAVE_AVX and -DHAVE_SSE42).
set(_DEFINE_ANY "")
set(_DEFINE_SSE42 "-DHAVE_SSE42" ${_DEFINE_ANY})
set(_DEFINE_AVX "-DHAVE_AVX" ${_DEFINE_SSE42})
set(_DEFINE_AVX2 "-DHAVE_AVX2" ${_DEFINE_AVX})
set(_DEFINE_AVX512F "-DHAVE_AVX512F" ${_DEFINE_AVX2})

## Arch specific compile options (provided by os_flags helpers)
ie_avx512_optimization_flags(_FLAGS_AVX512F)
ie_avx2_optimization_flags (_FLAGS_AVX2)
ie_sse42_optimization_flags (_FLAGS_SSE42)
set(_FLAGS_AVX "") ## TBD is not defined for IE project yet
set(_FLAGS_ANY "") ##

## way to duplicate file via cmake tool set
if (UNIX)
    ## Clone sources via sym link because it allow to modify original file in IDE along with debug
    set(TO_DUPLICATE create_symlink)
else()
    ## Windows and others - just copy
    set(TO_DUPLICATE copy)
endif()

## Generator script and the option-snapshot template it depends on.
set(DISPATCHER_GEN_SCRIPT ${CMAKE_CURRENT_LIST_DIR}/cross_compiled_disp_gen.cmake)
set(DISPATCHER_GEN_OPTIONS_HOLDER ${CMAKE_CURRENT_LIST_DIR}/cross_compiled_disp_gen_options.in)
|
||||
|
||||
|
||||
#######################################
|
||||
#
|
||||
# Allow to enable multiple cross compilation of source file inside one module
|
||||
# with keeping requirements on minimal instruction set. The CPU check performed
|
||||
# in runtime via common utils declared in "ie_system_conf.h".
|
||||
#
|
||||
# Usage example:
|
||||
# cross_compiled_file(<target>
|
||||
# ARCH
|
||||
# ANY <source_file>
|
||||
# SSE SSE42 <source_file>
|
||||
# AVX AVX2 <source_file>
|
||||
# AVX512F <source_file>
|
||||
# API <header_file>
|
||||
# NAMESPACE <namespace> # like "IE::Ext::CPU::XARCH"
|
||||
# NAME <function_name> # like "my_fun"
|
||||
# )
|
||||
#
|
||||
## Register arch-specific variants of source files on TARGET and generate a
## runtime dispatcher for the function NAME declared in header API within
## NAMESPACE. See the usage example in the banner comment above.
function(cross_compiled_file TARGET)
    set(oneValueArgs API ## Header with declaration of cross compiled function
                     NAMESPACE ## The namespace where cross compiled function was declared
                     NAME) ## String with function signature to make cross compiled
    set(multiValueArgs ARCH) ## List of architecture described in _ARCH_LIST
    cmake_parse_arguments(X "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})

    ## verification
    if(X_UNPARSED_ARGUMENTS)
        message(FATAL_ERROR "Unknown argument: " ${X_UNPARSED_ARGUMENTS})
    endif()
    ## NOTE(review): "NOT TARGET" here reads the TARGET *variable* (the first
    ## positional argument), not CMake's `if(TARGET <name>)` form.
    if((NOT TARGET) OR (NOT X_NAME) OR (NOT X_NAMESPACE) OR (NOT X_API) OR (NOT X_ARCH))
        message(FATAL_ERROR "Missed arguments")
    endif()

    ## Only archs accepted by the currently enabled top arch are built.
    _currently_requested_top_arch(TOP_ARCH)
    set(_CURRENT_ARCH_FILTER "${_ACCEPTED_ARCHS_${TOP_ARCH}}")

    ## format: ARCH1 ARCH2 <src1> ARCH3 <src2> ...
    ## Arch IDs accumulate until a source file token flushes them: that
    ## source is then cloned once per accumulated (and accepted) arch.
    foreach(_it ${X_ARCH})
        if (_it IN_LIST _ARCH_LIST)
            ## that is arch ID
            set(_arch ${_it})
            if(_arch MATCHES ${_CURRENT_ARCH_FILTER})
                list(APPEND _CUR_ARCH_SET ${_arch})
                list(APPEND _FULL_ARCH_SET ${_arch})
            endif()
        else()
            ## that is source file name
            set(_src_name ${_it})
            ## Replace the plain source with its per-arch clones.
            _remove_source_from_target(${TARGET} ${_src_name})
            _clone_source_to_target(${TARGET} ${_src_name} "${_CUR_ARCH_SET}")
            set(_CUR_ARCH_SET "")
        endif()
    endforeach()

    ## One dispatcher source covering every arch variant registered above.
    _add_dispatcher_to_target(${TARGET} ${X_API} ${X_NAME} "${X_NAMESPACE}" "${_FULL_ARCH_SET}")
endfunction()
|
||||
|
||||
|
||||
##########################################
|
||||
#
|
||||
# Add source multiple time per each element in ARCH_SET.
|
||||
# Also provide corresponding arch specific flags and defines.
|
||||
#
|
||||
function(_clone_source_to_target TARGET SOURCE ARCH_SET)
    foreach(_arch ${ARCH_SET})
        ## Per-arch copy lives under <binary-dir>/cross-compiled/<ARCH>/
        set(_arch_dir cross-compiled/${_arch})

        get_filename_component(ARCH_NAME ${SOURCE} NAME)
        get_filename_component(ARCH_INCLUDE_DIR ${SOURCE} DIRECTORY)
        set(ARCH_SOURCE "${_arch_dir}/${ARCH_NAME}")

        ## Duplicate the source at build time: symlink on UNIX, copy
        ## elsewhere (TO_DUPLICATE is chosen at include time).
        add_custom_command(
                OUTPUT ${ARCH_SOURCE}
                COMMAND ${CMAKE_COMMAND} -E make_directory
                          ${CMAKE_CURRENT_BINARY_DIR}/${_arch_dir}
                COMMAND ${CMAKE_COMMAND} -E ${TO_DUPLICATE}
                          ${CMAKE_CURRENT_SOURCE_DIR}/${SOURCE}
                          ${CMAKE_CURRENT_BINARY_DIR}/${ARCH_SOURCE}
                DEPENDS ${SOURCE}
                )

        ## Arch defines + optimization flags for this clone only.
        set(_ARCH_SPECIFIC_FLAGS
                ${_DEFINE_${_arch}}
                ${_FLAGS_${_arch}}
                "-DXARCH=${_arch}" ## to replace XARCH with direct ARCH name
                "-I${CMAKE_CURRENT_SOURCE_DIR}/${ARCH_INCLUDE_DIR}" ## To make valid #include "some.hpp"
                )

        _add_source_compile_flags(${ARCH_SOURCE} ${_ARCH_SPECIFIC_FLAGS})

        list(APPEND _ARCH_SOURCES ${ARCH_SOURCE})
    endforeach()

    _add_source_to_target(${TARGET} ${_ARCH_SOURCES})
endfunction()
|
||||
|
||||
|
||||
##########################################
|
||||
#
|
||||
# Generate dispatcher for provided function
|
||||
# for archs in ARCH_SET.
|
||||
#
|
||||
## Generate the dispatcher source for FUNC_NAME (declared in HEADER within
## NAMESPACE) covering the archs in ARCH_SET, and add it to TARGET.
function(_add_dispatcher_to_target TARGET HEADER FUNC_NAME NAMESPACE ARCH_SET)
    get_filename_component(DISPATCHER_NAME ${HEADER} NAME_WE)
    get_filename_component(DISPATCHER_INCLUDE_DIR ${HEADER} DIRECTORY)
    set(DISPATCHER_SOURCE "cross-compiled/${DISPATCHER_NAME}_disp.cpp")
    set(DISPATCHER_OPT_HOLDER "cross-compiled/${DISPATCHER_NAME}_holder.txt")

    ## Arguments handed to the generator script via -D definitions.
    ## NB: use the FUNC_NAME parameter here — the original referenced
    ## ${X_NAME}, which only resolved through the caller's
    ## (cross_compiled_file) scope leaking into this function.
    set(_GEN_ARGS_LIST
        -DXARCH_FUNC_NAME="${FUNC_NAME}"
        -DXARCH_NAMESPACES="${NAMESPACE}"
        -DXARCH_API_HEADER="${CMAKE_CURRENT_SOURCE_DIR}/${HEADER}"
        -DXARCH_DISP_FILE="${CMAKE_CURRENT_BINARY_DIR}/${DISPATCHER_SOURCE}"
        -DXARCH_SET="${ARCH_SET}"
        )
    ## Snapshot the argument values: if they change, the holder file is
    ## rewritten and the custom command below reruns.
    configure_file(${DISPATCHER_GEN_OPTIONS_HOLDER} ${DISPATCHER_OPT_HOLDER})

    add_custom_command(
            OUTPUT ${DISPATCHER_SOURCE}
            COMMAND ${CMAKE_COMMAND} ${_GEN_ARGS_LIST}
                      -P ${DISPATCHER_GEN_SCRIPT}
            DEPENDS ${HEADER}
                    ${DISPATCHER_GEN_SCRIPT}
                    ${CMAKE_CURRENT_BINARY_DIR}/${DISPATCHER_OPT_HOLDER} ## Just to make run dependency on args value
            )

    _add_source_compile_flags(${DISPATCHER_SOURCE} "-I${DISPATCHER_INCLUDE_DIR}")
    _add_source_to_target(${TARGET} ${DISPATCHER_SOURCE})
endfunction()
|
||||
|
||||
#######################################
|
||||
#
|
||||
# Return currently requested ARCH id
|
||||
#
|
||||
## Report the strongest instruction set requested by the ENABLE_* options
## into VAR. Falls back to ANY when no SIMD option is enabled.
function(_currently_requested_top_arch VAR)
    ## Walk the options from weakest to strongest; the last enabled one wins,
    ## which is equivalent to the usual AVX512F > AVX2 > SSE42 priority.
    set(RES ANY)
    if(ENABLE_SSE42)
        set(RES SSE42)
    endif()
    if(ENABLE_AVX2)
        set(RES AVX2)
    endif()
    if(ENABLE_AVX512F)
        set(RES AVX512F)
    endif()
    set(${VAR} "${RES}" PARENT_SCOPE)
endfunction()
|
||||
|
||||
#####################################
|
||||
#
|
||||
# Utils to handle with cmake target
|
||||
#
|
||||
## Drop SOURCE_FILE from TARGET's SOURCES property.
function(_remove_source_from_target TARGET SOURCE_FILE)
    get_target_property(_sources ${TARGET} SOURCES)
    ## Match on the trailing file name only, so the entry is removed
    ## whatever path prefix it was registered with.
    list(FILTER _sources EXCLUDE REGEX ".*${SOURCE_FILE}$")
    set_target_properties(${TARGET} PROPERTIES SOURCES "${_sources}")
endfunction()
|
||||
|
||||
## Append every extra argument as a source of TARGET.
function(_add_source_to_target TARGET)
    get_target_property(_sources ${TARGET} SOURCES)
    list(APPEND _sources ${ARGN})
    set_target_properties(${TARGET} PROPERTIES SOURCES "${_sources}")
endfunction()
|
||||
|
||||
## Append the extra arguments (space-joined) to SOURCE's COMPILE_FLAGS
## source-file property.
function(_add_source_compile_flags SOURCE)
    get_source_file_property(_flags ${SOURCE} COMPILE_FLAGS)
    ## An unset COMPILE_FLAGS property is reported as NOTFOUND.
    if(NOT _flags)
        set(_flags "")
    endif()
    ## ARGN is a ;-list — flatten it into a single flags string.
    string(REPLACE ";" " " _extra "${ARGN}")
    string(APPEND _flags " " "${_extra}")
    set_source_files_properties(${SOURCE} PROPERTIES COMPILE_FLAGS "${_flags}")
endfunction()
|
||||
@@ -2,7 +2,10 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
set(CMAKE_MODULE_PATH "${OpenVINO_MAIN_SOURCE_DIR}/cmake/download" ${CMAKE_MODULE_PATH})
|
||||
list(APPEND CMAKE_MODULE_PATH
|
||||
"${OpenVINO_MAIN_SOURCE_DIR}/cmake/download"
|
||||
"${OpenVINO_MAIN_SOURCE_DIR}/cmake/cross_compile"
|
||||
)
|
||||
|
||||
include(CPackComponent)
|
||||
unset(IE_CPACK_COMPONENTS_ALL CACHE)
|
||||
@@ -36,9 +39,13 @@ function(ie_cpack_set_library_dir)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
set(IE_CPACK_LIBRARY_PATH ${IE_CPACK_IE_DIR}/lib/${CMAKE_BUILD_TYPE}/${ARCH} PARENT_SCOPE)
|
||||
set(IE_CPACK_LIBRARY_PATH ${IE_CPACK_IE_DIR}/lib/${ARCH}/${CMAKE_BUILD_TYPE} PARENT_SCOPE)
|
||||
set(IE_CPACK_RUNTIME_PATH ${IE_CPACK_IE_DIR}/bin/${ARCH}/${CMAKE_BUILD_TYPE} PARENT_SCOPE)
|
||||
set(IE_CPACK_ARCHIVE_PATH ${IE_CPACK_IE_DIR}/lib/${ARCH}/${CMAKE_BUILD_TYPE} PARENT_SCOPE)
|
||||
else()
|
||||
set(IE_CPACK_LIBRARY_PATH ${IE_CPACK_IE_DIR}/lib/${ARCH} PARENT_SCOPE)
|
||||
set(IE_CPACK_RUNTIME_PATH ${IE_CPACK_IE_DIR}/lib/${ARCH} PARENT_SCOPE)
|
||||
set(IE_CPACK_ARCHIVE_PATH ${IE_CPACK_IE_DIR}/lib/${ARCH} PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
@@ -57,6 +64,7 @@ endmacro()
|
||||
|
||||
macro(ie_cpack)
|
||||
set(CPACK_GENERATOR "TGZ")
|
||||
string(REPLACE "/" "_" CPACK_PACKAGE_VERSION "${CI_BUILD_NUMBER}")
|
||||
if(WIN32)
|
||||
set(CPACK_PACKAGE_NAME inference-engine_${CMAKE_BUILD_TYPE})
|
||||
else()
|
||||
@@ -137,7 +145,10 @@ if("${CMAKE_BUILD_TYPE}" STREQUAL "")
|
||||
set(CMAKE_BUILD_TYPE "Release")
|
||||
endif()
|
||||
|
||||
set(OUTPUT_ROOT ${OpenVINO_MAIN_SOURCE_DIR})
|
||||
# allow to override default OUTPUT_ROOT root
|
||||
if(NOT DEFINED OUTPUT_ROOT)
|
||||
set(OUTPUT_ROOT ${OpenVINO_MAIN_SOURCE_DIR})
|
||||
endif()
|
||||
|
||||
# Enable postfixes for Debug/Release builds
|
||||
set(IE_DEBUG_POSTFIX_WIN "d")
|
||||
@@ -161,8 +172,8 @@ endif()
|
||||
set(CMAKE_DEBUG_POSTFIX ${IE_DEBUG_POSTFIX})
|
||||
set(CMAKE_RELEASE_POSTFIX ${IE_RELEASE_POSTFIX})
|
||||
|
||||
if (WIN32)
|
||||
# Support CMake multiconfiguration for Visual Studio build
|
||||
if (WIN32 OR CMAKE_GENERATOR STREQUAL "Xcode")
|
||||
# Support CMake multiconfiguration for Visual Studio or Xcode build
|
||||
set(IE_BUILD_POSTFIX $<$<CONFIG:Debug>:${IE_DEBUG_POSTFIX}>$<$<CONFIG:Release>:${IE_RELEASE_POSTFIX}>)
|
||||
else ()
|
||||
if (${CMAKE_BUILD_TYPE} STREQUAL "Debug" )
|
||||
@@ -176,10 +187,6 @@ message(STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
|
||||
add_definitions(-DIE_BUILD_POSTFIX=\"${IE_BUILD_POSTFIX}\")
|
||||
|
||||
if(NOT UNIX)
|
||||
if (WIN32)
|
||||
# set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /MT")
|
||||
# set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /MTd")
|
||||
endif()
|
||||
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER})
|
||||
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER})
|
||||
set(CMAKE_COMPILE_PDB_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER})
|
||||
@@ -194,15 +201,22 @@ else()
|
||||
endif()
|
||||
|
||||
if(APPLE)
|
||||
set(CMAKE_MACOSX_RPATH 1)
|
||||
endif(APPLE)
|
||||
# WA for Xcode generator + object libraries issue:
|
||||
# https://gitlab.kitware.com/cmake/cmake/issues/20260
|
||||
# http://cmake.3232098.n2.nabble.com/XCODE-DEPEND-HELPER-make-Deletes-Targets-Before-and-While-They-re-Built-td7598277.html
|
||||
set(CMAKE_XCODE_GENERATE_TOP_LEVEL_PROJECT_ONLY ON)
|
||||
set(CMAKE_MACOSX_RPATH ON)
|
||||
endif()
|
||||
|
||||
# Use solution folders
|
||||
set_property(GLOBAL PROPERTY USE_FOLDERS ON)
|
||||
|
||||
set(CMAKE_POLICY_DEFAULT_CMP0054 NEW)
|
||||
|
||||
include(sdl)
|
||||
include(os_flags NO_POLICY_SCOPE)
|
||||
include(os_flags)
|
||||
include(sanitizer)
|
||||
include(cross_compiled_func)
|
||||
|
||||
function(set_ci_build_number)
|
||||
set(OpenVINO_MAIN_SOURCE_DIR "${CMAKE_SOURCE_DIR}")
|
||||
|
||||
@@ -138,6 +138,14 @@ function (RESOLVE_DEPENDENCY NAME_OF_CMAKE_VAR)
|
||||
|
||||
endfunction(RESOLVE_DEPENDENCY)
|
||||
|
||||
## Download/unpack a model archive for `network` via RESOLVE_DEPENDENCY and
## rewrite the caller's `network_model_path` variable as a path relative to
## MODELS_PATH (prefixed with ".").
function (resolve_model_dependency network archive network_model_path)
    RESOLVE_DEPENDENCY(${network_model_path}
            ARCHIVE "models_archives/${archive}"
            TARGET_PATH "${MODELS_PATH}/${network}")
    ## Turn the absolute path into "./<relative-to-MODELS_PATH>".
    string (REPLACE ${MODELS_PATH} "" relative_path ${${network_model_path}})
    set(${network_model_path} ".${relative_path}" PARENT_SCOPE)
endfunction()
|
||||
|
||||
function(reset_deps_cache)
|
||||
#
|
||||
# Reset the dependencies cache if it was set by dependency solver
|
||||
|
||||
@@ -7,7 +7,7 @@ include ("download_and_check")
|
||||
|
||||
function (GetNameAndUrlToDownload name url archive_name_unified archive_name_win archive_name_lin archive_name_mac archive_name_android)
|
||||
if (archive_name_unified)
|
||||
set (${url} "${archive_name_unified}" PARENT_SCOPE)
|
||||
set (${url} "thirdparty/unified/${archive_name_unified}" PARENT_SCOPE)
|
||||
set (${name} ${archive_name_unified} PARENT_SCOPE)
|
||||
else()
|
||||
if(archive_name_lin)
|
||||
@@ -27,7 +27,7 @@ function (GetNameAndUrlToDownload name url archive_name_unified archive_name_win
|
||||
endif()
|
||||
|
||||
set (${name} ${archive_name} PARENT_SCOPE)
|
||||
set (${url} "${archive_name}" PARENT_SCOPE)
|
||||
set (${url} "thirdparty/${PLATFORM_FOLDER}/${archive_name}" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction(GetNameAndUrlToDownload)
|
||||
|
||||
@@ -151,10 +151,12 @@ function (CheckOrDownloadAndExtract component RELATIVE_URL archive_name unpacked
|
||||
set (status "ON")
|
||||
set (on_master FALSE)
|
||||
|
||||
if(DEFINED ENV{IE_PATH_TO_DEPS})
|
||||
if(DEFINED IE_PATH_TO_DEPS)
|
||||
set(URL "${IE_PATH_TO_DEPS}/${RELATIVE_URL}")
|
||||
elseif(DEFINED ENV{IE_PATH_TO_DEPS})
|
||||
set(URL "$ENV{IE_PATH_TO_DEPS}/${RELATIVE_URL}")
|
||||
else()
|
||||
set(URL "https://download.01.org/opencv/2020/openvinotoolkit/2020.2/inference_engine/${RELATIVE_URL}")
|
||||
set(URL "https://download.01.org/opencv/master/openvinotoolkit/${RELATIVE_URL}")
|
||||
endif()
|
||||
|
||||
#no message on recursive calls
|
||||
|
||||
@@ -12,6 +12,9 @@ if(X86_64)
|
||||
else()
|
||||
set(ENABLE_MKL_DNN_DEFAULT OFF)
|
||||
endif()
|
||||
|
||||
ie_option (ENABLE_TESTS "unit, behavior and functional tests" OFF)
|
||||
|
||||
ie_option (ENABLE_MKL_DNN "MKL-DNN plugin for inference engine" ${ENABLE_MKL_DNN_DEFAULT})
|
||||
|
||||
ie_dependent_option (ENABLE_CLDNN "clDnn based plugin for inference engine" ON "WIN32 OR X86_64;NOT APPLE;NOT MINGW" OFF)
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
|
||||
function(enable_fuzzing)
|
||||
# Enable (libFuzzer)[https://llvm.org/docs/LibFuzzer.html] if supported.
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" AND NOT WIN32)
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "^(Apple)?Clang$" AND NOT WIN32)
|
||||
# Communicate libfuzzer is enabled
|
||||
set(WITH_LIBFUZZER ON PARENT_SCOPE)
|
||||
add_compile_definitions(WITH_LIBFUZZER)
|
||||
@@ -2,19 +2,21 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
include(ProcessorCount)
|
||||
|
||||
#
|
||||
# Disables deprecated warnings generation
|
||||
# Defines ie_c_cxx_deprecated varaible which contains C / C++ compiler flags
|
||||
#
|
||||
macro(disable_deprecated_warnings)
|
||||
if(WIN32)
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(ie_c_cxx_deprecated "/Qdiag-disable:1478,1786")
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
set(ie_c_cxx_deprecated "/wd4996")
|
||||
endif()
|
||||
else()
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(ie_c_cxx_deprecated "-diag-disable=1478,1786")
|
||||
else()
|
||||
set(ie_c_cxx_deprecated "-Wno-deprecated-declarations")
|
||||
@@ -35,13 +37,13 @@ endmacro()
|
||||
#
|
||||
macro(ie_deprecated_no_errors)
|
||||
if(WIN32)
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(ie_c_cxx_deprecated "/Qdiag-warning:1478,1786")
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
set(ie_c_cxx_deprecated "/wd4996")
|
||||
endif()
|
||||
else()
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(ie_c_cxx_deprecated_no_errors "-diag-warning=1478,1786")
|
||||
else()
|
||||
set(ie_c_cxx_deprecated_no_errors "-Wno-error=deprecated-declarations")
|
||||
@@ -61,15 +63,15 @@ endmacro()
|
||||
#
|
||||
function(ie_sse42_optimization_flags flags)
|
||||
if(WIN32)
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
# No such option for MSVC 2019
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL Intel)
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(${flags} "/arch:SSE4.2 /QxSSE4.2" PARENT_SCOPE)
|
||||
else()
|
||||
message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}")
|
||||
endif()
|
||||
else()
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(${flags} "-msse4.2 -xSSE4.2" PARENT_SCOPE)
|
||||
else()
|
||||
set(${flags} "-msse4.2" PARENT_SCOPE)
|
||||
@@ -82,15 +84,15 @@ endfunction()
|
||||
#
|
||||
function(ie_avx2_optimization_flags flags)
|
||||
if(WIN32)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(${flags} "/QxCORE-AVX2" PARENT_SCOPE)
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
set(${flags} "/arch:AVX2" PARENT_SCOPE)
|
||||
else()
|
||||
message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}")
|
||||
endif()
|
||||
else()
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(${flags} "-march=core-avx2 -xCORE-AVX2 -mtune=core-avx2" PARENT_SCOPE)
|
||||
else()
|
||||
set(${flags} "-mavx2 -mfma" PARENT_SCOPE)
|
||||
@@ -104,18 +106,21 @@ endfunction()
|
||||
#
|
||||
function(ie_avx512_optimization_flags flags)
|
||||
if(WIN32)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(${flags} "/QxCOMMON-AVX512" PARENT_SCOPE)
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
set(${flags} "/arch:AVX512" PARENT_SCOPE)
|
||||
else()
|
||||
message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}")
|
||||
endif()
|
||||
else()
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set(${flags} "-xCOMMON-AVX512" PARENT_SCOPE)
|
||||
endif()
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL GNU)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set(${flags} "-mavx512f -mfma" PARENT_SCOPE)
|
||||
endif()
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "^(Clang|AppleClang)$")
|
||||
set(${flags} "-mavx512f -mfma" PARENT_SCOPE)
|
||||
endif()
|
||||
endif()
|
||||
@@ -125,7 +130,22 @@ endfunction()
|
||||
# Enables Link Time Optimization compilation
|
||||
#
|
||||
macro(ie_enable_lto)
|
||||
if(UNIX)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel" AND OFF)
|
||||
ProcessorCount(N)
|
||||
if(UNIX)
|
||||
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -ipo")
|
||||
set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -ipo")
|
||||
set(CMAKE_EXE_LINKER_FLAGS_RELEASE "${CMAKE_EXE_LINKER_FLAGS_RELEASE} -ipo-jobs${N}")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} -ipo-jobs${N}")
|
||||
set(CMAKE_MODULE_LINKER_FLAGS_RELEASE "${CMAKE_MODULE_LINKER_FLAGS_RELEASE} -ipo-jobs${N}")
|
||||
else()
|
||||
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /Qipo")
|
||||
set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} /Qipo")
|
||||
set(CMAKE_EXE_LINKER_FLAGS_RELEASE "${CMAKE_EXE_LINKER_FLAGS_RELEASE} /Qipo-jobs:${N}")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} /Qipo-jobs:${N}")
|
||||
set(CMAKE_MODULE_LINKER_FLAGS_RELEASE "${CMAKE_MODULE_LINKER_FLAGS_RELEASE} /Qipo-jobs:${N}")
|
||||
endif()
|
||||
elseif(UNIX)
|
||||
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -flto")
|
||||
# LTO causes issues with gcc 4.8.5 during cmake pthread check
|
||||
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 4.9)
|
||||
@@ -137,14 +157,12 @@ macro(ie_enable_lto)
|
||||
set(CMAKE_AR "gcc-ar")
|
||||
set(CMAKE_RANLIB "gcc-ranlib")
|
||||
endif()
|
||||
elseif(WIN32)
|
||||
if(CMAKE_BUILD_TYPE STREQUAL Release)
|
||||
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GL")
|
||||
# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /GL")
|
||||
# set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /LTCG:STATUS")
|
||||
# set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /LTCG:STATUS")
|
||||
# set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /LTCG:STATUS")
|
||||
endif()
|
||||
elseif(MSVC AND OFF)
|
||||
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /GL")
|
||||
set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} /GL")
|
||||
set(CMAKE_EXE_LINKER_FLAGS_RELEASE "${CMAKE_EXE_LINKER_FLAGS_RELEASE} /LTCG:STATUS")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} /LTCG:STATUS")
|
||||
set(CMAKE_MODULE_LINKER_FLAGS_RELEASE "${CMAKE_MODULE_LINKER_FLAGS_RELEASE} /LTCG:STATUS")
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
@@ -167,7 +185,7 @@ set(THREADS_PREFER_PTHREAD_FLAG ON)
|
||||
|
||||
# to allows to override CMAKE_CXX_STANDARD from command line
|
||||
if(NOT DEFINED CMAKE_CXX_STANDARD)
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
set(CMAKE_CXX_STANDARD 14)
|
||||
else()
|
||||
set(CMAKE_CXX_STANDARD 11)
|
||||
@@ -176,7 +194,7 @@ if(NOT DEFINED CMAKE_CXX_STANDARD)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
endif()
|
||||
|
||||
if(COVERAGE)
|
||||
if(ENABLE_COVERAGE)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage")
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} --coverage")
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --coverage")
|
||||
@@ -198,10 +216,10 @@ if(WIN32)
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /LARGEADDRESSAWARE")
|
||||
|
||||
if (TREAT_WARNING_AS_ERROR)
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
ie_add_compiler_flags(/WX)
|
||||
ie_add_compiler_flags(/Qdiag-warning:47,1740,1786)
|
||||
elseif (CMAKE_CXX_COMPILER_ID MATCHES MSVC)
|
||||
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
# ie_add_compiler_flags(/WX) # Too many warnings
|
||||
endif()
|
||||
endif()
|
||||
@@ -212,43 +230,30 @@ if(WIN32)
|
||||
|
||||
# Disable noisy warnings
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
# C4251 needs to have dll-interface to be used by clients of class
|
||||
ie_add_compiler_flags(/wd4251)
|
||||
# C4275 non dll-interface class used as base for dll-interface class
|
||||
ie_add_compiler_flags(/wd4275)
|
||||
endif()
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
# 161 unrecognized pragma
|
||||
# 177 variable was declared but never referenced
|
||||
# 556 not matched type of assigned function pointer
|
||||
# 1744: field of class type without a DLL interface used in a class with a DLL interface
|
||||
# 2586 decorated name length exceeded, name was truncated
|
||||
# 2651: attribute does not apply to any entity
|
||||
# 3180 unrecognized OpenMP pragma
|
||||
# 11075: To get full report use -Qopt-report:4 -Qopt-report-phase ipo
|
||||
# 15335 was not vectorized: vectorization possible but seems inefficient. Use vector always directive or /Qvec-threshold0 to override
|
||||
ie_add_compiler_flags(/Qdiag-disable:161,177,556,2586,2651,3180,11075,15335)
|
||||
ie_add_compiler_flags(/Qdiag-disable:161,177,556,1744,2586,2651,3180,11075,15335)
|
||||
endif()
|
||||
|
||||
# Debug information flags
|
||||
|
||||
set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} /Z7")
|
||||
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /Z7")
|
||||
|
||||
if(ENABLE_DEBUG_SYMBOLS)
|
||||
ie_add_compiler_flags(/Z7)
|
||||
|
||||
set(DEBUG_SYMBOLS_LINKER_FLAGS "/DEBUG")
|
||||
if (CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
# Keep default /OPT values. See /DEBUG reference for details.
|
||||
set(DEBUG_SYMBOLS_LINKER_FLAGS "${DEBUG_SYMBOLS_LINKER_FLAGS} /OPT:REF /OPT:ICF")
|
||||
endif()
|
||||
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${DEBUG_SYMBOLS_LINKER_FLAGS}")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${DEBUG_SYMBOLS_LINKER_FLAGS}")
|
||||
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${DEBUG_SYMBOLS_LINKER_FLAGS}")
|
||||
endif()
|
||||
else()
|
||||
# TODO: enable for C sources as well
|
||||
# ie_add_compiler_flags(-Werror)
|
||||
@@ -275,6 +280,8 @@ else()
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
ie_add_compiler_flags(-diag-disable=remark)
|
||||
# noisy warnings from Intel Compiler 19.1.1.217 20200306
|
||||
ie_add_compiler_flags(-diag-disable=2196)
|
||||
endif()
|
||||
|
||||
# Linker flags
|
||||
|
||||
@@ -14,6 +14,8 @@ if (ENABLE_SANITIZER)
|
||||
set(SANITIZER_LINKER_FLAGS "-fsanitize=address")
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set(SANITIZER_LINKER_FLAGS "${SANITIZER_LINKER_FLAGS} -fuse-ld=gold")
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES "^(Apple)?Clang$" AND NOT WIN32)
|
||||
set(SANITIZER_LINKER_FLAGS "${SANITIZER_LINKER_FLAGS} -fuse-ld=lld")
|
||||
endif()
|
||||
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SANITIZER_COMPILER_FLAGS}")
|
||||
@@ -24,10 +26,15 @@ if (ENABLE_SANITIZER)
|
||||
endif()
|
||||
|
||||
if (ENABLE_THREAD_SANITIZER)
|
||||
set(SANITIZER_COMPILER_FLAGS "-g -fsanitize=thread")
|
||||
|
||||
set(SANITIZER_COMPILER_FLAGS "-g -fsanitize=thread -fno-omit-frame-pointer")
|
||||
set(SANITIZER_LINKER_FLAGS "-fsanitize=thread")
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "^(Apple)?Clang$" AND NOT WIN32)
|
||||
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 8.0)
|
||||
set(SANITIZER_LINKER_FLAGS "${SANITIZER_LINKER_FLAGS} -fuse-ld=lld")
|
||||
else()
|
||||
set(SANITIZER_LINKER_FLAGS "${SANITIZER_LINKER_FLAGS} -static-libsan")
|
||||
endif()
|
||||
endif()
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SANITIZER_COMPILER_FLAGS}")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SANITIZER_COMPILER_FLAGS}")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${SANITIZER_LINKER_FLAGS}")
|
||||
|
||||
@@ -25,7 +25,7 @@ if (CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
if (NOT ENABLE_SANITIZER)
|
||||
set(IE_C_CXX_FLAGS "${IE_C_CXX_FLAGS} -s")
|
||||
endif()
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES "^(Apple)?Clang$")
|
||||
set(IE_C_CXX_FLAGS "${IE_C_CXX_FLAGS} -fstack-protector-all")
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
if (NOT ENABLE_SANITIZER)
|
||||
@@ -36,8 +36,8 @@ if (CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
set(CMAKE_MODULE_LINKER_FLAGS_RELEASE "${CMAKE_MODULE_LINKER_FLAGS_RELEASE} -z noexecstack -z relro -z now")
|
||||
set(CMAKE_EXE_LINKER_FLAGS_RELEASE "${CMAKE_EXE_LINKER_FLAGS_RELEASE} -z noexecstack -z relro -z now")
|
||||
endif()
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
|
||||
set(IE_C_CXX_FLAGS "${IE_C_CXX_FLAGS} /sdl")
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
set(IE_C_CXX_FLAGS "${IE_C_CXX_FLAGS} /sdl /guard:cf")
|
||||
endif()
|
||||
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${IE_C_CXX_FLAGS}")
|
||||
|
||||
@@ -7,7 +7,7 @@ if(CMAKE_CL_64)
|
||||
set(MSVC64 ON)
|
||||
endif()
|
||||
|
||||
if(WIN32 AND CMAKE_CXX_COMPILER_ID MATCHES "GNU")
|
||||
if(WIN32 AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -dumpmachine
|
||||
OUTPUT_VARIABLE OPENVINO_GCC_TARGET_MACHINE
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
|
||||
60
docs/CMakeLists.txt
Normal file
60
docs/CMakeLists.txt
Normal file
@@ -0,0 +1,60 @@
|
||||
# Copyright (C) 2018-2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
add_subdirectory(examples)

# Detect nGraph
find_package(ngraph QUIET)
if(NOT ngraph_FOUND)
    # Fall back to the in-tree build directory.
    set(ngraph_DIR ${CMAKE_BINARY_DIR}/ngraph)
endif()

# Detect InferenceEngine
find_package(InferenceEngine QUIET)
if(NOT InferenceEngine_FOUND)
    # Fall back to the in-tree build directory.
    set(InferenceEngine_DIR ${CMAKE_BINARY_DIR})
endif()

add_subdirectory(template_extension)

# Group all docs-related targets under the "docs" IDE folder.
# Some of these targets are optional, hence the TARGET guard below.
set(all_docs_targets
        ie_docs_examples
        template_extension
        templatePlugin TemplateBehaviorTests TemplateFunctionalTests)
foreach(target_name IN LISTS all_docs_targets)
    if (TARGET ${target_name})
        set_target_properties(${target_name} PROPERTIES FOLDER docs)
    endif()
endforeach()

# OpenVINO docs

set(OPENVINO_DOCS_PATH "" CACHE PATH "Path to openvino-documentation local repository")
set(args "")

if(OPENVINO_DOCS_PATH)
    # Forwarded to build_docs.sh below.
    set(args "${args} ovinodoc_path:${OPENVINO_DOCS_PATH}")
endif()

# Listed as SOURCES of the custom target so IDEs show them.
file(GLOB_RECURSE docs_files "${OpenVINO_MAIN_SOURCE_DIR}/docs")
file(GLOB_RECURSE include_files "${OpenVINO_MAIN_SOURCE_DIR}/inference-engine/include")
file(GLOB_RECURSE ovino_files "${OPENVINO_DOCS_PATH}")

# Build documentation via the shell script (always out-of-date target).
add_custom_target(ie_docs
                  COMMAND ./build_docs.sh ${args}
                  WORKING_DIRECTORY "${OpenVINO_MAIN_SOURCE_DIR}/docs/build_documentation"
                  COMMENT "Generating OpenVINO documentation"
                  SOURCES ${docs_files} ${include_files} ${ovino_files}
                  VERBATIM)
set_target_properties(ie_docs PROPERTIES FOLDER docs)

# Convenience target to open the built docs in the default browser (Linux).
find_program(browser NAMES xdg-open)
if(browser)
    add_custom_target(ie_docs_open
                      COMMAND ${browser} "${OpenVINO_MAIN_SOURCE_DIR}/doc/html/index.html"
                      DEPENDS ie_docs
                      COMMENT "Open OpenVINO documentation"
                      VERBATIM)
    set_target_properties(ie_docs_open PROPERTIES FOLDER docs)
endif()
||||
13
docs/examples/CMakeLists.txt
Normal file
13
docs/examples/CMakeLists.txt
Normal file
@@ -0,0 +1,13 @@
|
||||
# Copyright (C) 2018-2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
set(TARGET_NAME ie_docs_examples)

file(GLOB SOURCES *.cpp)

## Use ${TARGET_NAME} consistently — the literal name here duplicated the
## variable declared above and would silently diverge if it were renamed.
add_library(${TARGET_NAME} STATIC ${SOURCES})

target_link_libraries(${TARGET_NAME} PRIVATE inference_engine_plugin_api)

#add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
68
docs/examples/example_async_infer_request.cpp
Normal file
68
docs/examples/example_async_infer_request.cpp
Normal file
@@ -0,0 +1,68 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <threading/ie_itask_executor.hpp>
|
||||
#include <cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp>
|
||||
#include <memory>
|
||||
|
||||
using namespace InferenceEngine;
|
||||
|
||||
class AcceleratorSyncRequest : public InferRequestInternal {
|
||||
public:
|
||||
using Ptr = std::shared_ptr<AcceleratorSyncRequest>;
|
||||
|
||||
void Preprocess();
|
||||
void WriteToDevice();
|
||||
void RunOnDevice();
|
||||
void ReadFromDevice();
|
||||
void PostProcess();
|
||||
};
|
||||
|
||||
// ! [async_infer_request:define_pipeline]
|
||||
// Inherits from AsyncInferRequestThreadSafeDefault
|
||||
class AcceleratorAsyncInferRequest : public AsyncInferRequestThreadSafeDefault {
|
||||
// Store the pointer to the synchronous request and five executors
|
||||
AcceleratorAsyncInferRequest(const AcceleratorSyncRequest::Ptr& syncRequest,
|
||||
const ITaskExecutor::Ptr& preprocessExecutor,
|
||||
const ITaskExecutor::Ptr& writeToDeviceExecutor,
|
||||
const ITaskExecutor::Ptr& runOnDeviceExecutor,
|
||||
const ITaskExecutor::Ptr& readFromDeviceExecutor,
|
||||
const ITaskExecutor::Ptr& postProcessExecutor) :
|
||||
AsyncInferRequestThreadSafeDefault(syncRequest, nullptr, nullptr),
|
||||
_accSyncRequest{syncRequest},
|
||||
_preprocessExecutor{preprocessExecutor},
|
||||
_writeToDeviceExecutor{writeToDeviceExecutor},
|
||||
_runOnDeviceExecutor{runOnDeviceExecutor},
|
||||
_readFromDeviceExecutor{readFromDeviceExecutor},
|
||||
_postProcessExecutor{postProcessExecutor}
|
||||
{
|
||||
// Five pipeline stages of synchronous infer request are run by different executors
|
||||
_pipeline = {
|
||||
{ _preprocessExecutor , [this] {
|
||||
_accSyncRequest->Preprocess();
|
||||
}},
|
||||
{ _writeToDeviceExecutor , [this] {
|
||||
_accSyncRequest->WriteToDevice();
|
||||
}},
|
||||
{ _runOnDeviceExecutor , [this] {
|
||||
_accSyncRequest->RunOnDevice();
|
||||
}},
|
||||
{ _readFromDeviceExecutor , [this] {
|
||||
_accSyncRequest->ReadFromDevice();
|
||||
}},
|
||||
{ _postProcessExecutor , [this] {
|
||||
_accSyncRequest->PostProcess();
|
||||
}},
|
||||
};
|
||||
}
|
||||
|
||||
// As all stages use _accSyncRequest member we should wait for all stages tasks before the destructor destroy this member.
|
||||
~AcceleratorAsyncInferRequest() {
|
||||
StopAndWait();
|
||||
}
|
||||
|
||||
AcceleratorSyncRequest::Ptr _accSyncRequest;
|
||||
ITaskExecutor::Ptr _preprocessExecutor, _writeToDeviceExecutor, _runOnDeviceExecutor, _readFromDeviceExecutor, _postProcessExecutor;
|
||||
};
|
||||
// ! [async_infer_request:define_pipeline]
|
||||
53
docs/examples/example_itask_executor.cpp
Normal file
53
docs/examples/example_itask_executor.cpp
Normal file
@@ -0,0 +1,53 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <threading/ie_cpu_streams_executor.hpp>
|
||||
|
||||
#include <memory>
|
||||
#include <future>
|
||||
#include <iostream>
|
||||
|
||||
void example1() {
|
||||
// ! [itask_executor:define_pipeline]
|
||||
// std::promise is move only object so to satisfy copy callable constraint we use std::shared_ptr
|
||||
auto promise = std::make_shared<std::promise<void>>();
|
||||
// When the promise is created we can get std::future to wait the result
|
||||
auto future = promise->get_future();
|
||||
// Rather simple task
|
||||
InferenceEngine::Task task = [] {std::cout << "Some Output" << std::endl; };
|
||||
// Create an executor
|
||||
InferenceEngine::ITaskExecutor::Ptr taskExecutor = std::make_shared<InferenceEngine::CPUStreamsExecutor>();
|
||||
if (taskExecutor == nullptr) {
|
||||
// ProcessError(e);
|
||||
return;
|
||||
}
|
||||
// We capture the task and the promise. When the task is executed in the task executor context
|
||||
// we munually call std::promise::set_value() method
|
||||
taskExecutor->run([task, promise] {
|
||||
std::exception_ptr currentException;
|
||||
try {
|
||||
task();
|
||||
} catch(...) {
|
||||
// If there is some exceptions store the pointer to current exception
|
||||
currentException = std::current_exception();
|
||||
}
|
||||
|
||||
if (nullptr == currentException) {
|
||||
promise->set_value(); // <-- If there is no problems just call std::promise::set_value()
|
||||
} else {
|
||||
promise->set_exception(currentException); // <-- If there is an exception forward it to std::future object
|
||||
}
|
||||
});
|
||||
// To wait the task completion we call std::future::wait method
|
||||
future.wait(); // The current thread will be blocked here and wait when std::promise::set_value()
|
||||
// or std::promise::set_exception() method will be called.
|
||||
|
||||
// If the future store the exception it will be rethrown in std::future::get method
|
||||
try {
|
||||
future.get();
|
||||
} catch(std::exception& /*e*/) {
|
||||
// ProcessError(e);
|
||||
}
|
||||
// ! [itask_executor:define_pipeline]
|
||||
}
|
||||
18
docs/template_extension/CMakeLists.txt
Normal file
18
docs/template_extension/CMakeLists.txt
Normal file
@@ -0,0 +1,18 @@
|
||||
# Copyright (C) 2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
# [cmake:extension]
|
||||
set(TARGET_NAME "template_extension")
|
||||
|
||||
find_package(ngraph REQUIRED)
|
||||
find_package(InferenceEngine REQUIRED)
|
||||
|
||||
file(GLOB_RECURSE SRC *.cpp)
|
||||
|
||||
add_library(${TARGET_NAME} SHARED ${SRC})
|
||||
|
||||
target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_INFERENCE_EXTENSION_API)
|
||||
target_link_libraries(${TARGET_NAME} PRIVATE ${InferenceEngine_LIBRARIES}
|
||||
${NGRAPH_LIBRARIES})
|
||||
# [cmake:extension]
|
||||
124
docs/template_extension/cpu_kernel.cpp
Normal file
124
docs/template_extension/cpu_kernel.cpp
Normal file
@@ -0,0 +1,124 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
#include "cpu_kernel.hpp"
|
||||
#include "op.hpp"
|
||||
#include <details/ie_exception.hpp>
|
||||
#include <ie_layouts.h>
|
||||
|
||||
using namespace TemplateExtension;
|
||||
|
||||
//! [cpu_implementation:ctor]
|
||||
OpImplementation::OpImplementation(const std::shared_ptr<ngraph::Node> &node) {
|
||||
try {
|
||||
auto castedNode = std::dynamic_pointer_cast<Operation>(node);
|
||||
if (!castedNode)
|
||||
THROW_IE_EXCEPTION << "Cannot create implementation for unknown operation!";
|
||||
if (castedNode->inputs().size() != 1 || castedNode->outputs().size() != 1)
|
||||
THROW_IE_EXCEPTION << "Cannot create implementation for operation with incorrect number of inputs or outputs!";
|
||||
if (castedNode->get_input_partial_shape(0).is_dynamic() || castedNode->get_output_partial_shape(0).is_dynamic())
|
||||
THROW_IE_EXCEPTION << "Cannot create implementation for op with dynamic shapes!";
|
||||
if (castedNode->get_input_shape(0).size() != 4 || castedNode->get_output_shape(0).size() != 4)
|
||||
THROW_IE_EXCEPTION << "Operation supports only 4d tensors for input and output.";
|
||||
if (castedNode->get_input_element_type(0) != ngraph::element::f32 || castedNode->get_output_element_type(0) != ngraph::element::f32)
|
||||
THROW_IE_EXCEPTION << "Operation supports only FP32 tensors.";
|
||||
add = castedNode->getAddAttr();
|
||||
} catch (InferenceEngine::details::InferenceEngineException& ex) {
|
||||
error = ex.what();
|
||||
}
|
||||
}
|
||||
//! [cpu_implementation:ctor]
|
||||
|
||||
//! [cpu_implementation:getSupportedConfigurations]
|
||||
InferenceEngine::StatusCode OpImplementation::getSupportedConfigurations(std::vector<InferenceEngine::LayerConfig> &conf,
|
||||
InferenceEngine::ResponseDesc *resp) noexcept {
|
||||
auto createConfig = [](const InferenceEngine::SizeVector inShape, const InferenceEngine::SizeVector& outShape, bool planar) {
|
||||
InferenceEngine::LayerConfig config;
|
||||
config.dynBatchSupport = false;
|
||||
InferenceEngine::DataConfig inData;
|
||||
InferenceEngine::DataConfig outData;
|
||||
InferenceEngine::SizeVector order = {0, 1, 2, 3};
|
||||
// Allow any offset before data
|
||||
size_t offset((std::numeric_limits<size_t>::max)());
|
||||
if (planar) {
|
||||
inData.desc = InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, inShape, {inShape, order, offset});
|
||||
config.inConfs.push_back(inData);
|
||||
outData.desc = InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, outShape, {outShape, order, offset});
|
||||
config.outConfs.push_back(outData);
|
||||
} else {
|
||||
// Add blocked (nChw8c) format
|
||||
auto div_up = [](const int a, const int b) -> int {
|
||||
if (!b)
|
||||
return 0;
|
||||
return (a + b - 1) / b;
|
||||
};
|
||||
|
||||
order.push_back(1);
|
||||
InferenceEngine::SizeVector inBlkDims = inShape;
|
||||
inBlkDims[1] = div_up(inBlkDims[1], 8);
|
||||
inBlkDims.push_back(8);
|
||||
InferenceEngine::SizeVector outBlkDims = outShape;
|
||||
outBlkDims[1] = div_up(outBlkDims[1], 8);
|
||||
outBlkDims.push_back(8);
|
||||
inData.desc = InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, inShape, {inBlkDims, order, offset});
|
||||
config.inConfs.push_back(inData);
|
||||
outData.desc = InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, outShape, {outBlkDims, order, offset});
|
||||
config.outConfs.push_back(outData);
|
||||
}
|
||||
return config;
|
||||
};
|
||||
if (!error.empty()) {
|
||||
if (resp) {
|
||||
strncpy(resp->msg, error.c_str(), sizeof(resp->msg) - 1);
|
||||
resp->msg[sizeof(resp->msg)-1] = 0;
|
||||
}
|
||||
return InferenceEngine::GENERAL_ERROR;
|
||||
}
|
||||
// Add planar format
|
||||
conf.emplace_back(createConfig(inShape, outShape, true));
|
||||
// Add blocked format nChw8c
|
||||
conf.emplace_back(createConfig(inShape, outShape, false));
|
||||
return InferenceEngine::OK;
|
||||
}
|
||||
//! [cpu_implementation:getSupportedConfigurations]
|
||||
|
||||
//! [cpu_implementation:init]
|
||||
InferenceEngine::StatusCode OpImplementation::init(InferenceEngine::LayerConfig &config, InferenceEngine::ResponseDesc *resp) noexcept {
|
||||
try {
|
||||
if (config.inConfs.size() != 1 || config.outConfs.size() != 1) {
|
||||
THROW_IE_EXCEPTION << "Operation cannot be initialized with incorrect number of inputs/outputs!";
|
||||
}
|
||||
|
||||
if (config.inConfs[0].desc.getDims().size() != 4 || config.outConfs[0].desc.getDims().size() != 4) {
|
||||
THROW_IE_EXCEPTION << "Operation can be initialized only with 4d input/output tensors!";
|
||||
}
|
||||
|
||||
if (config.outConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32 ||
|
||||
config.inConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32) {
|
||||
THROW_IE_EXCEPTION << "Operation supports only FP32 precisions!";
|
||||
}
|
||||
} catch (InferenceEngine::details::InferenceEngineException& ex) {
|
||||
if (resp) {
|
||||
strncpy(resp->msg, error.c_str(), sizeof(resp->msg) - 1);
|
||||
resp->msg[sizeof(resp->msg)-1] = 0;
|
||||
}
|
||||
return InferenceEngine::GENERAL_ERROR;
|
||||
}
|
||||
|
||||
return InferenceEngine::OK;
|
||||
}
|
||||
//! [cpu_implementation:init]
|
||||
|
||||
//! [cpu_implementation:execute]
|
||||
InferenceEngine::StatusCode OpImplementation::execute(std::vector<InferenceEngine::Blob::Ptr> &inputs,
|
||||
std::vector<InferenceEngine::Blob::Ptr> &outputs,
|
||||
InferenceEngine::ResponseDesc *resp) noexcept {
|
||||
const float* src_data = inputs[0]->cbuffer().as<const float *>() + inputs[0]->getTensorDesc().getBlockingDesc().getOffsetPadding();
|
||||
float *dst_data = outputs[0]->buffer().as<float *>() + outputs[0]->getTensorDesc().getBlockingDesc().getOffsetPadding();
|
||||
|
||||
for (size_t i = 0; i < inputs[0]->size(); i++) {
|
||||
dst_data[i] = src_data[i] + add;
|
||||
}
|
||||
return InferenceEngine::OK;
|
||||
}
|
||||
//! [cpu_implementation:execute]
|
||||
31
docs/template_extension/cpu_kernel.hpp
Normal file
31
docs/template_extension/cpu_kernel.hpp
Normal file
@@ -0,0 +1,31 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <ie_iextension.h>
|
||||
#include <ngraph/ngraph.hpp>
|
||||
|
||||
namespace TemplateExtension {
|
||||
|
||||
//! [cpu_implementation:header]
|
||||
class OpImplementation : public InferenceEngine::ILayerExecImpl {
|
||||
public:
|
||||
explicit OpImplementation(const std::shared_ptr<ngraph::Node>& node);
|
||||
InferenceEngine::StatusCode getSupportedConfigurations(std::vector<InferenceEngine::LayerConfig> &conf,
|
||||
InferenceEngine::ResponseDesc *resp) noexcept override;
|
||||
InferenceEngine::StatusCode init(InferenceEngine::LayerConfig &config,
|
||||
InferenceEngine::ResponseDesc *resp) noexcept override;
|
||||
InferenceEngine::StatusCode execute(std::vector<InferenceEngine::Blob::Ptr> &inputs,
|
||||
std::vector<InferenceEngine::Blob::Ptr> &outputs,
|
||||
InferenceEngine::ResponseDesc *resp) noexcept override;
|
||||
private:
|
||||
int64_t add;
|
||||
ngraph::Shape inShape;
|
||||
ngraph::Shape outShape;
|
||||
std::string error;
|
||||
};
|
||||
//! [cpu_implementation:header]
|
||||
|
||||
} // namespace TemplateExtension
|
||||
73
docs/template_extension/extension.cpp
Normal file
73
docs/template_extension/extension.cpp
Normal file
@@ -0,0 +1,73 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
#include "extension.hpp"
|
||||
#include "cpu_kernel.hpp"
|
||||
#include "op.hpp"
|
||||
#include <ngraph/factory.hpp>
|
||||
#include <ngraph/opsets/opset.hpp>
|
||||
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
#include <vector>
|
||||
|
||||
using namespace TemplateExtension;
|
||||
|
||||
//! [extension:GetVersion]
|
||||
void Extension::GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept {
|
||||
static InferenceEngine::Version ExtensionDescription = {
|
||||
{1, 0}, // extension API version
|
||||
"1.0",
|
||||
"template_ext" // extension description message
|
||||
};
|
||||
|
||||
versionInfo = &ExtensionDescription;
|
||||
}
|
||||
//! [extension:GetVersion]
|
||||
|
||||
//! [extension:getOpSets]
|
||||
std::map<std::string, ngraph::OpSet> Extension::getOpSets() {
|
||||
std::map<std::string, ngraph::OpSet> opsets;
|
||||
ngraph::OpSet opset;
|
||||
opset.insert<Operation>();
|
||||
opsets["custom_opset"] = opset;
|
||||
return opsets;
|
||||
}
|
||||
//! [extension:getOpSets]
|
||||
|
||||
//! [extension:getImplTypes]
|
||||
std::vector<std::string> Extension::getImplTypes(const std::shared_ptr<ngraph::Node> &node) {
|
||||
if (std::dynamic_pointer_cast<Operation>(node)) {
|
||||
return {"CPU"};
|
||||
}
|
||||
return {};
|
||||
}
|
||||
//! [extension:getImplTypes]
|
||||
|
||||
//! [extension:getImplementation]
|
||||
InferenceEngine::ILayerImpl::Ptr Extension::getImplementation(const std::shared_ptr<ngraph::Node> &node, const std::string &implType) {
|
||||
if (std::dynamic_pointer_cast<Operation>(node) && implType == "CPU") {
|
||||
return std::make_shared<OpImplementation>(node);
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
//! [extension:getImplementation]
|
||||
|
||||
//! [extension:CreateExtension]
|
||||
// Exported function
|
||||
INFERENCE_EXTENSION_API(InferenceEngine::StatusCode) InferenceEngine::CreateExtension(InferenceEngine::IExtension *&ext,
|
||||
InferenceEngine::ResponseDesc *resp) noexcept {
|
||||
try {
|
||||
ext = new Extension();
|
||||
return OK;
|
||||
} catch (std::exception &ex) {
|
||||
if (resp) {
|
||||
std::string err = ((std::string) "Couldn't create extension: ") + ex.what();
|
||||
err.copy(resp->msg, 255);
|
||||
}
|
||||
return InferenceEngine::GENERAL_ERROR;
|
||||
}
|
||||
}
|
||||
//! [extension:CreateExtension]
|
||||
31
docs/template_extension/extension.hpp
Normal file
31
docs/template_extension/extension.hpp
Normal file
@@ -0,0 +1,31 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <ie_iextension.h>
|
||||
#include <ie_api.h>
|
||||
#include <ngraph/ngraph.hpp>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#include <map>
|
||||
|
||||
//! [extension:header]
|
||||
namespace TemplateExtension {
|
||||
|
||||
class Extension : public InferenceEngine::IExtension {
|
||||
public:
|
||||
Extension() = default;
|
||||
void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override;
|
||||
void Unload() noexcept override {}
|
||||
void Release() noexcept override { delete this; }
|
||||
|
||||
std::map<std::string, ngraph::OpSet> getOpSets() override;
|
||||
std::vector<std::string> getImplTypes(const std::shared_ptr<ngraph::Node>& node) override;
|
||||
InferenceEngine::ILayerImpl::Ptr getImplementation(const std::shared_ptr<ngraph::Node>& node, const std::string& implType) override;
|
||||
};
|
||||
|
||||
} // namespace TemplateExtension
|
||||
//! [extension:header]
|
||||
38
docs/template_extension/op.cpp
Normal file
38
docs/template_extension/op.cpp
Normal file
@@ -0,0 +1,38 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
#include "op.hpp"
|
||||
|
||||
using namespace TemplateExtension;
|
||||
|
||||
constexpr ngraph::NodeTypeInfo Operation::type_info;
|
||||
|
||||
//! [op:ctor]
|
||||
Operation::Operation(const ngraph::Output<ngraph::Node> &arg, int64_t add) : Op({arg}), add(add) {
|
||||
constructor_validate_and_infer_types();
|
||||
}
|
||||
//! [op:ctor]
|
||||
|
||||
//! [op:validate]
|
||||
void Operation::validate_and_infer_types() {
|
||||
// Operation doesn't change shapes end element type
|
||||
set_output_type(0, get_input_element_type(0), get_input_partial_shape(0));
|
||||
}
|
||||
//! [op:validate]
|
||||
|
||||
//! [op:copy]
|
||||
std::shared_ptr<ngraph::Node> Operation::copy_with_new_args(const ngraph::NodeVector &new_args) const {
|
||||
if (new_args.size() != 1) {
|
||||
throw ngraph::ngraph_error("Incorrect number of new arguments");
|
||||
}
|
||||
|
||||
return std::make_shared<Operation>(new_args.at(0), add);
|
||||
}
|
||||
//! [op:copy]
|
||||
|
||||
//! [op:visit_attributes]
|
||||
bool Operation::visit_attributes(ngraph::AttributeVisitor &visitor) {
|
||||
visitor.on_attribute("add", add);
|
||||
return true;
|
||||
}
|
||||
//! [op:visit_attributes]
|
||||
29
docs/template_extension/op.hpp
Normal file
29
docs/template_extension/op.hpp
Normal file
@@ -0,0 +1,29 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <ngraph/ngraph.hpp>
|
||||
|
||||
//! [op:header]
|
||||
namespace TemplateExtension {
|
||||
|
||||
class Operation : public ngraph::op::Op {
|
||||
public:
|
||||
static constexpr ngraph::NodeTypeInfo type_info{"Template", 0};
|
||||
const ngraph::NodeTypeInfo& get_type_info() const override { return type_info; }
|
||||
|
||||
Operation() = default;
|
||||
Operation(const ngraph::Output<ngraph::Node>& arg, int64_t add);
|
||||
void validate_and_infer_types() override;
|
||||
std::shared_ptr<ngraph::Node> copy_with_new_args(const ngraph::NodeVector& new_args) const override;
|
||||
bool visit_attributes(ngraph::AttributeVisitor& visitor) override;
|
||||
int64_t getAddAttr() { return add; }
|
||||
|
||||
private:
|
||||
int64_t add;
|
||||
};
|
||||
//! [op:header]
|
||||
|
||||
} // namespace TemplateExtension
|
||||
39
docs/template_plugin/CMakeLists.txt
Normal file
39
docs/template_plugin/CMakeLists.txt
Normal file
@@ -0,0 +1,39 @@
|
||||
# Copyright (C) 2018 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
# [cmake:main]
|
||||
if (APPLE)
|
||||
# due to https://cmake.org/cmake/help/v3.12/policy/CMP0068.html
|
||||
cmake_minimum_required(VERSION 3.9 FATAL_ERROR)
|
||||
else()
|
||||
cmake_minimum_required(VERSION 3.7.2 FATAL_ERROR)
|
||||
endif()
|
||||
|
||||
project(InferenceEngineTemplatePlugin)
|
||||
|
||||
set(IE_MAIN_TEMPLATE_PLUGIN_SOURCE_DIR ${InferenceEngineTemplatePlugin_SOURCE_DIR})
|
||||
|
||||
find_package(InferenceEngineDeveloperPackage REQUIRED)
|
||||
|
||||
add_subdirectory(src)
|
||||
|
||||
if(ENABLE_TESTS)
|
||||
include(CTest)
|
||||
enable_testing()
|
||||
|
||||
if(ENABLE_FUNCTIONAL_TESTS)
|
||||
add_subdirectory(tests_deprecated/functional)
|
||||
add_subdirectory(tests/functional)
|
||||
endif()
|
||||
|
||||
if(ENABLE_BEH_TESTS)
|
||||
add_subdirectory(tests_deprecated/behavior)
|
||||
endif()
|
||||
endif()
|
||||
# [cmake:main]
|
||||
|
||||
# install
|
||||
|
||||
# ATTENTION: uncomment to install component
|
||||
# ie_cpack(template)
|
||||
18
docs/template_plugin/README.md
Normal file
18
docs/template_plugin/README.md
Normal file
@@ -0,0 +1,18 @@
|
||||
# template-plugin
|
||||
|
||||
Template Plugin for Inference Engine which demonstrates basics of how Inference Engine plugin can be built and implemented on top of Inference Engine Developer Package and Plugin API.
|
||||
|
||||
## How to build
|
||||
|
||||
```bash
|
||||
$ cd $DLDT_HOME
|
||||
$ mkdir $DLDT_HOME/build
|
||||
$ cd $DLDT_HOME/build
|
||||
$ cmake -DENABLE_TESTS=ON -DENABLE_BEH_TESTS=ON -DENABLE_FUNCTIONAL_TESTS=ON ..
|
||||
$ make -j8
|
||||
$ cd $TEMPLATE_PLUGIN_HOME
|
||||
$ mkdir $TEMPLATE_PLUGIN_HOME/build
|
||||
$ cd $TEMPLATE_PLUGIN_HOME/build
|
||||
$ cmake -DInferenceEngineDeveloperPackage_DIR=$DLDT_HOME/build ..
|
||||
$ make -j8
|
||||
```
|
||||
59
docs/template_plugin/include/template/template_config.hpp
Normal file
59
docs/template_plugin/include/template/template_config.hpp
Normal file
@@ -0,0 +1,59 @@
|
||||
// Copyright (C) 2018-2019 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
/**
|
||||
* @brief A header that defines advanced related properties for DLIA plugins.
|
||||
* These properties should be used in SetConfig() and LoadNetwork() methods of plugins
|
||||
*
|
||||
* @file dlia_config.hpp
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include "ie_plugin_config.hpp"
|
||||
|
||||
namespace InferenceEngine {
|
||||
|
||||
namespace TemplateMetrics {
|
||||
|
||||
/**
|
||||
* @def TEMPLATE_METRIC_VALUE(name)
|
||||
* @brief Shortcut for defining Template metric values
|
||||
*/
|
||||
#define TEMPLATE_METRIC_VALUE(name) InferenceEngine::TemplateMetrics::name
|
||||
#define DECLARE_TEMPLATE_METRIC_VALUE(name) static constexpr auto name = #name
|
||||
|
||||
// ! [public_header:metrics]
|
||||
/**
|
||||
* @brief Defines whether current Template device instance supports hardware blocks for fast convolution computations.
|
||||
*/
|
||||
DECLARE_TEMPLATE_METRIC_VALUE(HARDWARE_CONVOLUTION);
|
||||
// ! [public_header:metrics]
|
||||
|
||||
} // namespace TemplateMetrics
|
||||
|
||||
namespace TemplateConfigParams {
|
||||
|
||||
/**
|
||||
* @def TEMPLATE_CONFIG_KEY(name)
|
||||
* @brief Shortcut for defining Template device configuration keys
|
||||
*/
|
||||
#define TEMPLATE_CONFIG_KEY(name) InferenceEngine::TemplateConfigParams::_CONFIG_KEY(TEMPLATE_##name)
|
||||
|
||||
#define DECLARE_TEMPLATE_CONFIG_KEY(name) DECLARE_CONFIG_KEY(TEMPLATE_##name)
|
||||
#define DECLARE_TEMPLATE_CONFIG_VALUE(name) DECLARE_CONFIG_VALUE(TEMPLATE_##name)
|
||||
|
||||
/**
|
||||
* @brief The key to define the type of transformations for TEMPLATE inputs and outputs.
|
||||
* TEMPLATE use custom data layout for input and output blobs. IE TEMPLATE Plugin provides custom
|
||||
* optimized version of transformation functions that do not use OpenMP and much more faster
|
||||
* than native TEMPLATE functions. Values: "NO" - optimized plugin transformations
|
||||
* are used, "YES" - native TEMPLATE transformations are used.
|
||||
*/
|
||||
DECLARE_TEMPLATE_CONFIG_KEY(ANY_CONFIG_KEY);
|
||||
|
||||
|
||||
} // namespace TemplateConfigParams
|
||||
} // namespace InferenceEngine
|
||||
43
docs/template_plugin/src/CMakeLists.txt
Normal file
43
docs/template_plugin/src/CMakeLists.txt
Normal file
@@ -0,0 +1,43 @@
|
||||
# Copyright (C) 2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
# [cmake:plugin]
|
||||
set(TARGET_NAME "templatePlugin")
|
||||
|
||||
if(ENABLE_LTO)
|
||||
ie_enable_lto()
|
||||
endif()
|
||||
|
||||
file(GLOB SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
|
||||
file(GLOB_RECURSE HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
|
||||
|
||||
# adds a shared library with plugin
|
||||
ie_add_plugin(NAME ${TARGET_NAME}
|
||||
DEVICE_NAME "TEMPLATE"
|
||||
SOURCES ${SOURCES} ${HEADERS}
|
||||
SKIP_INSTALL # ATTENTION: uncomment to install component
|
||||
VERSION_DEFINES_FOR template_plugin.cpp)
|
||||
|
||||
target_include_directories(${TARGET_NAME} PRIVATE
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}"
|
||||
"${IE_MAIN_TEMPLATE_PLUGIN_SOURCE_DIR}/include")
|
||||
|
||||
target_link_libraries(${TARGET_NAME} PRIVATE IE::inference_engine IE::inference_engine_transformations ${NGRAPH_LIBRARIES} ${INTEL_ITT_LIBS})
|
||||
|
||||
# ATTENTION: uncomment to register a plugin in the plugins.xml file
|
||||
# ie_register_plugins(MAIN_TARGET ${TARGET_NAME}
|
||||
# POSSIBLE_PLUGINS ${TARGET_NAME})
|
||||
# [cmake:plugin]
|
||||
|
||||
# ATTENTION: uncomment to install component
|
||||
# install
|
||||
|
||||
# set(component_name template)
|
||||
# ie_cpack_add_component(${component_name} REQUIRED)
|
||||
|
||||
# install(TARGETS ${TARGET_NAME}
|
||||
# RUNTIME DESTINATION ${IE_CPACK_RUNTIME_PATH}
|
||||
# ARCHIVE DESTINATION ${IE_CPACK_ARCHIVE_PATH}
|
||||
# LIBRARY DESTINATION ${IE_CPACK_LIBRARY_PATH}
|
||||
# COMPONENT ${component_name})
|
||||
44
docs/template_plugin/src/template_async_infer_request.cpp
Normal file
44
docs/template_plugin/src/template_async_infer_request.cpp
Normal file
@@ -0,0 +1,44 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <utility>
|
||||
|
||||
#include <ie_profiling.hpp>
|
||||
|
||||
#include "template_async_infer_request.hpp"
|
||||
#include "template_executable_network.hpp"
|
||||
|
||||
using namespace TemplatePlugin;
|
||||
|
||||
// ! [async_infer_request:ctor]
|
||||
TemplateAsyncInferRequest::TemplateAsyncInferRequest(
|
||||
const TemplateInferRequest::Ptr& inferRequest,
|
||||
const InferenceEngine::ITaskExecutor::Ptr& cpuTaskExecutor,
|
||||
const InferenceEngine::ITaskExecutor::Ptr& waitExecutor,
|
||||
const InferenceEngine::ITaskExecutor::Ptr& callbackExecutor) :
|
||||
AsyncInferRequestThreadSafeDefault(inferRequest, cpuTaskExecutor, callbackExecutor),
|
||||
_inferRequest(inferRequest), _waitExecutor(waitExecutor) {
|
||||
_pipeline = {
|
||||
{cpuTaskExecutor, [this] {
|
||||
IE_PROFILING_AUTO_SCOPE(PreprocessingAndStartPipeline)
|
||||
_inferRequest->inferPreprocess();
|
||||
_inferRequest->startPipeline();
|
||||
}},
|
||||
{_waitExecutor, [this] {
|
||||
IE_PROFILING_AUTO_SCOPE(WaitPipeline)
|
||||
_inferRequest->waitPipeline();
|
||||
}},
|
||||
{cpuTaskExecutor, [this] {
|
||||
IE_PROFILING_AUTO_SCOPE(Postprocessing)
|
||||
_inferRequest->inferPostprocess();
|
||||
}}
|
||||
};
|
||||
}
|
||||
// ! [async_infer_request:ctor]
|
||||
|
||||
// ! [async_infer_request:dtor]
|
||||
TemplateAsyncInferRequest::~TemplateAsyncInferRequest() {
|
||||
InferenceEngine::AsyncInferRequestThreadSafeDefault::StopAndWait();
|
||||
}
|
||||
// ! [async_infer_request:dtor]
|
||||
30
docs/template_plugin/src/template_async_infer_request.hpp
Normal file
30
docs/template_plugin/src/template_async_infer_request.hpp
Normal file
@@ -0,0 +1,30 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp>
|
||||
|
||||
#include "template_infer_request.hpp"
|
||||
|
||||
namespace TemplatePlugin {
|
||||
|
||||
// ! [async_infer_request:header]
|
||||
class TemplateAsyncInferRequest : public InferenceEngine::AsyncInferRequestThreadSafeDefault {
|
||||
public:
|
||||
TemplateAsyncInferRequest(const TemplateInferRequest::Ptr& inferRequest,
|
||||
const InferenceEngine::ITaskExecutor::Ptr& taskExecutor,
|
||||
const InferenceEngine::ITaskExecutor::Ptr& waitExecutor,
|
||||
const InferenceEngine::ITaskExecutor::Ptr& callbackExecutor);
|
||||
|
||||
~TemplateAsyncInferRequest() override;
|
||||
|
||||
private:
|
||||
TemplateInferRequest::Ptr _inferRequest;
|
||||
InferenceEngine::ITaskExecutor::Ptr _waitExecutor;
|
||||
};
|
||||
// ! [async_infer_request:header]
|
||||
|
||||
} // namespace TemplatePlugin
|
||||
45
docs/template_plugin/src/template_config.cpp
Normal file
45
docs/template_plugin/src/template_config.cpp
Normal file
@@ -0,0 +1,45 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <algorithm>
|
||||
|
||||
#include <ie_util_internal.hpp>
|
||||
#include <ie_plugin_config.hpp>
|
||||
#include <file_utils.h>
|
||||
#include <cpp_interfaces/exception2status.hpp>
|
||||
|
||||
#include "template_config.hpp"
|
||||
|
||||
using namespace TemplatePlugin;
|
||||
|
||||
Configuration::Configuration() { }
|
||||
|
||||
Configuration::Configuration(const ConfigMap& config, const Configuration & defaultCfg, bool throwOnUnsupported) {
|
||||
*this = defaultCfg;
|
||||
for (auto&& c : config) {
|
||||
const auto& key = c.first;
|
||||
const auto& value = c.second;
|
||||
|
||||
if (CONFIG_KEY(DEVICE_ID) == key) {
|
||||
deviceId = std::stoi(value);
|
||||
} else if (CONFIG_KEY(PERF_COUNT) == key) {
|
||||
perfCount = (CONFIG_VALUE(YES) == value);
|
||||
} else if (throwOnUnsupported) {
|
||||
THROW_IE_EXCEPTION << NOT_FOUND_str << ": " << key;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
InferenceEngine::Parameter Configuration::Get(const std::string& name) const {
|
||||
if (name == CONFIG_KEY(DEVICE_ID)) {
|
||||
return {std::to_string(deviceId)};
|
||||
} else if (name == CONFIG_KEY(PERF_COUNT)) {
|
||||
return {perfCount};
|
||||
} else {
|
||||
THROW_IE_EXCEPTION << NOT_FOUND_str << ": " << name;
|
||||
}
|
||||
}
|
||||
40
docs/template_plugin/src/template_config.hpp
Normal file
40
docs/template_plugin/src/template_config.hpp
Normal file
@@ -0,0 +1,40 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#include <map>
|
||||
#include <unordered_map>
|
||||
|
||||
#include <ie_parameter.hpp>
|
||||
|
||||
namespace TemplatePlugin {
|
||||
|
||||
template<typename T>
|
||||
using IOMap = std::unordered_map<std::string, T>;
|
||||
|
||||
// ! [configuration:header]
|
||||
using ConfigMap = std::map<std::string, std::string>;
|
||||
|
||||
struct Configuration {
|
||||
Configuration();
|
||||
Configuration(const Configuration&) = default;
|
||||
Configuration(Configuration&&) = default;
|
||||
Configuration& operator=(const Configuration&) = default;
|
||||
Configuration& operator=(Configuration&&) = default;
|
||||
|
||||
explicit Configuration(const ConfigMap& config, const Configuration & defaultCfg = {}, const bool throwOnUnsupported = true);
|
||||
|
||||
InferenceEngine::Parameter Get(const std::string& name) const;
|
||||
|
||||
// Plugin configuration parameters
|
||||
|
||||
int deviceId = 0;
|
||||
bool perfCount = true;
|
||||
};
|
||||
// ! [configuration:header]
|
||||
|
||||
} // namespace TemplatePlugin
|
||||
167
docs/template_plugin/src/template_executable_network.cpp
Normal file
167
docs/template_plugin/src/template_executable_network.cpp
Normal file
@@ -0,0 +1,167 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <atomic>
|
||||
#include <set>
|
||||
#include <utility>
|
||||
#include <algorithm>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include <ie_metric_helpers.hpp>
|
||||
#include <ie_util_internal.hpp>
|
||||
#include <ie_plugin_config.hpp>
|
||||
#include <network_serializer.h>
|
||||
#include <threading/ie_executor_manager.hpp>
|
||||
#include <details/ie_cnn_network_tools.h>
|
||||
|
||||
#include <ngraph/specialize_function.hpp>
|
||||
#include <ngraph/pass/manager.hpp>
|
||||
#include <ngraph/pass/constant_folding.hpp>
|
||||
|
||||
#include <transformations/convert_divide.hpp>
|
||||
|
||||
#include "template_plugin.hpp"
|
||||
#include "template_executable_network.hpp"
|
||||
|
||||
using namespace TemplatePlugin;
|
||||
|
||||
// ! [executable_network:ctor_cnnnetwork]
|
||||
TemplatePlugin::ExecutableNetwork::ExecutableNetwork(InferenceEngine::ICNNNetwork& network,
|
||||
const Configuration& cfg):
|
||||
_name(network.getName()),
|
||||
_cfg(cfg),
|
||||
_waitExecutor(InferenceEngine::ExecutorManager::getInstance()->getExecutor("Template")) {
|
||||
// TODO: if your plugin supports device ID (more that single instance of device can be on host machine)
|
||||
// you should select proper device based on KEY_DEVICE_ID or automatic behavior
|
||||
// In this case, _waitExecutor should also be created per device.
|
||||
|
||||
try {
|
||||
if (std::shared_ptr<const ngraph::Function> ngraphFunction = network.getFunction()) {
|
||||
CompileGraph(ngraphFunction);
|
||||
} else {
|
||||
THROW_IE_EXCEPTION << "TEMPLATE plugin can compile only IR v10 networks";
|
||||
}
|
||||
}
|
||||
catch (const InferenceEngineException & e) {
|
||||
throw e;
|
||||
}
|
||||
catch (const std::exception & e) {
|
||||
THROW_IE_EXCEPTION << "Standard exception from compilation library: " << e.what();
|
||||
}
|
||||
catch (...) {
|
||||
THROW_IE_EXCEPTION << "Generic exception is thrown";
|
||||
}
|
||||
}
|
||||
// ! [executable_network:ctor_cnnnetwork]
|
||||
|
||||
// ! [executable_network:ctor_import_stream]
|
||||
TemplatePlugin::ExecutableNetwork::ExecutableNetwork(std::istream & model,
|
||||
const Configuration& cfg) :
|
||||
_cfg(cfg) {
|
||||
// TODO: since Import network is not a mandatory functionality, this ctor can just be removed
|
||||
}
|
||||
// ! [executable_network:ctor_import_stream]
|
||||
|
||||
// ! [executable_network:compile_graph]
|
||||
void TemplatePlugin::ExecutableNetwork::CompileGraph(const std::shared_ptr<const ngraph::Function> & ngraphFunction) {
|
||||
// TODO: perform actual graph compilation taking `_cfg` into account
|
||||
|
||||
// 1.Copy ngraph::Function first to apply some transformations later in
|
||||
// ExecutableNetwork::CompileGraph, which modify original ngraph::Function
|
||||
const bool shareConsts = false, constFolding = false;
|
||||
std::vector<::ngraph::element::Type> new_types;
|
||||
std::vector<::ngraph::PartialShape> new_shapes;
|
||||
|
||||
for (const auto ¶meter : ngraphFunction->get_parameters()) {
|
||||
new_shapes.emplace_back(parameter->get_partial_shape());
|
||||
new_types.emplace_back(parameter->get_element_type());
|
||||
}
|
||||
|
||||
auto copyFunction = ngraph::specialize_function(std::const_pointer_cast<ngraph::Function>(ngraphFunction),
|
||||
new_types, new_shapes, std::vector<void *>(new_types.size(), nullptr), constFolding, shareConsts);
|
||||
|
||||
// 2. Perform common and device-specific transformations
|
||||
ngraph::pass::Manager passManager;
|
||||
// Example: register standard ngraph transformation from ngraph::ngraph
|
||||
passManager.register_pass<ngraph::pass::ConstantFolding>();
|
||||
// Example: register inference engine optimization transformation for IE::inference_engine_transformations
|
||||
passManager.register_pass<ngraph::pass::ConvertDivide>();
|
||||
// Register any other transformations
|
||||
// ..
|
||||
|
||||
// After `run_passes`, we have the transformed function, where operations match device operations,
|
||||
// and we can create device hardware-dependent graph
|
||||
passManager.run_passes(copyFunction);
|
||||
|
||||
// 3. Iterate over operations and create hardware-specific ngraph
|
||||
for (const auto& op : copyFunction->get_ordered_ops()) {
|
||||
// TODO: map ngraph `op` to device operation
|
||||
}
|
||||
|
||||
// 4. Perform any other steps like allocation and filling device buffers, and so on
|
||||
}
|
||||
// ! [executable_network:compile_graph]
|
||||
|
||||
// ! [executable_network:create_infer_request_impl]
|
||||
InferenceEngine::InferRequestInternal::Ptr TemplatePlugin::ExecutableNetwork::CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
|
||||
InferenceEngine::OutputsDataMap networkOutputs) {
|
||||
return std::make_shared<TemplateInferRequest>(networkInputs, networkOutputs, std::static_pointer_cast<ExecutableNetwork>(shared_from_this()));
|
||||
}
|
||||
// ! [executable_network:create_infer_request_impl]
|
||||
|
||||
// ! [executable_network:create_infer_request]
|
||||
void TemplatePlugin::ExecutableNetwork::CreateInferRequest(IInferRequest::Ptr& asyncRequest) {
|
||||
auto internalRequest = CreateInferRequestImpl(_networkInputs, _networkOutputs);
|
||||
auto asyncThreadSafeImpl = std::make_shared<TemplateAsyncInferRequest>(std::static_pointer_cast<TemplateInferRequest>(internalRequest),
|
||||
_taskExecutor, _waitExecutor, _callbackExecutor);
|
||||
asyncRequest.reset(new InferenceEngine::InferRequestBase<TemplateAsyncInferRequest>(asyncThreadSafeImpl),
|
||||
[](InferenceEngine::IInferRequest *p) { p->Release(); });
|
||||
asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
|
||||
}
|
||||
// ! [executable_network:create_infer_request]
|
||||
|
||||
// ! [executable_network:get_config]
|
||||
void TemplatePlugin::ExecutableNetwork::GetConfig(const std::string &name, Parameter &result, ResponseDesc *resp) const {
|
||||
// TODO: return more supported values for config keys
|
||||
if (name == CONFIG_KEY(DEVICE_ID) ||
|
||||
name == CONFIG_KEY(PERF_COUNT)) {
|
||||
result = _cfg.Get(name);
|
||||
} else {
|
||||
THROW_IE_EXCEPTION << "Unsupported ExecutableNetwork config key: " << name;
|
||||
}
|
||||
}
|
||||
// ! [executable_network:get_config]
|
||||
|
||||
// ! [executable_network:get_metric]
|
||||
void TemplatePlugin::ExecutableNetwork::GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *) const {
|
||||
// TODO: return more supported values for metrics
|
||||
if (METRIC_KEY(SUPPORTED_METRICS) == name) {
|
||||
result = IE_SET_METRIC(SUPPORTED_METRICS, std::vector<std::string>{
|
||||
METRIC_KEY(NETWORK_NAME),
|
||||
METRIC_KEY(SUPPORTED_METRICS),
|
||||
METRIC_KEY(SUPPORTED_CONFIG_KEYS),
|
||||
METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)});
|
||||
} else if (METRIC_KEY(SUPPORTED_CONFIG_KEYS) == name) {
|
||||
result = IE_SET_METRIC(SUPPORTED_CONFIG_KEYS, std::vector<std::string>{
|
||||
CONFIG_KEY(DEVICE_ID),
|
||||
CONFIG_KEY(PERF_COUNT)});
|
||||
} else if (METRIC_KEY(NETWORK_NAME) == name) {
|
||||
result = IE_SET_METRIC(NETWORK_NAME, _name);
|
||||
} else if (METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS) == name) {
|
||||
// TODO: fill with actual number
|
||||
unsigned int value = 1;
|
||||
result = IE_SET_METRIC(OPTIMAL_NUMBER_OF_INFER_REQUESTS, value);
|
||||
} else {
|
||||
THROW_IE_EXCEPTION << "Unsupported ExecutableNetwork metric: " << name;
|
||||
}
|
||||
}
|
||||
// ! [executable_network:get_metric]
|
||||
|
||||
// ! [executable_network:export_impl]
|
||||
void TemplatePlugin::ExecutableNetwork::ExportImpl(std::ostream& dlaModel) {
|
||||
// TODO: Code which exports graph from std::ostream
|
||||
}
|
||||
// ! [executable_network:export_impl]
|
||||
68
docs/template_plugin/src/template_executable_network.hpp
Normal file
68
docs/template_plugin/src/template_executable_network.hpp
Normal file
@@ -0,0 +1,68 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <utility>
|
||||
#include <tuple>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <unordered_map>
|
||||
#include <list>
|
||||
|
||||
#include <ie_common.h>
|
||||
#include <cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp>
|
||||
#include <cnn_network_impl.hpp>
|
||||
#include <threading/ie_itask_executor.hpp>
|
||||
|
||||
#include <ngraph/function.hpp>
|
||||
|
||||
#include "template_config.hpp"
|
||||
#include "template_infer_request.hpp"
|
||||
#include "template_async_infer_request.hpp"
|
||||
|
||||
namespace TemplatePlugin {
|
||||
|
||||
class Engine;
|
||||
|
||||
/**
|
||||
* @class ExecutableNetwork
|
||||
* @brief Interface of executable network
|
||||
*/
|
||||
// ! [executable_network:header]
|
||||
class ExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
|
||||
public:
|
||||
ExecutableNetwork(InferenceEngine::ICNNNetwork& network,
|
||||
const Configuration& cfg);
|
||||
|
||||
ExecutableNetwork(std::istream & model,
|
||||
const Configuration& cfg);
|
||||
|
||||
~ExecutableNetwork() override = default;
|
||||
|
||||
// Methods from a base class ExecutableNetworkThreadSafeDefault
|
||||
|
||||
void ExportImpl(std::ostream& model) override;
|
||||
InferenceEngine::InferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
|
||||
InferenceEngine::OutputsDataMap networkOutputs) override;
|
||||
void CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) override;
|
||||
void GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
|
||||
void GetConfig(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
|
||||
|
||||
std::atomic<std::size_t> _requestId = {0};
|
||||
std::string _name;
|
||||
Configuration _cfg;
|
||||
|
||||
private:
|
||||
void CompileGraph(const std::shared_ptr<const ngraph::Function> & ngraphFunction);
|
||||
|
||||
std::shared_ptr<Engine> _plugin;
|
||||
InferenceEngine::ITaskExecutor::Ptr _waitExecutor;
|
||||
};
|
||||
// ! [executable_network:header]
|
||||
|
||||
} // namespace TemplatePlugin
|
||||
224
docs/template_plugin/src/template_infer_request.cpp
Normal file
224
docs/template_plugin/src/template_infer_request.cpp
Normal file
@@ -0,0 +1,224 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
|
||||
#include <utility>
|
||||
#include <algorithm>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <map>
|
||||
|
||||
#include <ie_blob.h>
|
||||
#include <ie_plugin.hpp>
|
||||
#include <description_buffer.hpp>
|
||||
#include <debug.h>
|
||||
#include <ie_layouts.h>
|
||||
#include <threading/ie_executor_manager.hpp>
|
||||
#include <blob_transform.hpp>
|
||||
#include <ie_parallel.hpp>
|
||||
#include <ie_memcpy.h>
|
||||
#include <precision_utils.h>
|
||||
#include <template/template_config.hpp>
|
||||
|
||||
#include "template_infer_request.hpp"
|
||||
#include "template_executable_network.hpp"
|
||||
#include "template_plugin.hpp"
|
||||
|
||||
using namespace TemplatePlugin;
|
||||
|
||||
using Time = std::chrono::high_resolution_clock;
|
||||
using ns = std::chrono::nanoseconds;
|
||||
using fsec = std::chrono::duration<float>;
|
||||
|
||||
// ! [infer_request:ctor]
|
||||
TemplateInferRequest::TemplateInferRequest(const InferenceEngine::InputsDataMap& networkInputs,
|
||||
const InferenceEngine::OutputsDataMap& networkOutputs,
|
||||
const std::shared_ptr<TemplatePlugin::ExecutableNetwork>& executableNetwork) :
|
||||
InferRequestInternal(networkInputs, networkOutputs),
|
||||
_executableNetwork(executableNetwork) {
|
||||
// TODO: allocate infer request device and host buffers if needed, fill actual list of profiling tasks
|
||||
|
||||
auto requestID = std::to_string(_executableNetwork->_requestId);
|
||||
_executableNetwork->_requestId++;
|
||||
|
||||
std::string name = _executableNetwork->_name + "_Req" + requestID;
|
||||
_profilingTask = { {
|
||||
{ ProfilingTask("Template" + std::to_string(_executableNetwork->_cfg.deviceId) + "_" + name + "_Preprocess") },
|
||||
{ ProfilingTask("Template" + std::to_string(_executableNetwork->_cfg.deviceId) + "_" + name + "_Postprocess") },
|
||||
{ ProfilingTask("Template" + std::to_string(_executableNetwork->_cfg.deviceId) + "_" + name + "_StartPipline") },
|
||||
{ ProfilingTask("Template" + std::to_string(_executableNetwork->_cfg.deviceId) + "_" + name + "_WaitPipline") },
|
||||
} };
|
||||
|
||||
allocateDeviceBuffers();
|
||||
allocateInputBlobs();
|
||||
allocateOutputBlobs();
|
||||
}
|
||||
// ! [infer_request:ctor]
|
||||
|
||||
// ! [infer_request:dtor]
|
||||
TemplateInferRequest::~TemplateInferRequest() {
|
||||
_executableNetwork->_requestId--;
|
||||
}
|
||||
// ! [infer_request:dtor]
|
||||
|
||||
void TemplateInferRequest::allocateDeviceBuffers() {
|
||||
// TODO: allocate device buffers if Template device is a remote one
|
||||
}
|
||||
|
||||
void TemplateInferRequest::allocateInputBlobs() {
|
||||
for (auto &networkInput : _networkInputs) {
|
||||
SizeVector dims = networkInput.second->getTensorDesc().getDims();
|
||||
Precision precision = networkInput.second->getTensorDesc().getPrecision();
|
||||
Layout input_layout = networkInput.second->getInputData()->getLayout();
|
||||
Blob::Ptr inputBlob;
|
||||
Blob::Ptr inputBlobNCHW;
|
||||
switch (precision) {
|
||||
case Precision::FP32 :
|
||||
inputBlobNCHW = inputBlob = InferenceEngine::make_shared_blob<float>({ precision, dims, input_layout });
|
||||
if (input_layout == Layout::NHWC) {
|
||||
inputBlobNCHW = InferenceEngine::make_shared_blob<float>({ precision, dims, Layout::NCHW });
|
||||
}
|
||||
break;
|
||||
case Precision::FP16 :
|
||||
case Precision::I16 :
|
||||
inputBlobNCHW = inputBlob = InferenceEngine::make_shared_blob<int16_t>({ precision, dims, input_layout });
|
||||
if (input_layout == Layout::NHWC) {
|
||||
inputBlobNCHW = InferenceEngine::make_shared_blob<int16_t>({ precision, dims, Layout::NCHW });
|
||||
}
|
||||
break;
|
||||
case Precision::U8 :
|
||||
inputBlobNCHW = inputBlob = InferenceEngine::make_shared_blob<uint8_t>({ precision, dims, input_layout });
|
||||
if (input_layout == Layout::NHWC) {
|
||||
inputBlobNCHW = InferenceEngine::make_shared_blob<uint8_t>({ precision, dims, Layout::NCHW });
|
||||
}
|
||||
break;
|
||||
default:
|
||||
THROW_IE_EXCEPTION << "Unsupported network precision: " << precision
|
||||
<< precision << "! Supported precisions are: FP32, FP16, I16, U8";
|
||||
}
|
||||
// allocate the input blob
|
||||
inputBlob->allocate();
|
||||
_inputs[networkInput.first] = inputBlob;
|
||||
if (inputBlobNCHW != inputBlob) {
|
||||
inputBlobNCHW->allocate();
|
||||
}
|
||||
_inputsNCHW[networkInput.first] = inputBlobNCHW;
|
||||
}
|
||||
}
|
||||
|
||||
void TemplateInferRequest::allocateOutputBlobs() {
|
||||
for (auto &networkOutput : _networkOutputs) {
|
||||
SizeVector dims = networkOutput.second->getTensorDesc().getDims();
|
||||
Precision precision = networkOutput.second->getPrecision();
|
||||
Blob::Ptr outputBlob;
|
||||
|
||||
// allocate the output blob
|
||||
Blob::Ptr outputBlobNCHW;
|
||||
switch (precision) {
|
||||
case Precision::FP32 :
|
||||
outputBlobNCHW = outputBlob = InferenceEngine::make_shared_blob<float>({ precision, dims, networkOutput.second->getLayout() });
|
||||
if (networkOutput.second->getLayout() == Layout::NHWC) {
|
||||
outputBlobNCHW = InferenceEngine::make_shared_blob<float>({ precision, dims, Layout::NCHW });
|
||||
}
|
||||
break;
|
||||
case Precision::FP16 :
|
||||
outputBlobNCHW = outputBlob = InferenceEngine::make_shared_blob<int16_t>({ precision, dims, networkOutput.second->getLayout() });
|
||||
if (networkOutput.second->getLayout() == Layout::NHWC) {
|
||||
outputBlobNCHW = InferenceEngine::make_shared_blob<int16_t>({ precision, dims, Layout::NCHW });
|
||||
}
|
||||
break;
|
||||
default:
|
||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Unsupported output precision: "
|
||||
<< precision << "! Supported precisions are: FP32, FP16";
|
||||
}
|
||||
// allocate the output blob
|
||||
outputBlob->allocate();
|
||||
_outputs[networkOutput.first] = outputBlob;
|
||||
if (outputBlobNCHW != outputBlob) {
|
||||
outputBlobNCHW->allocate();
|
||||
}
|
||||
_outputsNCHW[networkOutput.first] = outputBlobNCHW;
|
||||
}
|
||||
|
||||
if (_networkOutputs.empty() || _networkInputs.empty()) {
|
||||
THROW_IE_EXCEPTION << "Internal error: no information about network's output/input";
|
||||
}
|
||||
}
|
||||
|
||||
// ! [infer_request:infer_impl]
|
||||
void TemplateInferRequest::InferImpl() {
|
||||
// TODO: fill with actual list of pipeline stages, which are executed syncronously for sync infer requests
|
||||
inferPreprocess();
|
||||
startPipeline();
|
||||
waitPipeline();
|
||||
inferPostprocess();
|
||||
}
|
||||
// ! [infer_request:infer_impl]
|
||||
|
||||
// ! [infer_request:infer_preprocess]
|
||||
void TemplateInferRequest::inferPreprocess() {
|
||||
auto prev = Time::now();
|
||||
|
||||
// execute input pre-processing.
|
||||
InferRequestInternal::execDataPreprocessing(_inputs);
|
||||
|
||||
for (auto &input : InferRequestInternal::_inputs) {
|
||||
auto& src = input.second;
|
||||
auto& dst = _inputsNCHW[input.first];
|
||||
if (src != dst) {
|
||||
if (src->getTensorDesc().getPrecision() == dst->getTensorDesc().getPrecision()
|
||||
&& src->getTensorDesc().getDims() == dst->getTensorDesc().getDims()
|
||||
&& src->getTensorDesc().getLayout() == dst->getTensorDesc().getLayout()) {
|
||||
_inputsNCHW[input.first] = input.second;
|
||||
} else { // Convert Layout to NCHW
|
||||
InferenceEngine::blob_copy(src, dst);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Preprocessing on inputs if needed: work _inputsNCHW
|
||||
|
||||
_inputPreprocessTime = static_cast<double>(std::chrono::duration_cast<ns>(Time::now() - prev).count());
|
||||
}
|
||||
// ! [infer_request:infer_preprocess]
|
||||
|
||||
void TemplateInferRequest::startPipeline() {
|
||||
IE_PROFILING_AUTO_SCOPE_TASK(_profilingTask[StartPipeline])
|
||||
// TODO: Start pipeline and fill _inputTransferTime, _executeTime, _outputTransferTime
|
||||
}
|
||||
|
||||
void TemplateInferRequest::waitPipeline() {
|
||||
IE_PROFILING_AUTO_SCOPE_TASK(_profilingTask[WaitPipeline])
|
||||
auto prev = Time::now();
|
||||
// TODO: Wait pipeline using driver API or other synronizations methods
|
||||
_inputPreprocessTime = static_cast<double>(std::chrono::duration_cast<ns>(Time::now() - prev).count());
|
||||
}
|
||||
|
||||
void TemplateInferRequest::inferPostprocess() {
|
||||
IE_PROFILING_AUTO_SCOPE_TASK(_profilingTask[Postprocess])
|
||||
auto prev = Time::now();
|
||||
// TODO: perform post-processing and convert to NHWC layout
|
||||
_outputPostProcessTime = static_cast<double>(std::chrono::duration_cast<ns>(Time::now() - prev).count());
|
||||
}
|
||||
|
||||
// ! [infer_request:get_performance_counts]
|
||||
void TemplateInferRequest::GetPerformanceCounts(std::map<std::string, InferenceEngineProfileInfo> &perfMap) const {
|
||||
InferenceEngineProfileInfo info;
|
||||
info.execution_index = 0;
|
||||
info.status = InferenceEngineProfileInfo::EXECUTED;
|
||||
info.cpu_uSec = info.realTime_uSec = _inputPreprocessTime / 1000;
|
||||
perfMap["1. input preprocessing"] = info;
|
||||
info.cpu_uSec = 0;
|
||||
info.realTime_uSec = _inputTransferTime / 1000;
|
||||
perfMap["2. input transfer to a device"] = info;
|
||||
info.cpu_uSec = 0;
|
||||
info.realTime_uSec = _executeTime / 1000;
|
||||
perfMap["3. execution time"] = info;
|
||||
info.cpu_uSec = 0;
|
||||
info.realTime_uSec = _outputTransferTime / 1000;
|
||||
perfMap["4. output transfer from a device"] = info;
|
||||
info.cpu_uSec = info.realTime_uSec = _outputPostProcessTime / 1000;
|
||||
perfMap["5. output postprocessing"] = info;
|
||||
}
|
||||
// ! [infer_request:get_performance_counts]
|
||||
74
docs/template_plugin/src/template_infer_request.hpp
Normal file
74
docs/template_plugin/src/template_infer_request.hpp
Normal file
@@ -0,0 +1,74 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
#include <unordered_map>
|
||||
|
||||
#include <ie_common.h>
|
||||
#include <ie_profiling.hpp>
|
||||
#include <cpp_interfaces/impl/ie_infer_request_internal.hpp>
|
||||
#include <cpp_interfaces/impl/ie_executable_network_internal.hpp>
|
||||
#include <threading/ie_itask_executor.hpp>
|
||||
|
||||
#include "template_config.hpp"
|
||||
|
||||
namespace TemplatePlugin {
|
||||
|
||||
class ExecutableNetwork;
|
||||
|
||||
// ! [infer_request:header]
|
||||
class TemplateInferRequest : public InferenceEngine::InferRequestInternal {
|
||||
public:
|
||||
typedef std::shared_ptr<TemplateInferRequest> Ptr;
|
||||
|
||||
TemplateInferRequest(const InferenceEngine::InputsDataMap& networkInputs,
|
||||
const InferenceEngine::OutputsDataMap& networkOutputs,
|
||||
const std::shared_ptr<ExecutableNetwork>& executableNetwork);
|
||||
~TemplateInferRequest() override;
|
||||
|
||||
void InferImpl() override;
|
||||
void GetPerformanceCounts(std::map<std::string, InferenceEngine::InferenceEngineProfileInfo>& perfMap) const override;
|
||||
|
||||
// pipeline methods-stages which are used in async infer request implementation and assigned to particular executor
|
||||
void inferPreprocess();
|
||||
void startPipeline();
|
||||
void waitPipeline();
|
||||
void inferPostprocess();
|
||||
|
||||
std::shared_ptr<ExecutableNetwork> _executableNetwork;
|
||||
|
||||
private:
|
||||
void allocateDeviceBuffers();
|
||||
void allocateInputBlobs();
|
||||
void allocateOutputBlobs();
|
||||
|
||||
enum {
|
||||
Preprocess,
|
||||
Postprocess,
|
||||
StartPipeline,
|
||||
WaitPipeline,
|
||||
numOfStages
|
||||
};
|
||||
|
||||
std::array<InferenceEngine::ProfilingTask, numOfStages> _profilingTask;
|
||||
|
||||
InferenceEngine::BlobMap _inputsNCHW;
|
||||
InferenceEngine::BlobMap _outputsNCHW;
|
||||
|
||||
// for performance counts
|
||||
double _inputPreprocessTime = 0.0;
|
||||
double _inputTransferTime = 0.0;
|
||||
double _executeTime = 0.0;
|
||||
double _outputTransferTime = 0.0;
|
||||
double _outputPostProcessTime = 0.0;
|
||||
};
|
||||
// ! [infer_request:header]
|
||||
|
||||
} // namespace TemplatePlugin
|
||||
193
docs/template_plugin/src/template_plugin.cpp
Normal file
193
docs/template_plugin/src/template_plugin.cpp
Normal file
@@ -0,0 +1,193 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
|
||||
#include <utility>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
#include <sstream>
|
||||
#include <regex>
|
||||
#include <string>
|
||||
#include <map>
|
||||
|
||||
#include <ie_metric_helpers.hpp>
|
||||
#include <details/ie_cnn_network_tools.h>
|
||||
#include <ie_plugin_config.hpp>
|
||||
#include <ie_util_internal.hpp>
|
||||
#include <inference_engine.hpp>
|
||||
#include <file_utils.h>
|
||||
#include <cpp_interfaces/base/ie_plugin_base.hpp>
|
||||
#include <cpp_interfaces/interface/ie_internal_plugin_config.hpp>
|
||||
#include <threading/ie_executor_manager.hpp>
|
||||
#include <graph_tools.hpp>
|
||||
#include <ie_input_info.hpp>
|
||||
#include <ie_layouts.h>
|
||||
#include <hetero/hetero_plugin_config.hpp>
|
||||
#include <template/template_config.hpp>
|
||||
|
||||
#include "template_plugin.hpp"
|
||||
#include "template_executable_network.hpp"
|
||||
#include "template_infer_request.hpp"
|
||||
|
||||
using namespace TemplatePlugin;
|
||||
|
||||
// ! [plugin:ctor]
|
||||
Plugin::Plugin() {
|
||||
// TODO: fill with actual device name
|
||||
_pluginName = "TEMPLATE";
|
||||
}
|
||||
// ! [plugin:ctor]
|
||||
|
||||
// ! [plugin:load_exe_network_impl]
|
||||
InferenceEngine::ExecutableNetworkInternal::Ptr Plugin::LoadExeNetworkImpl(const InferenceEngine::ICNNNetwork & network,
|
||||
const ConfigMap &config) {
|
||||
auto cfg = Configuration{ config, _cfg };
|
||||
InferenceEngine::InputsDataMap networkInputs;
|
||||
InferenceEngine::OutputsDataMap networkOutputs;
|
||||
|
||||
network.getInputsInfo(networkInputs);
|
||||
network.getOutputsInfo(networkOutputs);
|
||||
|
||||
// TODO: check with precisions supported by Template device
|
||||
|
||||
for (auto networkOutput : networkOutputs) {
|
||||
auto output_precision = networkOutput.second->getPrecision();
|
||||
|
||||
if (output_precision != Precision::FP32 &&
|
||||
output_precision != Precision::FP16) {
|
||||
THROW_IE_EXCEPTION << "Template device supports only FP16 and FP32 output precision.";
|
||||
}
|
||||
}
|
||||
|
||||
for (auto networkInput : networkInputs) {
|
||||
auto input_precision = networkInput.second->getTensorDesc().getPrecision();
|
||||
|
||||
if (input_precision != InferenceEngine::Precision::FP32 &&
|
||||
input_precision != InferenceEngine::Precision::FP16 &&
|
||||
input_precision != InferenceEngine::Precision::I16 &&
|
||||
input_precision != InferenceEngine::Precision::U8) {
|
||||
THROW_IE_EXCEPTION << "Input image format " << input_precision << " is not supported yet.\n"
|
||||
<< "Supported formats are: FP32, FP16, I16 and U8.";
|
||||
}
|
||||
}
|
||||
|
||||
auto clonedNetwork = cloneNet(network);
|
||||
ConstTransformer transformator(clonedNetwork.get());
|
||||
transformator.fullTrim();
|
||||
|
||||
return std::make_shared<ExecutableNetwork>(*clonedNetwork, cfg);
|
||||
}
|
||||
// ! [plugin:load_exe_network_impl]
|
||||
|
||||
// ! [plugin:import_network_impl]
|
||||
InferenceEngine::ExecutableNetwork Plugin::ImportNetworkImpl(std::istream& model, const std::map<std::string, std::string>& config) {
|
||||
// TODO: Import network from stream is not mandatory functionality;
|
||||
// Can just throw an exception and remove the code below
|
||||
Configuration exportedCfg;
|
||||
|
||||
// some code below which reads exportedCfg from `model` stream
|
||||
// ..
|
||||
|
||||
auto cfg = Configuration(config, exportedCfg);
|
||||
|
||||
IExecutableNetwork::Ptr executableNetwork;
|
||||
auto exec_network_impl = std::make_shared<ExecutableNetwork>(model, cfg);
|
||||
executableNetwork.reset(new ExecutableNetworkBase<ExecutableNetworkInternal>(exec_network_impl),
|
||||
[](InferenceEngine::details::IRelease *p) {p->Release(); });
|
||||
|
||||
return InferenceEngine::ExecutableNetwork{ executableNetwork };
|
||||
}
|
||||
// ! [plugin:import_network_impl]
|
||||
|
||||
// ! [plugin:query_network]
|
||||
void Plugin::QueryNetwork(const ICNNNetwork &network, const ConfigMap& config, QueryNetworkResult &res) const {
|
||||
Configuration cfg{config, _cfg, false};
|
||||
res.rc = StatusCode::OK;
|
||||
|
||||
if (std::shared_ptr<const ngraph::Function> ngraphFunction = network.getFunction()) {
|
||||
auto ops = ngraphFunction->get_ordered_ops();
|
||||
for (auto&& op : ops) {
|
||||
// TODO: investigate if an op is actually supported by Template device
|
||||
bool supported = true;
|
||||
if (supported) {
|
||||
res.supportedLayersMap.insert({ op->get_friendly_name(), GetName() });
|
||||
}
|
||||
}
|
||||
} else {
|
||||
THROW_IE_EXCEPTION << "TEMPLATE plugin can query only IR v10 networks";
|
||||
}
|
||||
}
|
||||
// ! [plugin:query_network]
|
||||
|
||||
// ! [plugin:add_extension]
|
||||
void Plugin::AddExtension(InferenceEngine::IExtensionPtr /*extension*/) {
|
||||
// TODO: add extensions if plugin supports extensions
|
||||
}
|
||||
// ! [plugin:add_extension]
|
||||
|
||||
// ! [plugin:set_config]
|
||||
void Plugin::SetConfig(const ConfigMap &config) {
|
||||
_cfg = Configuration{config, _cfg};
|
||||
}
|
||||
// ! [plugin:set_config]
|
||||
|
||||
// ! [plugin:get_config]
|
||||
InferenceEngine::Parameter Plugin::GetConfig(const std::string& name, const std::map<std::string, InferenceEngine::Parameter> & /*options*/) const {
|
||||
return _cfg.Get(name);
|
||||
}
|
||||
// ! [plugin:get_config]
|
||||
|
||||
// ! [plugin:get_metric]
|
||||
InferenceEngine::Parameter Plugin::GetMetric(const std::string& name, const std::map<std::string, InferenceEngine::Parameter> & options) const {
|
||||
if (METRIC_KEY(SUPPORTED_METRICS) == name) {
|
||||
std::vector<std::string> supportedMetrics = {
|
||||
METRIC_KEY(AVAILABLE_DEVICES),
|
||||
METRIC_KEY(SUPPORTED_METRICS),
|
||||
METRIC_KEY(SUPPORTED_CONFIG_KEYS),
|
||||
METRIC_KEY(FULL_DEVICE_NAME),
|
||||
METRIC_KEY(OPTIMIZATION_CAPABILITIES),
|
||||
METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS) };
|
||||
IE_SET_METRIC_RETURN(SUPPORTED_METRICS, supportedMetrics);
|
||||
} else if (METRIC_KEY(SUPPORTED_CONFIG_KEYS) == name) {
|
||||
std::vector<std::string> confiKeys = {
|
||||
CONFIG_KEY(DEVICE_ID),
|
||||
CONFIG_KEY(PERF_COUNT) };
|
||||
IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, confiKeys);
|
||||
} else if (METRIC_KEY(AVAILABLE_DEVICES) == name) {
|
||||
// TODO: fill list of available devices
|
||||
std::vector<std::string> availableDevices = { "" };
|
||||
IE_SET_METRIC_RETURN(AVAILABLE_DEVICES, availableDevices);
|
||||
} else if (METRIC_KEY(FULL_DEVICE_NAME) == name) {
|
||||
std::string name = "Template Device Full Name";
|
||||
IE_SET_METRIC_RETURN(FULL_DEVICE_NAME, name);
|
||||
} else if (METRIC_KEY(OPTIMIZATION_CAPABILITIES) == name) {
|
||||
// TODO: fill actual list of supported capabilities: e.g. Template device supports only FP32
|
||||
std::vector<std::string> capabilities = { METRIC_VALUE(FP32), TEMPLATE_METRIC_VALUE(HARDWARE_CONVOLUTION) };
|
||||
IE_SET_METRIC_RETURN(OPTIMIZATION_CAPABILITIES, capabilities);
|
||||
} else if (METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS) == name) {
|
||||
// TODO: fill with actual values
|
||||
using uint = unsigned int;
|
||||
IE_SET_METRIC_RETURN(RANGE_FOR_ASYNC_INFER_REQUESTS, std::make_tuple(uint{1}, uint{1}, uint{1}));
|
||||
} else {
|
||||
THROW_IE_EXCEPTION << "Unsupported device metric: " << name;
|
||||
}
|
||||
}
|
||||
// ! [plugin:get_metric]
|
||||
|
||||
IE_SUPPRESS_DEPRECATED_START
|
||||
|
||||
// ! [plugin:create_plugin_engine]
|
||||
INFERENCE_PLUGIN_API(StatusCode) CreatePluginEngine(IInferencePlugin *&plugin, ResponseDesc *resp) noexcept {
|
||||
try {
|
||||
plugin = make_ie_compatible_plugin({2, 1, CI_BUILD_NUMBER, "templatePlugin"},
|
||||
std::make_shared<Plugin>());
|
||||
return OK;
|
||||
}
|
||||
catch (std::exception &ex) {
|
||||
return DescriptionBuffer(GENERAL_ERROR, resp) << ex.what();
|
||||
}
|
||||
}
|
||||
// ! [plugin:create_plugin_engine]
|
||||
|
||||
IE_SUPPRESS_DEPRECATED_END
|
||||
47
docs/template_plugin/src/template_plugin.hpp
Normal file
47
docs/template_plugin/src/template_plugin.hpp
Normal file
@@ -0,0 +1,47 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <inference_engine.hpp>
|
||||
#include <description_buffer.hpp>
|
||||
#include <cpp_interfaces/impl/ie_plugin_internal.hpp>
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <map>
|
||||
#include <unordered_map>
|
||||
#include <vector>
|
||||
|
||||
#include "template_executable_network.hpp"
|
||||
#include "template_config.hpp"
|
||||
|
||||
//! [plugin:header]
|
||||
namespace TemplatePlugin {
|
||||
|
||||
class Plugin : public InferenceEngine::InferencePluginInternal {
|
||||
public:
|
||||
using Ptr = std::shared_ptr<Plugin>;
|
||||
|
||||
Plugin();
|
||||
~Plugin() override = default;
|
||||
|
||||
void SetConfig(const std::map<std::string, std::string> &config) override;
|
||||
void QueryNetwork(const InferenceEngine::ICNNNetwork &network,
|
||||
const std::map<std::string, std::string>& config,
|
||||
InferenceEngine::QueryNetworkResult &res) const override;
|
||||
InferenceEngine::ExecutableNetworkInternal::Ptr
|
||||
LoadExeNetworkImpl(const InferenceEngine::ICNNNetwork &network,
|
||||
const std::map<std::string, std::string> &config) override;
|
||||
void AddExtension(InferenceEngine::IExtensionPtr extension) override;
|
||||
InferenceEngine::Parameter GetConfig(const std::string& name, const std::map<std::string, InferenceEngine::Parameter> & options) const override;
|
||||
InferenceEngine::Parameter GetMetric(const std::string& name, const std::map<std::string, InferenceEngine::Parameter> & options) const override;
|
||||
InferenceEngine::ExecutableNetwork ImportNetworkImpl(std::istream& model, const std::map<std::string, std::string>& config) override;
|
||||
|
||||
private:
|
||||
Configuration _cfg;
|
||||
};
|
||||
|
||||
} // namespace TemplatePlugin
|
||||
//! [plugin:header]
|
||||
18
docs/template_plugin/tests/functional/CMakeLists.txt
Normal file
18
docs/template_plugin/tests/functional/CMakeLists.txt
Normal file
@@ -0,0 +1,18 @@
|
||||
# Copyright (C) 2019 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
set(TARGET_NAME TemplateFuncTests)
|
||||
|
||||
addIeTargetTest(
|
||||
NAME ${TARGET_NAME}
|
||||
ROOT ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
DEPENDENCIES
|
||||
templatePlugin
|
||||
LINK_LIBRARIES
|
||||
IE::funcSharedTests
|
||||
ADD_CPPLINT
|
||||
LABELS
|
||||
TEMPLATE
|
||||
)
|
||||
@@ -0,0 +1,49 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "multi-device/multi_device_config.hpp"
|
||||
|
||||
#include "behavior/config.hpp"
|
||||
|
||||
namespace {
|
||||
|
||||
const std::vector<InferenceEngine::Precision> netPrecisions = {
|
||||
InferenceEngine::Precision::FP32,
|
||||
InferenceEngine::Precision::FP16
|
||||
};
|
||||
|
||||
const std::vector<std::map<std::string, std::string>> configs = {
|
||||
{}
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigTests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
IncorrectConfigTests::getTestCaseName);
|
||||
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigAPITests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
IncorrectConfigAPITests::getTestCaseName);
|
||||
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, CorrectConfigAPITests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
CorrectConfigAPITests::getTestCaseName);
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_Multi_BehaviorTests, CorrectConfigTests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
CorrectConfigAPITests::getTestCaseName);
|
||||
} // namespace
|
||||
@@ -0,0 +1,25 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "behavior/exec_graph_info.hpp"
|
||||
|
||||
namespace {
|
||||
const std::vector<InferenceEngine::Precision> netPrecisions = {
|
||||
InferenceEngine::Precision::FP32,
|
||||
InferenceEngine::Precision::FP16
|
||||
};
|
||||
|
||||
const std::vector<std::map<std::string, std::string>> configs = {
|
||||
{}
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, ExecGraphTests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
ExecGraphTests::getTestCaseName);
|
||||
} // namespace
|
||||
@@ -0,0 +1,24 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "behavior/infer_request.hpp"
|
||||
namespace {
|
||||
const std::vector<InferenceEngine::Precision> netPrecisions = {
|
||||
InferenceEngine::Precision::FP32,
|
||||
InferenceEngine::Precision::FP16
|
||||
};
|
||||
|
||||
const std::vector<std::map<std::string, std::string>> configs = {
|
||||
{}
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestTests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
InferRequestTests::getTestCaseName);
|
||||
} // namespace
|
||||
@@ -0,0 +1,25 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "behavior/infer_request_callback.hpp"
|
||||
|
||||
namespace {
|
||||
const std::vector<InferenceEngine::Precision> netPrecisions = {
|
||||
InferenceEngine::Precision::FP32,
|
||||
InferenceEngine::Precision::FP16
|
||||
};
|
||||
|
||||
const std::vector<std::map<std::string, std::string>> configs = {
|
||||
{}
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, CallbackTests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
CallbackTests::getTestCaseName);
|
||||
} // namespace
|
||||
@@ -0,0 +1,25 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "behavior/infer_request_config.hpp"
|
||||
|
||||
namespace {
|
||||
const std::vector<InferenceEngine::Precision> netPrecisions = {
|
||||
InferenceEngine::Precision::FP32,
|
||||
InferenceEngine::Precision::FP16
|
||||
};
|
||||
|
||||
const std::vector<std::map<std::string, std::string>> configs = {
|
||||
{}
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferConfigTests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
InferConfigTests::getTestCaseName);
|
||||
} // namespace
|
||||
@@ -0,0 +1,28 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "multi-device/multi_device_config.hpp"
|
||||
|
||||
#include "behavior/infer_request_input.hpp"
|
||||
|
||||
namespace {
|
||||
|
||||
const std::vector<InferenceEngine::Precision> netPrecisions = {
|
||||
InferenceEngine::Precision::FP32,
|
||||
InferenceEngine::Precision::FP16
|
||||
};
|
||||
|
||||
const std::vector<std::map<std::string, std::string>> configs = {
|
||||
{}
|
||||
};
|
||||
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestInputTests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
InferRequestInputTests::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
@@ -0,0 +1,28 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "multi-device/multi_device_config.hpp"
|
||||
|
||||
#include "behavior/infer_request_output.hpp"
|
||||
|
||||
namespace {
|
||||
|
||||
const std::vector<InferenceEngine::Precision> netPrecisions = {
|
||||
InferenceEngine::Precision::FP32,
|
||||
InferenceEngine::Precision::FP16
|
||||
};
|
||||
|
||||
const std::vector<std::map<std::string, std::string>> configs = {
|
||||
{}
|
||||
};
|
||||
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestOutputTests,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
InferRequestOutputTests::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
@@ -0,0 +1,26 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "multi-device/multi_device_config.hpp"
|
||||
|
||||
#include "behavior/set_preprocess.hpp"
|
||||
|
||||
namespace {
|
||||
const std::vector<InferenceEngine::Precision> netPrecisions = {
|
||||
InferenceEngine::Precision::FP32,
|
||||
InferenceEngine::Precision::FP16
|
||||
};
|
||||
|
||||
const std::vector<std::map<std::string, std::string>> configs = {
|
||||
{}
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PreprocessTest,
|
||||
::testing::Combine(
|
||||
::testing::ValuesIn(netPrecisions),
|
||||
::testing::Values("TEMPLATE"),
|
||||
::testing::ValuesIn(configs)),
|
||||
PreprocessTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
13
docs/template_plugin/tests/functional/skip_tests_config.cpp
Normal file
13
docs/template_plugin/tests/functional/skip_tests_config.cpp
Normal file
@@ -0,0 +1,13 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <vector>
|
||||
#include <string>
|
||||
|
||||
#include "functional_test_utils/skip_tests_config.hpp"
|
||||
|
||||
std::vector<std::string> disabledTestPatterns() {
|
||||
return {
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
# Copyright (C) 2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
disable_deprecated_warnings()
|
||||
|
||||
set(TARGET_NAME TemplateBehaviorTests)
|
||||
|
||||
file(GLOB_RECURSE TEST_INCLUDE
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
|
||||
|
||||
file(GLOB_RECURSE TEST_SRC
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/*.cpp
|
||||
)
|
||||
|
||||
list(APPEND DEPENDENCIES
|
||||
templatePlugin)
|
||||
|
||||
source_group("src" FILES ${TEST_SRC})
|
||||
source_group("include" FILES ${TEST_INCLUDE})
|
||||
|
||||
add_executable(${TARGET_NAME}
|
||||
${TEST_SRC}
|
||||
${TEST_INCLUDE})
|
||||
|
||||
target_link_libraries(${TARGET_NAME} PRIVATE IE::IEBehaviorSharedTests)
|
||||
|
||||
add_test(NAME ${TARGET_NAME}
|
||||
COMMAND ${TARGET_NAME})
|
||||
|
||||
add_dependencies(${TARGET_NAME} ${DEPENDENCIES})
|
||||
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
@@ -0,0 +1,19 @@
|
||||
// Copyright (C) 2018-2019 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "holders_tests.hpp"
|
||||
|
||||
#include <vector>
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(ReleaseOrderTests, CPP_HoldersTests, testing::Combine(testing::ValuesIn(std::vector<std::vector<int>> {
|
||||
// 0 - plugin
|
||||
// 1 - executable_network
|
||||
// 2 - infer_request
|
||||
{0, 1, 2},
|
||||
{0, 2, 1},
|
||||
{1, 0, 2},
|
||||
{1, 2, 0},
|
||||
{2, 0, 1},
|
||||
{2, 1, 0},
|
||||
}), testing::Values("TEMPLATE")));
|
||||
@@ -0,0 +1,93 @@
|
||||
// Copyright (C) 2018-2019 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "behavior_test_plugin_layers.hpp"
|
||||
|
||||
|
||||
conv_test_params deconv_test_cases[] = {
|
||||
conv_test_params("TEMPLATE", conv_case),
|
||||
};
|
||||
|
||||
conv_test_params conv_test_cases[] = {
|
||||
conv_test_params("TEMPLATE", conv_dw_case),
|
||||
};
|
||||
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, DeconvolutionLayerTest,
|
||||
::testing::ValuesIn(deconv_test_cases),
|
||||
getTestName<conv_test_params>);
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, ConvolutionLayerTest,
|
||||
::testing::ValuesIn(conv_test_cases),
|
||||
getTestName<conv_test_params>);
|
||||
|
||||
|
||||
pool_test_params roi_pool_test_cases[] = {
|
||||
pool_test_params("TEMPLATE", "FP32", pool_case),
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, ROIPoolingLayerTest,
|
||||
::testing::ValuesIn(roi_pool_test_cases),
|
||||
getTestName<pool_test_params>);
|
||||
|
||||
activ_test_params activ_test_cases[] = {
|
||||
activ_test_params("TEMPLATE", "FP16", activation_case),
|
||||
};
|
||||
|
||||
activ_test_params clamp_test_cases[] = {
|
||||
activ_test_params("TEMPLATE", "FP16", clamp_case),
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, ActivationLayerTest,
|
||||
::testing::ValuesIn(activ_test_cases),
|
||||
getTestName<activ_test_params>);
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, ReLULayerTest,
|
||||
::testing::Values(activ_test_params("TEMPLATE", "FP32", activation_case)),
|
||||
getTestName<activ_test_params>);
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, ClampLayerTest,
|
||||
::testing::ValuesIn(clamp_test_cases),
|
||||
getTestName<activ_test_params>);
|
||||
|
||||
norm_test_params norm_test_cases[] = {
|
||||
norm_test_params("TEMPLATE", "FP32", norm_case),
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, NormalizeLayerTest,
|
||||
::testing::ValuesIn(norm_test_cases),
|
||||
getTestName<norm_test_params>);
|
||||
|
||||
scale_test_params scale_test_cases[] = {
|
||||
scale_test_params("TEMPLATE", "FP32", scale_case),
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, ScalingLayerTest,
|
||||
::testing::ValuesIn(scale_test_cases),
|
||||
getTestName<scale_test_params>);
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, ShapingLayerTest,
|
||||
::testing::Values(shaping_test_params("TEMPLATE", "FP32", shape_case)),
|
||||
getTestName<shaping_test_params>);
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, ElementWiseLayerTest,
|
||||
::testing::Values(element_test_params("TEMPLATE", "FP32", shape_case)),
|
||||
getTestName<element_test_params>);
|
||||
|
||||
object_test_params object_test_cases[] = {
|
||||
object_test_params("TEMPLATE", "FP32", object_case),
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, ObjectDetectionLayerTest,
|
||||
::testing::ValuesIn(object_test_cases),
|
||||
getTestName<object_test_params>);
|
||||
|
||||
memory_test_params memory_test_cases[] = {
|
||||
memory_test_params("TEMPLATE", "FP32", memory_case),
|
||||
};
|
||||
|
||||
// FIXME
|
||||
// #if (defined INSTANTIATE_TESTS)
|
||||
// INSTANTIATE_TEST_CASE_P(BehaviorTest, MemoryLayerTest,
|
||||
// ::testing::ValuesIn(memory_test_cases),
|
||||
// getTestName<memory_test_params>);
|
||||
// #endif
|
||||
@@ -0,0 +1,36 @@
|
||||
// Copyright (C) 2018-2019 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "behavior_test_plugin_layout.hpp"
|
||||
|
||||
layout_test_params power_test_cases[] = {
|
||||
layout_test_params("TEMPLATE", "FP16", Layout::NCHW, power_params({ { 1, 3, 16, 16 } }, 1, 2, 2)),
|
||||
};
|
||||
|
||||
layout_test_params conv_test_cases_1[] = {
|
||||
layout_test_params("TEMPLATE", "FP16", Layout::NCHW, power_params({ { 1, 3, 16, 16 } }, 1, 2, 2)),
|
||||
};
|
||||
|
||||
layout_test_params power_neg_test_cases[] = {
|
||||
// Graph Error Description: Error: Tensor size should not be 0.
|
||||
layout_test_params("TEMPLATE", "FP16", Layout::NC, power_params({ { 1, 3 } }, 1, 2, 2)),
|
||||
layout_test_params("TEMPLATE", "FP16", Layout::CHW, power_params({ { 3, 32, 16 } }, 1, 2, 2)),
|
||||
};
|
||||
|
||||
layout_test_params conv_neg_test_cases[] = {
|
||||
// LoadNetwork hangs if Network has 1 dims format: CVS-8508
|
||||
layout_test_params("TEMPLATE", "FP16", Layout::C, power_params({ { 3 } }, 1, 2, 2)),
|
||||
layout_test_params("TEMPLATE", "FP16", Layout::NC, power_params({ { 1, 3 } }, 1, 2, 2)),
|
||||
layout_test_params("TEMPLATE", "FP16", Layout::CHW, power_params({ { 3, 32, 16 } }, 1, 2, 2)),
|
||||
};
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, LayoutTestCanLoadPower,
|
||||
::testing::ValuesIn(power_test_cases), getTestName);
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, LayoutTestCanLoadConv,
|
||||
::testing::ValuesIn(conv_test_cases_1), getTestName);
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, LayoutTestCanNotLoadPower,
|
||||
::testing::ValuesIn(power_neg_test_cases), getTestName);
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, LayoutTestCanNotLoadConv,
|
||||
::testing::ValuesIn(conv_neg_test_cases), getTestName);
|
||||
@@ -0,0 +1,14 @@
|
||||
// Copyright (C) 2018-2019 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "behavior_test_plugin_unsupported.hpp"
|
||||
#include "template_test_data.hpp"
|
||||
|
||||
// INSTANTIATE_TEST_CASE_P(BehaviorTest, BehaviorPluginTestAllUnsupported, ValuesIn(allUnSupportedValues),
|
||||
// getTestCaseName);
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, BehaviorPluginTestTypeUnsupported, ValuesIn(typeUnSupportedValues),
|
||||
getTestCaseName);
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, BehaviorPluginTestBatchUnsupported, ValuesIn(batchUnSupportedValues),
|
||||
getTestCaseName);
|
||||
@@ -0,0 +1,8 @@
|
||||
// Copyright (C) 2018-2019 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "behavior_test_plugin_version.hpp"
|
||||
#include "template_test_data.hpp"
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, BehaviorPluginTestVersion, ValuesIn(add_element_into_array(supportedValues, BEH_HETERO)), getTestCaseName);
|
||||
@@ -0,0 +1,12 @@
|
||||
// Copyright (C) 2018-2019 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "behavior_test_plugin.h"
|
||||
#include "behavior_test_plugins.hpp"
|
||||
#include "template_test_data.hpp"
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, BehaviorPluginTestInput, ValuesIn(allInputSupportedValues),
|
||||
getTestCaseName);
|
||||
INSTANTIATE_TEST_CASE_P(BehaviorTest, BehaviorPluginTestOutput, ValuesIn(allOutputSupportedValues),
|
||||
getOutputTestCaseName);
|
||||
@@ -0,0 +1,71 @@
|
||||
// Copyright (C) 2018-2019 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "behavior_test_plugin.h"
|
||||
|
||||
// correct params
|
||||
#define BEH_HETERO BehTestParams("HETERO", \
|
||||
FuncTestUtils::TestModel::convReluNormPoolFcModelFP32.model_xml_str, \
|
||||
FuncTestUtils::TestModel::convReluNormPoolFcModelFP32.weights_blob, \
|
||||
Precision::FP32)
|
||||
|
||||
#define BEH_TEMPLATE BehTestParams("TEMPLATE", \
|
||||
FuncTestUtils::TestModel::convReluNormPoolFcModelFP16.model_xml_str, \
|
||||
FuncTestUtils::TestModel::convReluNormPoolFcModelFP16.weights_blob, \
|
||||
Precision::FP32)
|
||||
|
||||
// all parameters are unsupported - reversed
|
||||
#define BEH_US_ALL_TEMPLATE BehTestParams("TEMPLATE", \
|
||||
FuncTestUtils::TestModel::convReluNormPoolFcModelQ78.model_xml_str, \
|
||||
FuncTestUtils::TestModel::convReluNormPoolFcModelQ78.weights_blob, \
|
||||
Precision::Q78)
|
||||
|
||||
const BehTestParams supportedValues[] = {
|
||||
BEH_TEMPLATE,
|
||||
};
|
||||
|
||||
const BehTestParams requestsSupportedValues[] = {
|
||||
BEH_TEMPLATE,
|
||||
};
|
||||
|
||||
const BehTestParams allInputSupportedValues[] = {
|
||||
BEH_TEMPLATE,
|
||||
BEH_TEMPLATE.withIn(Precision::FP16),
|
||||
BEH_TEMPLATE.withIn(Precision::U8),
|
||||
BEH_TEMPLATE.withIn(Precision::I16),
|
||||
};
|
||||
|
||||
const BehTestParams allOutputSupportedValues[] = {
|
||||
BEH_TEMPLATE,
|
||||
BEH_TEMPLATE.withOut(Precision::FP16),
|
||||
};
|
||||
|
||||
const BehTestParams typeUnSupportedValues[] = {
|
||||
BEH_TEMPLATE.withIn(Precision::Q78),
|
||||
BEH_TEMPLATE.withIn(Precision::U16),
|
||||
BEH_TEMPLATE.withIn(Precision::I8),
|
||||
BEH_TEMPLATE.withIn(Precision::I32),
|
||||
};
|
||||
|
||||
const BehTestParams batchUnSupportedValues[] = {
|
||||
BEH_TEMPLATE.withBatchSize(0),
|
||||
};
|
||||
|
||||
const BehTestParams allUnSupportedValues[] = {
|
||||
BEH_US_ALL_TEMPLATE,
|
||||
};
|
||||
|
||||
const std::vector<BehTestParams> withCorrectConfValuesNetworkOnly = {
|
||||
BEH_TEMPLATE.withConfig({ { KEY_DEVICE_ID, "0" } }),
|
||||
};
|
||||
|
||||
const BehTestParams withIncorrectConfValues[] = {
|
||||
BEH_TEMPLATE.withConfig({ { KEY_CPU_BIND_THREAD, "ON" } }),
|
||||
};
|
||||
|
||||
const std::vector<BehTestParams> withCorrectConfValues = {
|
||||
BEH_TEMPLATE.withConfig({}),
|
||||
};
|
||||
@@ -0,0 +1,13 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <vector>
|
||||
#include <string>
|
||||
|
||||
#include "functional_test_utils/skip_tests_config.hpp"
|
||||
|
||||
std::vector<std::string> disabledTestPatterns() {
|
||||
return {
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
# Copyright (C) 2019 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
disable_deprecated_warnings()
|
||||
|
||||
# [cmake:functional_tests]
|
||||
set(TARGET_NAME TemplateFunctionalTests)
|
||||
|
||||
file(GLOB_RECURSE TEST_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
|
||||
|
||||
add_executable(${TARGET_NAME} ${TEST_SOURCES})
|
||||
|
||||
# link a library with common Inference Engine tests
|
||||
target_link_libraries(${TARGET_NAME} PRIVATE IE::IESharedTests)
|
||||
|
||||
# make sure plugin is built before tests are run
|
||||
add_dependencies(${TARGET_NAME} templatePlugin)
|
||||
# [cmake:functional_tests]
|
||||
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
@@ -0,0 +1,205 @@
|
||||
// Copyright (C) 2018-2019 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <utility>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "ie_class.hpp"
|
||||
|
||||
//
|
||||
// IE Class Common tests with <pluginName, deviceName params>
|
||||
//
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassBasicTestP, IEClassBasicTestP,
|
||||
::testing::Values(std::make_pair("templatePlugin", "TEMPLATE")));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassNetworkTestP, IEClassNetworkTestP,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
//
|
||||
// IE Class GetMetric
|
||||
//
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetMetricTest, IEClassGetMetricTest_SUPPORTED_CONFIG_KEYS,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetMetricTest, IEClassGetMetricTest_SUPPORTED_METRICS,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetMetricTest, IEClassGetMetricTest_AVAILABLE_DEVICES,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetMetricTest, IEClassGetMetricTest_FULL_DEVICE_NAME,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetMetricTest, IEClassGetMetricTest_OPTIMIZATION_CAPABILITIES,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetMetricTest, IEClassGetMetricTest_RANGE_FOR_ASYNC_INFER_REQUESTS,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetMetricTest, IEClassGetMetricTest_ThrowUnsupported,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetConfigTest, IEClassGetConfigTest_ThrowUnsupported,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetAvailableDevices, IEClassGetAvailableDevices,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
|
||||
//
|
||||
// IE Class SetConfig
|
||||
//
|
||||
|
||||
using IEClassSetConfigTestHETERO = IEClassNetworkTest;
|
||||
TEST_F(IEClassSetConfigTestHETERO, nightly_SetConfigNoThrow) {
|
||||
{
|
||||
Core ie;
|
||||
Parameter p;
|
||||
|
||||
ASSERT_NO_THROW(ie.SetConfig({ { HETERO_CONFIG_KEY(DUMP_GRAPH_DOT), CONFIG_VALUE(YES) } }, "HETERO"));
|
||||
ASSERT_NO_THROW(p = ie.GetConfig("HETERO", HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)));
|
||||
bool dump = p.as<bool>();
|
||||
|
||||
ASSERT_TRUE(dump);
|
||||
}
|
||||
|
||||
{
|
||||
Core ie;
|
||||
Parameter p;
|
||||
|
||||
ASSERT_NO_THROW(ie.SetConfig({ { HETERO_CONFIG_KEY(DUMP_GRAPH_DOT), CONFIG_VALUE(NO) } }, "HETERO"));
|
||||
ASSERT_NO_THROW(p = ie.GetConfig("HETERO", HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)));
|
||||
bool dump = p.as<bool>();
|
||||
|
||||
ASSERT_FALSE(dump);
|
||||
}
|
||||
|
||||
{
|
||||
Core ie;
|
||||
Parameter p;
|
||||
|
||||
ASSERT_NO_THROW(ie.GetMetric("HETERO", METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
|
||||
ASSERT_NO_THROW(ie.SetConfig({ { HETERO_CONFIG_KEY(DUMP_GRAPH_DOT), CONFIG_VALUE(YES) } }, "HETERO"));
|
||||
ASSERT_NO_THROW(p = ie.GetConfig("HETERO", HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)));
|
||||
bool dump = p.as<bool>();
|
||||
|
||||
ASSERT_TRUE(dump);
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// IE Class GetConfig
|
||||
//
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassGetConfigTest, IEClassGetConfigTest,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
using IEClassGetConfigTestTEMPLATE = IEClassNetworkTest;
|
||||
TEST_F(IEClassGetConfigTestTEMPLATE, nightly_GetConfigNoThrow) {
|
||||
Core ie;
|
||||
Parameter p;
|
||||
std::string deviceName = "TEMPLATE";
|
||||
|
||||
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
|
||||
std::vector<std::string> configValues = p;
|
||||
|
||||
for (auto && confKey : configValues) {
|
||||
if (CONFIG_KEY(DEVICE_ID) == confKey) {
|
||||
std::string defaultDeviceID = ie.GetConfig(deviceName, CONFIG_KEY(DEVICE_ID));
|
||||
std::cout << CONFIG_KEY(DEVICE_ID) << " : " << defaultDeviceID << std::endl;
|
||||
} else if (CONFIG_KEY(PERF_COUNT) == confKey) {
|
||||
bool defaultPerfCount = ie.GetConfig(deviceName, CONFIG_KEY(PERF_COUNT));
|
||||
std::cout << CONFIG_KEY(PERF_COUNT) << " : " << defaultPerfCount << std::endl;
|
||||
} else if (CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS) == confKey) {
|
||||
bool defaultExclusive = ie.GetConfig(deviceName, CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS));
|
||||
std::cout << CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS) << " : " << defaultExclusive << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// Executable Network GetMetric
|
||||
//
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS,
|
||||
::testing::Values("TEMPLATE", "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS,
|
||||
::testing::Values("TEMPLATE", "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_NETWORK_NAME,
|
||||
::testing::Values("TEMPLATE", "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS,
|
||||
::testing::Values("TEMPLATE", "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported, IEClassExecutableNetworkGetMetricTest,
|
||||
::testing::Values("TEMPLATE", "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
|
||||
//
|
||||
// Executable Network GetConfig / SetConfig
|
||||
//
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassExecutableNetworkGetConfigTest, IEClassExecutableNetworkGetConfigTest,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassExecutableNetworkSetConfigTest, IEClassExecutableNetworkSetConfigTest,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
// IE Class Query network
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassQueryNetworkTest, IEClassQueryNetworkTest,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
// IE Class Load network
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassLoadNetworkTest, IEClassLoadNetworkTest,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
//
|
||||
// Hetero Executable Network GetMetric
|
||||
//
|
||||
|
||||
#ifdef ENABLE_MKL_DNN
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
nightly_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK,
|
||||
::testing::Values("TEMPLATE"));
|
||||
|
||||
#endif // ENABLE_MKL_DNN
|
||||
@@ -0,0 +1,17 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "test_model_repo.hpp"
|
||||
|
||||
std::string get_model_repo() {
|
||||
return ":";
|
||||
}
|
||||
|
||||
const char* TestDataHelpers::getModelPathNonFatal() noexcept {
|
||||
return TestDataHelpers::getModelPathNonFatalDefault();
|
||||
}
|
||||
|
||||
std::string TestDataHelpers::get_data_path() {
|
||||
return TestDataHelpers::get_data_path_default();
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
# Get Started with OpenVINO™ Deep Learning Deployment Toolkit (DLDT) on Linux*
|
||||
# Get Started with OpenVINO™ Toolkit on Linux*
|
||||
|
||||
This guide provides you with the information that will help you to start using
|
||||
the DLDT on Linux\*. With this guide, you will learn how to:
|
||||
the OpenVINO™ Toolkit on Linux\*. With this guide, you will learn how to:
|
||||
|
||||
1. [Configure the Model Optimizer](#configure-the-model-optimizer)
|
||||
2. [Prepare a model for sample inference](#prepare-a-model-for-sample-inference)
|
||||
@@ -10,13 +10,13 @@ the DLDT on Linux\*. With this guide, you will learn how to:
|
||||
3. [Run the Image Classification Sample Application with the model](#run-the-image-classification-sample-application)
|
||||
|
||||
## Prerequisites
|
||||
1. This guide assumes that you have already cloned the `dldt` repo and
|
||||
1. This guide assumes that you have already cloned the `openvino` repo and
|
||||
successfully built the Inference Engine and Samples using the
|
||||
[build instructions](inference-engine/README.md).
|
||||
2. The original structure of the repository directories remains unchanged.
|
||||
|
||||
> **NOTE**: Below, the directory to which the `dldt` repository is cloned is
|
||||
referred to as `<DLDT_DIR>`.
|
||||
> **NOTE**: Below, the directory to which the `openvino` repository is cloned is
|
||||
referred to as `<OPENVINO_DIR>`.
|
||||
|
||||
## Configure the Model Optimizer
|
||||
|
||||
@@ -53,7 +53,7 @@ If you see error messages, check for any missing dependencies.
|
||||
|
||||
1. Go to the Model Optimizer prerequisites directory:
|
||||
```sh
|
||||
cd <DLDT_DIR>/model_optimizer/install_prerequisites
|
||||
cd <OPENVINO_DIR>/model_optimizer/install_prerequisites
|
||||
```
|
||||
2. Run the script to configure the Model Optimizer for Caffe,
|
||||
TensorFlow, MXNet, Kaldi\*, and ONNX:
|
||||
@@ -68,7 +68,7 @@ Configure individual frameworks separately **ONLY** if you did not select
|
||||
|
||||
1. Go to the Model Optimizer prerequisites directory:
|
||||
```sh
|
||||
cd <DLDT_DIR>/model_optimizer/install_prerequisites
|
||||
cd <OPENVINO_DIR>/model_optimizer/install_prerequisites
|
||||
```
|
||||
2. Run the script for your model framework. You can run more than one script:
|
||||
|
||||
@@ -162,20 +162,20 @@ as `<models_dir>` below) with the Model Downloader:
|
||||
|
||||
**For CPU (FP32):**
|
||||
```sh
|
||||
python3 <DLDT_DIR>/model_optimizer/mo.py --input_model <models_dir>/classification/squeezenet/1.1/caffe/squeezenet1.1.caffemodel --data_type FP32 --output_dir <ir_dir>
|
||||
python3 <OPENVINO_DIR>/model_optimizer/mo.py --input_model <models_dir>/classification/squeezenet/1.1/caffe/squeezenet1.1.caffemodel --data_type FP32 --output_dir <ir_dir>
|
||||
```
|
||||
|
||||
**For GPU and MYRIAD (FP16):**
|
||||
```sh
|
||||
python3 <DLDT_DIR>/model_optimizer/mo.py --input_model <models_dir>/classification/squeezenet/1.1/caffe/squeezenet1.1.caffemodel --data_type FP16 --output_dir <ir_dir>
|
||||
python3 <OPENVINO_DIR>/model_optimizer/mo.py --input_model <models_dir>/classification/squeezenet/1.1/caffe/squeezenet1.1.caffemodel --data_type FP16 --output_dir <ir_dir>
|
||||
```
|
||||
After the Model Optimizer script is completed, the produced IR files (`squeezenet1.1.xml`, `squeezenet1.1.bin`) are in the specified `<ir_dir>` directory.
|
||||
|
||||
3. Copy the `squeezenet1.1.labels` file from the `<DLDT_DIR>/inference-engine/samples/sample_data/`
|
||||
3. Copy the `squeezenet1.1.labels` file from the `<OPENVINO_DIR>/scripts/demo/`
|
||||
folder to the model IR directory. This file contains the classes that ImageNet
|
||||
uses so that the inference results show text instead of classification numbers:
|
||||
```sh
|
||||
cp <DLDT_DIR>/inference-engine/samples/sample_data/squeezenet1.1.labels <ir_dir>
|
||||
cp <OPENVINO_DIR>/scripts/demo/squeezenet1.1.labels <ir_dir>
|
||||
```
|
||||
|
||||
Now you are ready to run the Image Classification Sample Application.
|
||||
@@ -184,28 +184,28 @@ Now you are ready to run the Image Classification Sample Application.
|
||||
|
||||
The Inference Engine sample applications are automatically compiled when you
|
||||
built the Inference Engine using the [build instructions](inference-engine/README.md).
|
||||
The binary files are located in the `<DLDT_DIR>/inference-engine/bin/intel64/Release`
|
||||
The binary files are located in the `<OPENVINO_DIR>/inference-engine/bin/intel64/Release`
|
||||
directory.
|
||||
|
||||
To run the Image Classification sample application with an input image on the prepared IR:
|
||||
|
||||
1. Go to the samples build directory:
|
||||
```sh
|
||||
cd <DLDT_DIR>/inference-engine/bin/intel64/Release
|
||||
cd <OPENVINO_DIR>/inference-engine/bin/intel64/Release
|
||||
|
||||
2. Run the sample executable with specifying the `car.png` file from the
|
||||
`<DLDT_DIR>/inference-engine/samples/sample_data/` directory as an input
|
||||
`<OPENVINO_DIR>/scripts/demo/` directory as an input
|
||||
image, the IR of your model and a plugin for a hardware device to perform
|
||||
inference on:
|
||||
|
||||
**For CPU:**
|
||||
```sh
|
||||
./classification_sample -i <DLDT_DIR>/inference-engine/samples/sample_data/car.png -m <ir_dir>/squeezenet1.1.xml -d CPU
|
||||
./classification_sample -i <OPENVINO_DIR>/scripts/demo/car.png -m <ir_dir>/squeezenet1.1.xml -d CPU
|
||||
```
|
||||
|
||||
**For GPU:**
|
||||
```sh
|
||||
./classification_sample -i <DLDT_DIR>/inference-engine/samples/sample_data/car.png -m <ir_dir>/squeezenet1.1.xml -d GPU
|
||||
./classification_sample -i <OPENVINO_DIR>/scripts/demo/car.png -m <ir_dir>/squeezenet1.1.xml -d GPU
|
||||
```
|
||||
|
||||
**For MYRIAD:**
|
||||
@@ -214,14 +214,14 @@ To run the Image Classification sample application with an input image on the pr
|
||||
Stick or Intel® Neural Compute Stick 2) with the MYRIAD plugin requires
|
||||
performing [additional hardware configuration steps](inference-engine/README.md#optional-additional-installation-steps-for-the-intel-movidius-neural-compute-stick-and-neural-compute-stick-2).
|
||||
```sh
|
||||
./classification_sample -i <DLDT_DIR>/inference-engine/samples/sample_data/car.png -m <ir_dir>/squeezenet1.1.xml -d MYRIAD
|
||||
./classification_sample -i <OPENVINO_DIR>/scripts/demo/car.png -m <ir_dir>/squeezenet1.1.xml -d MYRIAD
|
||||
```
|
||||
|
||||
When the Sample Application completes, you will have the label and confidence for the top-10 categories printed on the screen. Below is a sample output with inference results on CPU:
|
||||
```sh
|
||||
Top 10 results:
|
||||
|
||||
Image /home/user/dldt/inference-engine/samples/sample_data/car.png
|
||||
Image /home/user/openvino/scripts/demo/car.png
|
||||
|
||||
classid probability label
|
||||
------- ----------- -----
|
||||
|
||||
@@ -25,12 +25,6 @@ if (ENABLE_FUZZING)
|
||||
enable_fuzzing()
|
||||
endif()
|
||||
|
||||
find_package(ngraph QUIET)
|
||||
if(NOT ngraph_FOUND)
|
||||
set(ngraph_DIR ${CMAKE_BINARY_DIR}/ngraph)
|
||||
endif()
|
||||
find_package(ngraph REQUIRED)
|
||||
|
||||
find_package(Threads REQUIRED)
|
||||
|
||||
unset(IEDeveloperPackageTargets CACHE)
|
||||
@@ -60,7 +54,7 @@ function(ie_developer_export)
|
||||
APPEND FILE "${CMAKE_BINARY_DIR}/targets_developer.cmake")
|
||||
|
||||
# Custom target to build only Inference Engine Developer Package targets
|
||||
add_custom_target(ie_dev_targets ALL DEPENDS ${IEDeveloperPackageTargets})
|
||||
add_custom_target(ie_dev_targets ALL DEPENDS ${IEDeveloperPackageTargets} gflags)
|
||||
endfunction()
|
||||
|
||||
add_subdirectory(thirdparty)
|
||||
@@ -74,9 +68,24 @@ endif()
|
||||
|
||||
add_subdirectory(tools)
|
||||
|
||||
function(ie_build_samples)
|
||||
# samples should be build with the same flags as from OpenVINO package,
|
||||
# so unset all flags
|
||||
foreach(var CMAKE_CXX_FLAGS CMAKE_C_FLAGS CMAKE_CXX_STANDARD
|
||||
CMAKE_EXE_LINKER_FLAGS CMAKE_POLICY_DEFAULT_CMP0063
|
||||
CMAKE_CXX_VISIBILITY_PRESET CMAKE_C_VISIBILITY_PRESET
|
||||
CMAKE_VISIBILITY_INLINES_HIDDEN CMAKE_POSITION_INDEPENDENT_CODE
|
||||
THREADS_PREFER_PTHREAD_FLAG X86_64 X86 ARM AARCH64 LINUX
|
||||
MINGW64 CMAKE_BUILD_TYPE CMAKE_MACOSX_RPATH)
|
||||
unset(${var})
|
||||
endforeach()
|
||||
include(sanitizer)
|
||||
add_subdirectory(samples)
|
||||
endfunction()
|
||||
|
||||
# gflags and format_reader targets are kept inside of samples directory and
|
||||
# they must be built even if samples build is disabled (required for tests and tools).
|
||||
add_subdirectory(samples)
|
||||
ie_build_samples()
|
||||
|
||||
file(GLOB_RECURSE SAMPLES_SOURCES samples/*.cpp samples/*.hpp samples/*.h)
|
||||
add_cpplint_target(sample_cpplint
|
||||
@@ -109,7 +118,7 @@ if(UNIX)
|
||||
PATTERN *.bat EXCLUDE
|
||||
PATTERN speech_libs_and_demos EXCLUDE)
|
||||
elseif(WIN32)
|
||||
install(DIRECTORY samples
|
||||
install(DIRECTORY samples/
|
||||
DESTINATION ${IE_CPACK_IE_DIR}/samples/cpp
|
||||
COMPONENT cpp_samples
|
||||
USE_SOURCE_PERMISSIONS
|
||||
@@ -150,6 +159,17 @@ if(ENABLE_PYTHON)
|
||||
COMPONENT python_samples)
|
||||
endif()
|
||||
|
||||
# install speech demo files
|
||||
|
||||
if(SPEECH_LIBS_AND_DEMOS)
|
||||
ie_cpack_add_component(speech_demo_files REQUIRED)
|
||||
|
||||
install(DIRECTORY ${TEMP}/deployment_tools
|
||||
${TEMP}/data_processing
|
||||
DESTINATION .
|
||||
COMPONENT speech_demo_files)
|
||||
endif()
|
||||
|
||||
#
|
||||
# Developer package
|
||||
#
|
||||
@@ -173,7 +193,7 @@ configure_file(
|
||||
# Coverage
|
||||
#
|
||||
|
||||
if(COVERAGE)
|
||||
if(ENABLE_COVERAGE)
|
||||
include(coverage_ie)
|
||||
endif()
|
||||
|
||||
@@ -197,6 +217,7 @@ function(register_extra_plugins)
|
||||
|
||||
# automatically import plugins from the 'plugins' folder
|
||||
file(GLOB local_extra_plugins "plugins/*")
|
||||
list(APPEND local_extra_plugins "${OpenVINO_MAIN_SOURCE_DIR}/docs/template_plugin")
|
||||
|
||||
foreach(plugin_path IN LISTS IE_EXTRA_PLUGINS local_extra_plugins)
|
||||
get_filename_component(plugin_dir "${plugin_path}" NAME)
|
||||
|
||||
@@ -25,9 +25,7 @@ endif()
|
||||
if(DEFINED INTEL_VTUNE_DIR)
|
||||
message(STATUS "INTEL_VTUNE_DIR = ${INTEL_VTUNE_DIR}")
|
||||
|
||||
find_path(ITT_INCLUDE_DIR
|
||||
FILES
|
||||
ittnotify.h
|
||||
find_path(ITT_INCLUDE_DIR ittnotify.h
|
||||
PATHS "${INTEL_VTUNE_DIR}/include/")
|
||||
|
||||
find_library(ITT_LIB
|
||||
|
||||
@@ -118,7 +118,6 @@ function(addIeTarget)
|
||||
if (ARG_ADD_CPPLINT)
|
||||
# code style
|
||||
add_cpplint_target(${ARG_NAME}_cpplint FOR_TARGETS ${ARG_NAME})
|
||||
add_clang_format_target(${ARG_NAME}_clang_format FOR_TARGETS ${ARG_NAME})
|
||||
endif()
|
||||
if (ARG_DEVELOPER_PACKAGE)
|
||||
# developer package
|
||||
|
||||
@@ -22,6 +22,8 @@ endif()
|
||||
if(ENABLE_CLANG_FORMAT)
|
||||
add_custom_target(clang_format_check_all)
|
||||
add_custom_target(clang_format_fix_all)
|
||||
set_target_properties(clang_format_check_all clang_format_fix_all
|
||||
PROPERTIES FOLDER clang_format)
|
||||
set(CLANG_FORMAT_ALL_OUTPUT_FILES "" CACHE INTERNAL "All clang-format output files")
|
||||
endif()
|
||||
|
||||
@@ -35,10 +37,6 @@ function(add_clang_format_target TARGET_NAME)
|
||||
set(multiValueArgs "FOR_TARGETS" "FOR_SOURCES" "EXCLUDE_PATTERNS")
|
||||
cmake_parse_arguments(CLANG_FORMAT "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
|
||||
|
||||
if(CLANG_FORMAT_ALL)
|
||||
set(all ALL)
|
||||
endif()
|
||||
|
||||
foreach(target IN LISTS CLANG_FORMAT_FOR_TARGETS)
|
||||
get_target_property(target_sources "${target}" SOURCES)
|
||||
list(APPEND CLANG_FORMAT_FOR_SOURCES ${target_sources})
|
||||
@@ -95,7 +93,6 @@ function(add_clang_format_target TARGET_NAME)
|
||||
"All clang-format output files")
|
||||
|
||||
add_custom_target(${TARGET_NAME}
|
||||
${all}
|
||||
DEPENDS ${all_output_files}
|
||||
COMMENT "[clang-format] ${TARGET_NAME}")
|
||||
|
||||
@@ -113,6 +110,9 @@ function(add_clang_format_target TARGET_NAME)
|
||||
"[clang-format] ${TARGET_NAME}_fix"
|
||||
VERBATIM)
|
||||
|
||||
set_target_properties(${TARGET_NAME} ${TARGET_NAME}_fix
|
||||
PROPERTIES FOLDER clang_format)
|
||||
|
||||
# if(CLANG_FORMAT_FOR_TARGETS)
|
||||
# foreach(target IN LISTS CLANG_FORMAT_FOR_TARGETS)
|
||||
# add_dependencies(${target} ${TARGET_NAME})
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
|
||||
if(DEFINED IE_MAIN_SOURCE_DIR AND TARGET inference_engine)
|
||||
set(InferenceEngine_LIBRARIES inference_engine_legacy inference_engine
|
||||
inference_engine_c_api inference_engine_nn_builder)
|
||||
inference_engine_c_api)
|
||||
else()
|
||||
include("${CMAKE_CURRENT_LIST_DIR}/targets.cmake")
|
||||
if(NOT WIN32)
|
||||
@@ -30,5 +30,5 @@ else()
|
||||
|
||||
get_target_property(InferenceEngine_INCLUDE_DIRS IE::inference_engine INTERFACE_INCLUDE_DIRECTORIES)
|
||||
set(InferenceEngine_LIBRARIES IE::inference_engine_legacy IE::inference_engine
|
||||
IE::inference_engine_c_api IE::inference_engine_nn_builder)
|
||||
IE::inference_engine_c_api)
|
||||
endif()
|
||||
|
||||
@@ -13,14 +13,17 @@ ie_coverage_capture(INFO_FILE "dldt"
|
||||
|
||||
# Generate reports
|
||||
|
||||
ie_coverage_extract(INPUT "dldt" OUTPUT "inference_engine_with_builders"
|
||||
ie_coverage_extract(INPUT "dldt" OUTPUT "inference_engine"
|
||||
PATTERNS "${DLDT_COVERAGE_BASE_DIRECTORY}/inference_engine/*"
|
||||
"${DLDT_COVERAGE_BASE_DIRECTORY}/plugin_api/*")
|
||||
ie_coverage_remove(INPUT "inference_engine_with_builders" OUTPUT "inference_engine"
|
||||
PATTERNS "${DLDT_COVERAGE_BASE_DIRECTORY}/inference_engine/builders/*")
|
||||
ie_coverage_genhtml(INFO_FILE "inference_engine"
|
||||
PREFIX "${DLDT_COVERAGE_BASE_DIRECTORY}")
|
||||
|
||||
ie_coverage_extract(INPUT "dldt" OUTPUT "inference_engine_ir_reader"
|
||||
PATTERNS "${DLDT_COVERAGE_BASE_DIRECTORY}/readers/*")
|
||||
ie_coverage_genhtml(INFO_FILE "inference_engine_ir_reader"
|
||||
PREFIX "${DLDT_COVERAGE_BASE_DIRECTORY}")
|
||||
|
||||
ie_coverage_extract(INPUT "dldt" OUTPUT "inference_engine_legacy"
|
||||
PATTERNS "${DLDT_COVERAGE_BASE_DIRECTORY}/legacy_api/*")
|
||||
ie_coverage_genhtml(INFO_FILE "inference_engine_legacy"
|
||||
|
||||
@@ -3,16 +3,17 @@
|
||||
#
|
||||
|
||||
if(ENABLE_CPPLINT)
|
||||
find_host_package(PythonInterp)
|
||||
find_package(Python3 COMPONENTS Interpreter)
|
||||
|
||||
if(NOT PYTHONINTERP_FOUND)
|
||||
message(WARNING "Python interpreter was not found (required for cpplint check)")
|
||||
if(NOT Python3_Interpreter_FOUND)
|
||||
message(WARNING "Python3 interpreter was not found (required for cpplint check)")
|
||||
set(ENABLE_CPPLINT OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(ENABLE_CPPLINT)
|
||||
add_custom_target(cpplint_all ALL)
|
||||
set_target_properties(cpplint_all PROPERTIES FOLDER cpplint)
|
||||
set(CPPLINT_ALL_OUTPUT_FILES "" CACHE INTERNAL "All cpplint output files")
|
||||
endif()
|
||||
|
||||
@@ -93,6 +94,7 @@ function(add_cpplint_target TARGET_NAME)
|
||||
add_custom_target(${TARGET_NAME} ALL
|
||||
DEPENDS ${all_output_files}
|
||||
COMMENT "[cpplint] ${TARGET_NAME}")
|
||||
set_target_properties(${TARGET_NAME} PROPERTIES FOLDER cpplint)
|
||||
|
||||
if(CPPLINT_FOR_TARGETS)
|
||||
foreach(target IN LISTS CPPLINT_FOR_TARGETS)
|
||||
@@ -168,4 +170,5 @@ function(add_cpplint_report_target)
|
||||
add_custom_target(cpplint_report
|
||||
DEPENDS "${html_output_file}"
|
||||
COMMENT "[cpplint] Generate report")
|
||||
set_target_properties(cpplint_report PROPERTIES FOLDER cpplint)
|
||||
endfunction()
|
||||
|
||||
@@ -47,7 +47,7 @@ file(WRITE "${OUTPUT_FILE}" "${formatted_output}")
|
||||
|
||||
if(NOT SKIP_RETURN_CODE)
|
||||
# Pass through the cpplint return code
|
||||
if(NOT result EQUAL 0)
|
||||
if(NOT result EQUAL "0")
|
||||
# Display the cpplint output to console (to parse it form IDE)
|
||||
message("${output}")
|
||||
message(FATAL_ERROR "[cpplint] Code style check failed for : ${INPUT_FILE}")
|
||||
|
||||
@@ -4,6 +4,8 @@
|
||||
|
||||
cmake_policy(SET CMP0054 NEW)
|
||||
|
||||
include(models)
|
||||
|
||||
#we have number of dependencies stored on ftp
|
||||
include(dependency_solver)
|
||||
|
||||
@@ -13,6 +15,23 @@ endif()
|
||||
|
||||
include(ExternalProject)
|
||||
|
||||
if (ENABLE_SAME_BRANCH_FOR_MODELS)
|
||||
branchName(MODELS_BRANCH)
|
||||
else()
|
||||
set(MODELS_BRANCH "master")
|
||||
endif()
|
||||
|
||||
|
||||
if (ENABLE_DATA)
|
||||
add_models_repo(${ENABLE_DATA} "data:https://github.com/openvinotoolkit/testdata.git")
|
||||
set(MODELS_PATH "${TEMP}/models/src/data")
|
||||
set(DATA_PATH "${MODELS_PATH}")
|
||||
endif()
|
||||
|
||||
message(STATUS "MODELS_PATH=" ${MODELS_PATH})
|
||||
|
||||
fetch_models_and_validation_set()
|
||||
|
||||
include(linux_name)
|
||||
if(COMMAND get_linux_name)
|
||||
get_linux_name(LINUX_OS_NAME)
|
||||
@@ -25,25 +44,58 @@ if (ENABLE_MYRIAD)
|
||||
endif()
|
||||
|
||||
## enable cblas_gemm from OpenBLAS package
|
||||
if (GEMM STREQUAL "OPENBLAS")
|
||||
if (ENABLE_MKL_DNN AND GEMM STREQUAL "OPENBLAS")
|
||||
if(AARCH64)
|
||||
if(DEFINED ENV{THIRDPARTY_SERVER_PATH})
|
||||
set(IE_PATH_TO_DEPS "$ENV{THIRDPARTY_SERVER_PATH}")
|
||||
elseif(DEFINED THIRDPARTY_SERVER_PATH)
|
||||
set(IE_PATH_TO_DEPS "${THIRDPARTY_SERVER_PATH}")
|
||||
else()
|
||||
message(WARNING "OpenBLAS is not found!")
|
||||
endif()
|
||||
|
||||
if(DEFINED IE_PATH_TO_DEPS)
|
||||
reset_deps_cache(OpenBLAS_DIR)
|
||||
|
||||
RESOLVE_DEPENDENCY(OpenBLAS
|
||||
ARCHIVE_LIN "keembay/openblas_0.3.7_yocto_kmb.tar.xz"
|
||||
TARGET_PATH "${TEMP}/openblas_0.3.7_yocto_kmb"
|
||||
ENVIRONMENT "OpenBLAS_DIR")
|
||||
|
||||
update_deps_cache(OpenBLAS_DIR "${OpenBLAS}/lib/cmake/openblas" "Path to OpenBLAS package folder")
|
||||
|
||||
find_package(OpenBLAS QUIET)
|
||||
|
||||
if(OpenBLAS_FOUND)
|
||||
set(BLAS_FOUND TRUE)
|
||||
set(BLAS_INCLUDE_DIRS ${OpenBLAS_INCLUDE_DIRS})
|
||||
set(BLAS_LIBRARIES ${OpenBLAS_LIBRARIES})
|
||||
endif()
|
||||
|
||||
unset(IE_PATH_TO_DEPS)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(NOT BLAS_LIBRARIES OR NOT BLAS_INCLUDE_DIRS)
|
||||
find_package(BLAS REQUIRED)
|
||||
|
||||
if(BLAS_FOUND)
|
||||
find_path(BLAS_INCLUDE_DIRS cblas.h)
|
||||
else()
|
||||
message(ERROR "OpenBLAS not found: install OpenBLAS or set -DBLAS_INCLUDE_DIRS=<path to dir with cblas.h> and -DBLAS_LIBRARIES=<path to libopenblas.so or openblas.lib>")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
debug_message(STATUS "openblas=" ${BLAS_LIBRARIES})
|
||||
endif ()
|
||||
|
||||
#MKL-ml package
|
||||
## MKL-ML package
|
||||
if (GEMM STREQUAL "MKL")
|
||||
if(NOT MKLROOT)
|
||||
message(FATAL_ERROR "MKLROOT not found: install MKL and set -DMKLROOT=<path_to_MKL>")
|
||||
endif()
|
||||
set(MKL ${MKLROOT})
|
||||
debug_message(STATUS "mkl_ml=" ${MKLROOT})
|
||||
if(NOT MKLROOT)
|
||||
message(FATAL_ERROR "MKLROOT not found: install MKL and set -DMKLROOT=<path_to_MKL>")
|
||||
endif()
|
||||
set(MKL ${MKLROOT})
|
||||
debug_message(STATUS "mkl_ml=" ${MKLROOT})
|
||||
endif ()
|
||||
|
||||
## Intel OMP package
|
||||
@@ -83,24 +135,29 @@ if (THREADING STREQUAL "TBB" OR THREADING STREQUAL "TBB_AUTO")
|
||||
if (WIN32 AND X86_64)
|
||||
#TODO: add target_path to be platform specific as well, to avoid following if
|
||||
RESOLVE_DEPENDENCY(TBB
|
||||
ARCHIVE_WIN "tbb2020_20200214_win.zip"
|
||||
ARCHIVE_WIN "tbb2020_20200415_win.zip"
|
||||
TARGET_PATH "${TEMP}/tbb"
|
||||
ENVIRONMENT "TBBROOT"
|
||||
VERSION_REGEX ".*_([a-z]*_([a-z0-9]+\\.)*[0-9]+).*")
|
||||
elseif(ANDROID) # Should be before LINUX due LINUX is detected as well
|
||||
RESOLVE_DEPENDENCY(TBB
|
||||
ARCHIVE_ANDROID "tbb2020_20191023_android.tgz"
|
||||
ARCHIVE_ANDROID "tbb2020_20200404_android.tgz"
|
||||
TARGET_PATH "${TEMP}/tbb"
|
||||
ENVIRONMENT "TBBROOT"
|
||||
VERSION_REGEX ".*_([a-z]*_([a-z0-9]+\\.)*[0-9]+).*")
|
||||
elseif(LINUX AND X86_64)
|
||||
RESOLVE_DEPENDENCY(TBB
|
||||
ARCHIVE_LIN "tbb2020_20200327_lin_strip.tgz"
|
||||
ARCHIVE_LIN "tbb2020_20200415_lin_strip.tgz"
|
||||
TARGET_PATH "${TEMP}/tbb"
|
||||
ENVIRONMENT "TBBROOT")
|
||||
elseif(LINUX AND AARCH64)
|
||||
RESOLVE_DEPENDENCY(TBB
|
||||
ARCHIVE_LIN "keembay/tbb2020_38404_kmb.tgz"
|
||||
TARGET_PATH "${TEMP}/tbb_yocto"
|
||||
ENVIRONMENT "TBBROOT")
|
||||
elseif(APPLE AND X86_64)
|
||||
RESOLVE_DEPENDENCY(TBB
|
||||
ARCHIVE_MAC "tbb2020_20191023_mac.tgz"
|
||||
ARCHIVE_MAC "tbb2020_20200404_mac.tgz"
|
||||
TARGET_PATH "${TEMP}/tbb"
|
||||
ENVIRONMENT "TBBROOT"
|
||||
VERSION_REGEX ".*_([a-z]*_([a-z0-9]+\\.)*[0-9]+).*")
|
||||
@@ -131,35 +188,64 @@ if (ENABLE_OPENCV)
|
||||
|
||||
set(OPENCV_VERSION "4.3.0")
|
||||
set(OPENCV_BUILD "060")
|
||||
if (WIN32 AND X86_64)
|
||||
RESOLVE_DEPENDENCY(OPENCV
|
||||
ARCHIVE_WIN "opencv_${OPENCV_VERSION}-${OPENCV_BUILD}.txz"
|
||||
TARGET_PATH "${TEMP}/opencv_${OPENCV_VERSION}/opencv"
|
||||
ENVIRONMENT "OpenCV_DIR"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+).*")
|
||||
elseif(APPLE AND X86_64)
|
||||
RESOLVE_DEPENDENCY(OPENCV
|
||||
ARCHIVE_MAC "opencv_${OPENCV_VERSION}-${OPENCV_BUILD}_osx.txz"
|
||||
TARGET_PATH "${TEMP}/opencv_${OPENCV_VERSION}_osx/opencv"
|
||||
ENVIRONMENT "OpenCV_DIR"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+).*")
|
||||
elseif(LINUX)
|
||||
if (${CMAKE_SYSTEM_PROCESSOR} STREQUAL "armv7l")
|
||||
set(OPENCV_SUFFIX "debian9arm")
|
||||
elseif (${LINUX_OS_NAME} STREQUAL "CentOS 7" OR CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.9")
|
||||
set(OPENCV_SUFFIX "centos7")
|
||||
elseif (${LINUX_OS_NAME} STREQUAL "Ubuntu 16.04")
|
||||
set(OPENCV_SUFFIX "ubuntu16")
|
||||
elseif (${LINUX_OS_NAME} STREQUAL "Ubuntu 18.04")
|
||||
set(OPENCV_SUFFIX "ubuntu18")
|
||||
set(OPENCV_BUILD_YOCTO "073")
|
||||
|
||||
if (${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64")
|
||||
if(DEFINED ENV{THIRDPARTY_SERVER_PATH})
|
||||
set(IE_PATH_TO_DEPS "$ENV{THIRDPARTY_SERVER_PATH}")
|
||||
elseif(DEFINED THIRDPARTY_SERVER_PATH)
|
||||
set(IE_PATH_TO_DEPS "${THIRDPARTY_SERVER_PATH}")
|
||||
else()
|
||||
message(FATAL_ERROR "OpenCV is not available on current platform")
|
||||
message(WARNING "OpenCV is not found!")
|
||||
endif()
|
||||
RESOLVE_DEPENDENCY(OPENCV
|
||||
ARCHIVE_LIN "opencv_${OPENCV_VERSION}-${OPENCV_BUILD}_${OPENCV_SUFFIX}.txz"
|
||||
TARGET_PATH "${TEMP}/opencv_${OPENCV_VERSION}_${OPENCV_SUFFIX}/opencv"
|
||||
ENVIRONMENT "OpenCV_DIR"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+).*")
|
||||
|
||||
if(DEFINED IE_PATH_TO_DEPS)
|
||||
set(OPENCV_SUFFIX "yocto_kmb")
|
||||
set(OPENCV_BUILD "${OPENCV_BUILD_YOCTO}")
|
||||
|
||||
RESOLVE_DEPENDENCY(OPENCV
|
||||
ARCHIVE_LIN "opencv/opencv_${OPENCV_VERSION}-${OPENCV_BUILD}_${OPENCV_SUFFIX}.txz"
|
||||
TARGET_PATH "${TEMP}/opencv_${OPENCV_VERSION}_${OPENCV_SUFFIX}/opencv"
|
||||
ENVIRONMENT "OpenCV_DIR"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+).*")
|
||||
|
||||
unset(IE_PATH_TO_DEPS)
|
||||
endif()
|
||||
else()
|
||||
if (WIN32 AND X86_64)
|
||||
RESOLVE_DEPENDENCY(OPENCV
|
||||
ARCHIVE_WIN "opencv/opencv_${OPENCV_VERSION}-${OPENCV_BUILD}.txz"
|
||||
TARGET_PATH "${TEMP}/opencv_${OPENCV_VERSION}/opencv"
|
||||
ENVIRONMENT "OpenCV_DIR"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+).*")
|
||||
elseif(APPLE AND X86_64)
|
||||
RESOLVE_DEPENDENCY(OPENCV
|
||||
ARCHIVE_MAC "opencv/opencv_${OPENCV_VERSION}-${OPENCV_BUILD}_osx.txz"
|
||||
TARGET_PATH "${TEMP}/opencv_${OPENCV_VERSION}_osx/opencv"
|
||||
ENVIRONMENT "OpenCV_DIR"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+).*")
|
||||
elseif(LINUX)
|
||||
if (${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64")
|
||||
set(OPENCV_SUFFIX "yocto_kmb")
|
||||
set(OPENCV_BUILD "${OPENCV_BUILD_YOCTO}")
|
||||
elseif (${CMAKE_SYSTEM_PROCESSOR} STREQUAL "armv7l")
|
||||
set(OPENCV_SUFFIX "debian9arm")
|
||||
elseif (${LINUX_OS_NAME} STREQUAL "CentOS 7" OR CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.9")
|
||||
set(OPENCV_SUFFIX "centos7")
|
||||
elseif (${LINUX_OS_NAME} STREQUAL "Ubuntu 16.04")
|
||||
set(OPENCV_SUFFIX "ubuntu16")
|
||||
elseif (${LINUX_OS_NAME} STREQUAL "Ubuntu 18.04")
|
||||
set(OPENCV_SUFFIX "ubuntu18")
|
||||
else()
|
||||
message(FATAL_ERROR "OpenCV is not available on current platform")
|
||||
endif()
|
||||
RESOLVE_DEPENDENCY(OPENCV
|
||||
ARCHIVE_LIN "opencv/opencv_${OPENCV_VERSION}-${OPENCV_BUILD}_${OPENCV_SUFFIX}.txz"
|
||||
TARGET_PATH "${TEMP}/opencv_${OPENCV_VERSION}_${OPENCV_SUFFIX}/opencv"
|
||||
ENVIRONMENT "OpenCV_DIR"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+).*")
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
if(ANDROID)
|
||||
@@ -193,17 +279,17 @@ if (ENABLE_GNA)
|
||||
libGNA_LIBRARIES_BASE_PATH)
|
||||
if (GNA_LIBRARY_VERSION STREQUAL "GNA1")
|
||||
RESOLVE_DEPENDENCY(GNA
|
||||
ARCHIVE_UNIFIED "gna_20181120.zip"
|
||||
ARCHIVE_UNIFIED "GNA/gna_20181120.zip"
|
||||
TARGET_PATH "${TEMP}/gna")
|
||||
else()
|
||||
if(GNA_LIBRARY_VERSION STREQUAL "GNA1_1401")
|
||||
set(GNA_VERSION "01.00.00.1401")
|
||||
endif()
|
||||
if(GNA_LIBRARY_VERSION STREQUAL "GNA2")
|
||||
set(GNA_VERSION "02.00.00.0654")
|
||||
set(GNA_VERSION "02.00.00.0925")
|
||||
endif()
|
||||
RESOLVE_DEPENDENCY(GNA
|
||||
ARCHIVE_UNIFIED "GNA_${GNA_VERSION}.zip"
|
||||
ARCHIVE_UNIFIED "GNA/GNA_${GNA_VERSION}.zip"
|
||||
TARGET_PATH "${TEMP}/gna_${GNA_VERSION}"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+.[0-9]+).*")
|
||||
endif()
|
||||
@@ -211,6 +297,44 @@ if (ENABLE_GNA)
|
||||
debug_message(STATUS "gna=" ${GNA})
|
||||
endif()
|
||||
|
||||
if (ENABLE_SPEECH_DEMO)
|
||||
reset_deps_cache(SPEECH_LIBS_AND_DEMOS)
|
||||
if(DEFINED ENV{THIRDPARTY_SERVER_PATH})
|
||||
set(IE_PATH_TO_DEPS "$ENV{THIRDPARTY_SERVER_PATH}")
|
||||
elseif(DEFINED THIRDPARTY_SERVER_PATH)
|
||||
set(IE_PATH_TO_DEPS "${THIRDPARTY_SERVER_PATH}")
|
||||
else()
|
||||
message(WARNING "Unable to locate Speech Demo")
|
||||
endif()
|
||||
if(DEFINED IE_PATH_TO_DEPS)
|
||||
if (WIN32 AND X86_64)
|
||||
RESOLVE_DEPENDENCY(SPEECH_LIBS_AND_DEMOS
|
||||
ARCHIVE_WIN "speech_demo_1.0.0.746_windows.zip"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+.[0-9]+).*"
|
||||
TARGET_PATH "${TEMP}/speech_demo_1.0.0.746")
|
||||
debug_message(STATUS "speech_libs_and_demos=" ${SPEECH_LIBS_AND_DEMOS})
|
||||
elseif (LINUX AND X86_64)
|
||||
if (${LINUX_OS_NAME} STREQUAL "CentOS 7" OR CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.9")
|
||||
RESOLVE_DEPENDENCY(SPEECH_LIBS_AND_DEMOS
|
||||
ARCHIVE_LIN "speech_demo_1.0.0.746_centos.tgz"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+.[0-9]+).*"
|
||||
TARGET_PATH "${TEMP}/speech_demo_1.0.0.746")
|
||||
debug_message(STATUS "speech_libs_and_demos=" ${SPEECH_LIBS_AND_DEMOS})
|
||||
else()
|
||||
RESOLVE_DEPENDENCY(SPEECH_LIBS_AND_DEMOS
|
||||
ARCHIVE_LIN "speech_demo_1.0.0.746_linux.tgz"
|
||||
VERSION_REGEX ".*_([0-9]+.[0-9]+.[0-9]+.[0-9]+).*"
|
||||
TARGET_PATH "${TEMP}/speech_demo_1.0.0.746")
|
||||
debug_message(STATUS "speech_libs_and_demos=" ${SPEECH_LIBS_AND_DEMOS})
|
||||
endif()
|
||||
else()
|
||||
message(FATAL_ERROR "Speech Demo is not available on current platform")
|
||||
endif()
|
||||
unset(IE_PATH_TO_DEPS)
|
||||
endif()
|
||||
update_deps_cache(SPEECH_LIBS_AND_DEMOS "${SPEECH_LIBS_AND_DEMOS}" "Path to SPEECH_LIBS_AND_DEMOS root folder")
|
||||
endif()
|
||||
|
||||
configure_file(
|
||||
"${IE_MAIN_SOURCE_DIR}/cmake/share/InferenceEngineConfig.cmake.in"
|
||||
"${CMAKE_BINARY_DIR}/share/InferenceEngineConfig.cmake"
|
||||
|
||||
@@ -11,7 +11,11 @@ file(TO_CMAKE_PATH "${CMAKE_CURRENT_LIST_DIR}" cache_path)
|
||||
|
||||
set(ie_options "@IE_OPTIONS@;CMAKE_BUILD_TYPE;CMAKE_SKIP_RPATH")
|
||||
|
||||
load_cache("${cache_path}" READ_WITH_PREFIX "" ${ie_options})
|
||||
foreach(option IN LISTS ie_options)
|
||||
if(NOT DEFINED "${option}")
|
||||
load_cache("${cache_path}" READ_WITH_PREFIX "" ${option})
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
message(STATUS "The following CMake options are exported from Inference Engine Developer package")
|
||||
message("")
|
||||
@@ -21,6 +25,8 @@ endforeach()
|
||||
message("")
|
||||
|
||||
set(gflags_DIR "@gflags_BINARY_DIR@")
|
||||
# GNA lib dir
|
||||
set(GNA "@GNA@")
|
||||
|
||||
# Targets
|
||||
|
||||
@@ -29,7 +35,7 @@ include("${CMAKE_CURRENT_LIST_DIR}/targets_developer.cmake")
|
||||
set_property(TARGET IE::inference_engine PROPERTY IMPORTED_GLOBAL TRUE)
|
||||
|
||||
get_target_property(InferenceEngine_INCLUDE_DIRS IE::inference_engine INTERFACE_INCLUDE_DIRECTORIES)
|
||||
set(InferenceEngine_LIBRARIES IE::inference_engine_legacy IE::inference_engine IE::inference_engine_nn_builder)
|
||||
set(InferenceEngine_LIBRARIES IE::inference_engine_legacy IE::inference_engine)
|
||||
|
||||
#
|
||||
# Common cmake includes
|
||||
@@ -39,7 +45,7 @@ list(APPEND CMAKE_MODULE_PATH "${OpenVINO_MAIN_SOURCE_DIR}/cmake")
|
||||
list(APPEND CMAKE_MODULE_PATH "${IE_MAIN_SOURCE_DIR}/cmake")
|
||||
|
||||
# generic stuff from developer package
|
||||
include(developer_package NO_POLICY_SCOPE)
|
||||
include(developer_package)
|
||||
include(developer_package_ie)
|
||||
|
||||
# Don't threat deprecated API warnings as errors in 3rd party apps
|
||||
|
||||
@@ -50,7 +50,7 @@ if (ENABLE_GNA)
|
||||
if (UNIX AND NOT APPLE AND CMAKE_COMPILER_IS_GNUCC AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 5.4)
|
||||
set (DEFAULT_GNA_LIB GNA1)
|
||||
else()
|
||||
set (DEFAULT_GNA_LIB GNA1_1401)
|
||||
set (DEFAULT_GNA_LIB GNA2)
|
||||
endif()
|
||||
set(GNA_LIBRARY_VERSION "${DEFAULT_GNA_LIB}" CACHE STRING "GNAVersion")
|
||||
set_property(CACHE GNA_LIBRARY_VERSION PROPERTY STRINGS "GNA1" "GNA1_1401" "GNA2")
|
||||
@@ -62,30 +62,30 @@ if (ENABLE_GNA)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
ie_option (ENABLE_IR_READER "Compile with IR readers / parsers" ON)
|
||||
|
||||
ie_option (ENABLE_VPU "vpu targeted plugins for inference engine" ON)
|
||||
|
||||
ie_dependent_option (ENABLE_MYRIAD "myriad targeted plugin for inference engine" ON "ENABLE_VPU" OFF)
|
||||
|
||||
ie_dependent_option (ENABLE_MYRIAD_NO_BOOT "myriad plugin will skip device boot" OFF "ENABLE_MYRIAD" OFF)
|
||||
|
||||
ie_option (ENABLE_TESTS "unit, behavior and functional tests" OFF)
|
||||
|
||||
ie_dependent_option (ENABLE_GAPI_TESTS "tests for GAPI kernels" OFF "ENABLE_TESTS" OFF)
|
||||
ie_dependent_option (ENABLE_GAPI_TESTS "tests for GAPI kernels" ON "ENABLE_TESTS" OFF)
|
||||
|
||||
ie_dependent_option (GAPI_TEST_PERF "if GAPI unit tests should examine performance" OFF "ENABLE_GAPI_TESTS" OFF)
|
||||
|
||||
ie_dependent_option (ENABLE_MYRIAD_MVNC_TESTS "functional and behavior tests for mvnc api" OFF "ENABLE_TESTS;ENABLE_MYRIAD" OFF)
|
||||
|
||||
ie_dependent_option (ENABLE_SAMPLES "console samples are part of inference engine package" ON "NOT MINGW" OFF)
|
||||
ie_dependent_option (ENABLE_DATA "fetch models from testdata repo" ON "ENABLE_FUNCTIONAL_TESTS;NOT ANDROID" OFF)
|
||||
|
||||
ie_dependent_option (ENABLE_SAME_BRANCH_FOR_MODELS "uses same branch for models and for inference engine, if not enabled models are taken from master" OFF "ENABLE_TESTS" OFF)
|
||||
|
||||
ie_dependent_option (ENABLE_BEH_TESTS "tests oriented to check inference engine API corecteness" ON "ENABLE_TESTS" OFF)
|
||||
|
||||
ie_dependent_option (ENABLE_FUNCTIONAL_TESTS "functional tests" ON "ENABLE_TESTS;ENABLE_IR_READER" OFF)
|
||||
ie_dependent_option (ENABLE_FUNCTIONAL_TESTS "functional tests" ON "ENABLE_TESTS" OFF)
|
||||
|
||||
ie_dependent_option (ENABLE_SAMPLES "console samples are part of inference engine package" ON "NOT MINGW" OFF)
|
||||
|
||||
ie_dependent_option (ENABLE_SPEECH_DEMO "enable speech demo integration" ON "NOT APPLE;NOT ANDROID;X86 OR X86_64" OFF)
|
||||
|
||||
ie_option (ENABLE_FUZZING "instrument build for fuzzing" OFF)
|
||||
|
||||
ie_option (VERBOSE_BUILD "shows extra information about build" OFF)
|
||||
@@ -96,18 +96,15 @@ ie_option (ENABLE_ALTERNATIVE_TEMP "in case of dependency conflict, to avoid mod
|
||||
|
||||
ie_option (ENABLE_OPENCV "enables OpenCV" ON)
|
||||
|
||||
ie_option (ENABLE_DEBUG_SYMBOLS "generates symbols for debugging" OFF)
|
||||
|
||||
ie_option (ENABLE_PYTHON "enables ie python bridge build" OFF)
|
||||
|
||||
ie_option (ENABLE_CPP_CCT "enables C++ version of Cross Check Tool" OFF)
|
||||
|
||||
ie_option (ENABLE_C "enables ie c bridge build" ON)
|
||||
|
||||
ie_dependent_option(ENABLE_CPPLINT "Enable cpplint checks during the build" OFF "OFF;UNIX;NOT APPLE;NOT ANDROID" OFF)
|
||||
ie_dependent_option(ENABLE_CPPLINT "Enable cpplint checks during the build" ON "UNIX;NOT ANDROID" OFF)
|
||||
|
||||
ie_dependent_option(ENABLE_CPPLINT_REPORT "Build cpplint report instead of failing the build" OFF "ENABLE_CPPLINT" OFF)
|
||||
|
||||
ie_option(ENABLE_CLANG_FORMAT "Enable clang-format checks during the build" OFF)
|
||||
ie_option(ENABLE_CLANG_FORMAT "Enable clang-format checks during the build" ON)
|
||||
|
||||
set(IE_EXTRA_PLUGINS "" CACHE STRING "Extra paths for plugins to include into DLDT build tree")
|
||||
|
||||
|
||||
81
inference-engine/cmake/models.cmake
Normal file
81
inference-engine/cmake/models.cmake
Normal file
@@ -0,0 +1,81 @@
|
||||
# Copyright (C) 2018-2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
if(ENABLE_DOCKER)
|
||||
cmake_minimum_required(VERSION 3.3 FATAL_ERROR)
|
||||
else()
|
||||
cmake_minimum_required(VERSION 3.8 FATAL_ERROR)
|
||||
endif()
|
||||
|
||||
cmake_policy(SET CMP0054 NEW)
|
||||
|
||||
find_package(Git REQUIRED)
|
||||
|
||||
set(MODELS_LST "")
|
||||
set(MODELS_LST_TO_FETCH "")
|
||||
|
||||
function (add_models_repo add_to_fetcher model_name)
|
||||
list(LENGTH ARGV add_models_args)
|
||||
if (add_models_args EQUAL 3)
|
||||
list(GET ARGV 2 branch_name)
|
||||
else()
|
||||
set(branch_name ${MODELS_BRANCH})
|
||||
endif()
|
||||
if (add_to_fetcher)
|
||||
set(model_name "${model_name}:${branch_name}")
|
||||
list(APPEND MODELS_LST_TO_FETCH ${model_name})
|
||||
endif()
|
||||
|
||||
list(APPEND MODELS_LST ${model_name})
|
||||
|
||||
set(MODELS_LST_TO_FETCH ${MODELS_LST_TO_FETCH} PARENT_SCOPE)
|
||||
set(MODELS_LST ${MODELS_LST} PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
function(add_lfs_repo name prefix url tag)
|
||||
if(TARGET ${name})
|
||||
return()
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(${name}
|
||||
PREFIX ${prefix}
|
||||
GIT_REPOSITORY ${url}
|
||||
GIT_TAG ${tag}
|
||||
GIT_CONFIG "http.sslverify=false"
|
||||
GIT_PROGRESS 1
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD ON)
|
||||
|
||||
execute_process(
|
||||
COMMAND ${GIT_EXECUTABLE} lfs install --local --force
|
||||
WORKING_DIRECTORY ${prefix}/src/${name}
|
||||
OUTPUT_VARIABLE lfs_output
|
||||
RESULT_VARIABLE lfs_var)
|
||||
if(lfs_var)
|
||||
message(FATAL_ERROR [=[
|
||||
Failed to setup Git LFS: ${lfs_output}
|
||||
Git lfs must be installed in order to fetch models
|
||||
Please install it from https://git-lfs.github.com/
|
||||
]=])
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
function (fetch_models_and_validation_set)
|
||||
foreach(loop_var ${MODELS_LST_TO_FETCH})
|
||||
string(REPLACE ":" ";" MODEL_CONFIG_LST ${loop_var})
|
||||
|
||||
list(GET MODEL_CONFIG_LST 0 folder_name)
|
||||
list(GET MODEL_CONFIG_LST 1 git_url)
|
||||
list(GET MODEL_CONFIG_LST 2 repo_name)
|
||||
list(GET MODEL_CONFIG_LST 3 branch_name)
|
||||
|
||||
add_lfs_repo(
|
||||
"${folder_name}"
|
||||
"${TEMP}/models"
|
||||
"${git_url}:${repo_name}"
|
||||
"${branch_name}")
|
||||
endforeach(loop_var)
|
||||
endfunction()
|
||||
@@ -83,6 +83,15 @@ function(ie_add_plugin)
|
||||
add_dependencies(${IE_PLUGIN_NAME} inference_engine_preproc)
|
||||
endif()
|
||||
|
||||
# fake dependencies to build in the following order:
|
||||
# IE -> IE readers -> IE inference plugins -> IE-based apps
|
||||
if(TARGET inference_engine_ir_reader)
|
||||
add_dependencies(${IE_PLUGIN_NAME} inference_engine_ir_reader)
|
||||
endif()
|
||||
if(TARGET inference_engine_onnx_reader)
|
||||
add_dependencies(${IE_PLUGIN_NAME} inference_engine_onnx_reader)
|
||||
endif()
|
||||
|
||||
# install rules
|
||||
|
||||
if(NOT IE_PLUGIN_SKIP_INSTALL)
|
||||
@@ -90,8 +99,8 @@ function(ie_add_plugin)
|
||||
ie_cpack_add_component(${install_component} REQUIRED DEPENDS core)
|
||||
|
||||
install(TARGETS ${IE_PLUGIN_NAME}
|
||||
RUNTIME DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT ${install_component}
|
||||
ARCHIVE DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT ${install_component}
|
||||
RUNTIME DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT ${install_component}
|
||||
ARCHIVE DESTINATION ${IE_CPACK_ARCHIVE_PATH} COMPONENT ${install_component}
|
||||
LIBRARY DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT ${install_component})
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
# IE::inference_engine - The Inference Engine library
|
||||
# IE::inference_engine_legacy - The Inference Engine library with legacy API for IR v7 and older.
|
||||
# IE::inference_engine_c_api - The Inference Engine C API library
|
||||
# IE::inference_engine_nn_builder - The Inference Engine NN Builder library
|
||||
#
|
||||
|
||||
macro(ext_message TRACE_LEVEL)
|
||||
@@ -40,7 +39,7 @@ if(TARGET IE::inference_engine)
|
||||
set(InferenceEngine_FOUND TRUE)
|
||||
get_target_property(InferenceEngine_INCLUDE_DIRS IE::inference_engine INTERFACE_INCLUDE_DIRECTORIES)
|
||||
set(InferenceEngine_LIBRARIES IE::inference_engine_legacy IE::inference_engine
|
||||
IE::inference_engine_c_api IE::inference_engine_nn_builder)
|
||||
IE::inference_engine_c_api)
|
||||
else()
|
||||
if (WIN32)
|
||||
set(_ARCH intel64)
|
||||
@@ -88,29 +87,26 @@ else()
|
||||
find_library(IE_RELEASE_LIBRARY inference_engine@IE_RELEASE_POSTFIX_WIN@ "${IE_LIB_REL_DIR}" NO_DEFAULT_PATH)
|
||||
find_library(IE_LEGACY_RELEASE_LIBRARY inference_engine_legacy@IE_RELEASE_POSTFIX_WIN@ "${IE_LIB_REL_DIR}" NO_DEFAULT_PATH)
|
||||
find_library(IE_C_API_RELEASE_LIBRARY inference_engine_c_api@IE_RELEASE_POSTFIX_WIN@ "${IE_LIB_REL_DIR}" NO_DEFAULT_PATH)
|
||||
find_library(IE_NN_BUILDER_RELEASE_LIBRARY inference_engine_nn_builder@IE_RELEASE_POSTFIX_WIN@ "${IE_LIB_REL_DIR}" NO_DEFAULT_PATH)
|
||||
elseif(APPLE)
|
||||
find_library(IE_RELEASE_LIBRARY inference_engine@IE_RELEASE_POSTFIX_MAC@ "${IE_LIB_DIR}" NO_DEFAULT_PATH)
|
||||
find_library(IE_LEGACY_RELEASE_LIBRARY inference_engine_legacy@IE_RELEASE_POSTFIX_MAC@ "${IE_LIB_DIR}" NO_DEFAULT_PATH)
|
||||
find_library(IE_C_API_RELEASE_LIBRARY inference_engine_c_api@IE_RELEASE_POSTFIX_MAC@ "${IE_LIB_DIR}" NO_DEFAULT_PATH)
|
||||
find_library(IE_NN_BUILDER_RELEASE_LIBRARY inference_engine_nn_builder@IE_RELEASE_POSTFIX_MAC@ "${IE_LIB_DIR}" NO_DEFAULT_PATH)
|
||||
else()
|
||||
find_library(IE_RELEASE_LIBRARY inference_engine@IE_RELEASE_POSTFIX_LIN@ "${IE_LIB_DIR}" NO_DEFAULT_PATH)
|
||||
find_library(IE_LEGACY_RELEASE_LIBRARY inference_engine_legacy@IE_RELEASE_POSTFIX_LIN@ "${IE_LIB_DIR}" NO_DEFAULT_PATH)
|
||||
find_library(IE_C_API_RELEASE_LIBRARY inference_engine_c_api@IE_RELEASE_POSTFIX_LIN@ "${IE_LIB_DIR}" NO_DEFAULT_PATH)
|
||||
find_library(IE_NN_BUILDER_RELEASE_LIBRARY inference_engine_nn_builder@IE_RELEASE_POSTFIX_LIN@ "${IE_LIB_DIR}" NO_DEFAULT_PATH)
|
||||
endif()
|
||||
|
||||
find_package_handle_standard_args( InferenceEngine
|
||||
FOUND_VAR INFERENCEENGINE_FOUND
|
||||
REQUIRED_VARS IE_RELEASE_LIBRARY IE_LEGACY_RELEASE_LIBRARY IE_C_API_RELEASE_LIBRARY IE_NN_BUILDER_RELEASE_LIBRARY IE_INCLUDE_DIR
|
||||
REQUIRED_VARS IE_RELEASE_LIBRARY IE_LEGACY_RELEASE_LIBRARY IE_C_API_RELEASE_LIBRARY IE_INCLUDE_DIR
|
||||
FAIL_MESSAGE "Some of mandatory Inference Engine components are not found. Please consult InferenceEgnineConfig.cmake module's help page.")
|
||||
|
||||
if(INFERENCEENGINE_FOUND)
|
||||
# to keep this line for successful execution in CMake 2.8
|
||||
set(InferenceEngine_FOUND TRUE)
|
||||
|
||||
foreach(ie_library_suffix "" "_legacy" "_c_api" "_nn_builder")
|
||||
foreach(ie_library_suffix "" "_legacy" "_c_api")
|
||||
string(TOUPPER "${ie_library_suffix}" ie_library_usuffix)
|
||||
add_library(IE::inference_engine${ie_library_suffix} SHARED IMPORTED GLOBAL)
|
||||
|
||||
@@ -154,7 +150,7 @@ else()
|
||||
set_target_properties(IE::inference_engine${ie_library_suffix} PROPERTIES
|
||||
IMPORTED_LOCATION "${IE${ie_library_usuffix}_RELEASE_LIBRARY}"
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${IE_INCLUDE_DIR}")
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES Intel)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
set_target_properties(IE::inference_engine${ie_library_suffix} PROPERTIES
|
||||
INTERFACE_COMPILE_OPTIONS "-diag-warning=1786")
|
||||
else()
|
||||
@@ -167,7 +163,7 @@ else()
|
||||
|
||||
set(InferenceEngine_INCLUDE_DIRS ${IE_INCLUDE_DIR})
|
||||
set(InferenceEngine_LIBRARIES IE::inference_engine_legacy IE::inference_engine
|
||||
IE::inference_engine_c_api IE::inference_engine_nn_builder)
|
||||
IE::inference_engine_c_api)
|
||||
|
||||
set(IE_EXTERNAL_DIR "${IE_ROOT_DIR}/external")
|
||||
include("${IE_ROOT_DIR}/share/ie_parallel.cmake")
|
||||
|
||||
@@ -19,7 +19,8 @@ set(VPU_SUPPORTED_FIRMWARES usb-ma2450 usb-ma2x8x pcie-ma248x)
|
||||
# Default packages
|
||||
#
|
||||
|
||||
set(FIRMWARE_PACKAGE_VERSION 1076)
|
||||
set(FIRMWARE_PACKAGE_VERSION 1223)
|
||||
set(VPU_CLC_MA2X8X_VERSION "movi-cltools-20.02.0")
|
||||
|
||||
#
|
||||
# CMake variables to override default firmware files
|
||||
@@ -37,7 +38,7 @@ foreach(firmware_name IN LISTS VPU_SUPPORTED_FIRMWARES)
|
||||
reset_deps_cache(VPU_FIRMWARE_${firmware_name_upper}_FILE)
|
||||
|
||||
RESOLVE_DEPENDENCY(VPU_FIRMWARE_${firmware_name_upper}
|
||||
ARCHIVE_UNIFIED firmware_${firmware_name}_${FIRMWARE_PACKAGE_VERSION}.zip
|
||||
ARCHIVE_UNIFIED VPU/${firmware_name}/firmware_${firmware_name}_${FIRMWARE_PACKAGE_VERSION}.zip
|
||||
TARGET_PATH "${TEMP}/vpu/firmware/${firmware_name}"
|
||||
ENVIRONMENT "VPU_FIRMWARE_${firmware_name_upper}_FILE"
|
||||
FOLDER)
|
||||
@@ -82,7 +83,7 @@ foreach(firmware_name IN LISTS VPU_SUPPORTED_FIRMWARES)
|
||||
VERBATIM)
|
||||
|
||||
install(FILES ${${var_name}}
|
||||
DESTINATION ${IE_CPACK_LIBRARY_PATH}
|
||||
DESTINATION ${IE_CPACK_RUNTIME_PATH}
|
||||
COMPONENT myriad)
|
||||
endforeach()
|
||||
|
||||
@@ -104,4 +105,107 @@ if(ANDROID)
|
||||
set(LIBUSB_LIBRARY "${LIBUSB}/libs/${ANDROID_ABI}/libusb1.0.so")
|
||||
|
||||
log_rpath_from_dir(LIBUSB "${LIBUSB}/libs/${ANDROID_ABI}")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
#
|
||||
# OpenCL compiler
|
||||
#
|
||||
|
||||
if(LINUX AND LINUX_OS_NAME MATCHES "Ubuntu")
|
||||
if(DEFINED ENV{THIRDPARTY_SERVER_PATH})
|
||||
set(IE_PATH_TO_DEPS "$ENV{THIRDPARTY_SERVER_PATH}")
|
||||
elseif(DEFINED THIRDPARTY_SERVER_PATH)
|
||||
set(IE_PATH_TO_DEPS "${THIRDPARTY_SERVER_PATH}")
|
||||
else()
|
||||
message(WARNING "VPU_OCL_COMPILER is not found. Some tests will skipped")
|
||||
endif()
|
||||
|
||||
if(DEFINED IE_PATH_TO_DEPS)
|
||||
message(STATUS "THIRDPARTY_SERVER_PATH=${IE_PATH_TO_DEPS}")
|
||||
|
||||
reset_deps_cache(VPU_CLC_MA2X8X_ROOT)
|
||||
reset_deps_cache(VPU_CLC_MA2X8X_COMMAND)
|
||||
|
||||
RESOLVE_DEPENDENCY(VPU_CLC_MA2X8X
|
||||
ARCHIVE_LIN "VPU_OCL_compiler/${VPU_CLC_MA2X8X_VERSION}.tar.gz"
|
||||
TARGET_PATH "${TEMP}/vpu/clc/ma2x8x/${VPU_CLC_MA2X8X_VERSION}"
|
||||
ENVIRONMENT "VPU_CLC_MA2X8X_COMMAND")
|
||||
debug_message(STATUS "VPU_CLC_MA2X8X=" ${VPU_CLC_MA2X8X})
|
||||
|
||||
update_deps_cache(
|
||||
VPU_CLC_MA2X8X_ROOT
|
||||
"${VPU_CLC_MA2X8X}"
|
||||
"[VPU] Root directory of OpenCL compiler")
|
||||
|
||||
update_deps_cache(
|
||||
VPU_CLC_MA2X8X_COMMAND
|
||||
"${VPU_CLC_MA2X8X}/bin/clc"
|
||||
"[VPU] OpenCL compiler")
|
||||
|
||||
find_program(VPU_CLC_MA2X8X_COMMAND clc)
|
||||
unset (IE_PATH_TO_DEPS)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
#
|
||||
# `vpu_custom_kernels` CMake target
|
||||
#
|
||||
|
||||
add_library(vpu_custom_kernels INTERFACE)
|
||||
|
||||
function(add_vpu_compile_custom_kernels)
|
||||
set(SRC_DIR "${IE_MAIN_SOURCE_DIR}/src/vpu/custom_kernels")
|
||||
set(DST_DIR "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/vpu_custom_kernels")
|
||||
|
||||
file(MAKE_DIRECTORY "${DST_DIR}")
|
||||
|
||||
file(GLOB XML_FILES "${SRC_DIR}/*.xml")
|
||||
file(GLOB CL_FILES "${SRC_DIR}/*.cl")
|
||||
|
||||
foreach(xml_file IN LISTS XML_FILES)
|
||||
get_filename_component(xml_file_name ${xml_file} NAME)
|
||||
|
||||
set(out_file "${DST_DIR}/${xml_file_name}")
|
||||
list(APPEND all_output_files ${out_file})
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT ${out_file}
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E copy ${xml_file} ${out_file}
|
||||
MAIN_DEPENDENCY ${xml_file}
|
||||
COMMENT "[VPU] Copy ${xml_file} to ${DST_DIR}"
|
||||
VERBATIM)
|
||||
endforeach()
|
||||
|
||||
foreach(cl_file IN LISTS CL_FILES)
|
||||
get_filename_component(cl_file_name ${cl_file} NAME_WE)
|
||||
|
||||
set(out_file "${DST_DIR}/${cl_file_name}.bin")
|
||||
list(APPEND all_output_files ${out_file})
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT ${out_file}
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E env
|
||||
"SHAVE_LDSCRIPT_DIR=${VPU_CLC_MA2X8X}/ldscripts/"
|
||||
"SHAVE_MA2X8XLIBS_DIR=${VPU_CLC_MA2X8X}/lib"
|
||||
"SHAVE_MOVIASM_DIR=${VPU_CLC_MA2X8X}/bin"
|
||||
"SHAVE_MYRIAD_LD_DIR=${VPU_CLC_MA2X8X}/bin"
|
||||
${VPU_CLC_MA2X8X_COMMAND} --strip-binary-header ${cl_file} -o ${out_file}
|
||||
MAIN_DEPENDENCY ${cl_file}
|
||||
DEPENDS ${VPU_CLC_MA2X8X_COMMAND}
|
||||
COMMENT "[VPU] Compile ${cl_file}"
|
||||
VERBATIM)
|
||||
endforeach()
|
||||
|
||||
add_custom_target(vpu_compile_custom_kernels
|
||||
DEPENDS ${all_output_files}
|
||||
COMMENT "[VPU] Compile custom kernels")
|
||||
|
||||
add_dependencies(vpu_custom_kernels vpu_compile_custom_kernels)
|
||||
target_compile_definitions(vpu_custom_kernels INTERFACE "VPU_HAS_CUSTOM_KERNELS")
|
||||
endfunction()
|
||||
|
||||
if(VPU_CLC_MA2X8X_COMMAND)
|
||||
add_vpu_compile_custom_kernels()
|
||||
endif()
|
||||
|
||||
@@ -6,6 +6,10 @@ project(InferenceEngine_C_API)
|
||||
|
||||
add_subdirectory(src)
|
||||
|
||||
if(ENABLE_TESTS)
|
||||
add_subdirectory(tests)
|
||||
endif()
|
||||
|
||||
if(ENABLE_SAMPLES)
|
||||
add_subdirectory(samples)
|
||||
endif()
|
||||
|
||||
@@ -31,6 +31,7 @@
|
||||
#define IE_NODISCARD
|
||||
#else
|
||||
#if defined(_WIN32)
|
||||
#define INFERENCE_ENGINE_C_API_CALLBACK __cdecl
|
||||
#ifdef inference_engine_c_api_EXPORTS
|
||||
#define INFERENCE_ENGINE_C_API(...) INFERENCE_ENGINE_C_API_EXTERN __declspec(dllexport) __VA_ARGS__ __cdecl
|
||||
#else
|
||||
@@ -43,6 +44,10 @@
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifndef INFERENCE_ENGINE_C_API_CALLBACK
|
||||
#define INFERENCE_ENGINE_C_API_CALLBACK
|
||||
#endif
|
||||
|
||||
typedef struct ie_core ie_core_t;
|
||||
typedef struct ie_network ie_network_t;
|
||||
typedef struct ie_executable ie_executable_network_t;
|
||||
@@ -284,7 +289,7 @@ typedef struct ie_blob_buffer {
|
||||
* @brief Completion callback definition about the function and args
|
||||
*/
|
||||
typedef struct ie_complete_call_back {
|
||||
void (*completeCallBackFunc)(void *args);
|
||||
void (INFERENCE_ENGINE_C_API_CALLBACK *completeCallBackFunc)(void *args);
|
||||
void *args;
|
||||
}ie_complete_call_back_t;
|
||||
|
||||
@@ -371,6 +376,19 @@ INFERENCE_ENGINE_C_API(void) ie_core_versions_free(ie_core_versions_t *vers);
|
||||
*/
|
||||
INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_core_read_network(ie_core_t *core, const char *xml, const char *weights_file, ie_network_t **network);
|
||||
|
||||
/**
|
||||
* @brief Reads the model from an xml string and a blob of the bin part of the IR. Use the ie_network_free() method to free memory.
|
||||
* @ingroup Core
|
||||
* @param core A pointer to ie_core_t instance.
|
||||
* @param xml_content Xml content of the IR.
|
||||
* @param xml_content_size Number of bytes in the xml content of the IR.
|
||||
* @param weight_blob Blob containing the bin part of the IR.
|
||||
* @param network A pointer to the newly created network.
|
||||
* @return Status code of the operation: OK(0) for success.
|
||||
*/
|
||||
INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_core_read_network_from_memory(ie_core_t *core, const uint8_t *xml_content, size_t xml_content_size,
|
||||
const ie_blob_t *weight_blob, ie_network_t **network);
|
||||
|
||||
/**
|
||||
* @brief Creates an executable network from a network object. Users can create as many networks as they need and use
|
||||
* them simultaneously (up to the limitation of the hardware resources). Use the ie_exec_network_free() method to free memory.
|
||||
|
||||
@@ -24,6 +24,8 @@ target_link_libraries(${TARGET_NAME} PUBLIC ${OpenCV_LIBRARIES})
|
||||
|
||||
target_include_directories(${TARGET_NAME} PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}")
|
||||
|
||||
set_target_properties(${TARGET_NAME} PROPERTIES FOLDER c_samples)
|
||||
|
||||
if(COMMAND add_cpplint_target)
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
endif()
|
||||
|
||||
@@ -2,14 +2,6 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
set(TARGET_NAME "hello_classification_c")
|
||||
|
||||
# create sample target
|
||||
|
||||
add_executable(${TARGET_NAME} main.c)
|
||||
|
||||
target_link_libraries(${TARGET_NAME} PRIVATE ${InferenceEngine_LIBRARIES} opencv_c_wraper)
|
||||
|
||||
if(COMMAND add_cpplint_target)
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
endif()
|
||||
ie_add_sample(NAME hello_classification_c
|
||||
SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/main.c"
|
||||
DEPENDENCIES opencv_c_wraper)
|
||||
|
||||
@@ -2,14 +2,6 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
set(TARGET_NAME "hello_nv12_input_classification_c")
|
||||
|
||||
# create sample target
|
||||
|
||||
add_executable(${TARGET_NAME} main.c)
|
||||
|
||||
target_link_libraries(${TARGET_NAME} PRIVATE ${InferenceEngine_LIBRARIES})
|
||||
|
||||
if(COMMAND add_cpplint_target)
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
endif()
|
||||
ie_add_sample(NAME hello_nv12_input_classification_c
|
||||
SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/main.c"
|
||||
DEPENDENCIES opencv_c_wraper)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier : Apache-2.0
|
||||
//
|
||||
|
||||
#include <stdlib.h>
|
||||
@@ -167,7 +167,7 @@ int main(int argc, char **argv) {
|
||||
// set input resize algorithm to enable input autoresize
|
||||
status |= ie_network_set_input_resize_algorithm(network, input_name, RESIZE_BILINEAR);
|
||||
// set input color format to NV12 to enable automatic input color format pre-processing
|
||||
status |= ie_network_set_color_format(network, input_name, NV12 );
|
||||
status |= ie_network_set_color_format(network, input_name, NV12);
|
||||
|
||||
if (status != OK)
|
||||
goto err;
|
||||
|
||||
@@ -2,14 +2,8 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
set (TARGET_NAME "object_detection_sample_ssd_c")
|
||||
|
||||
# create sample target
|
||||
|
||||
add_executable(${TARGET_NAME} main.c)
|
||||
|
||||
target_link_libraries(${TARGET_NAME} PRIVATE ${InferenceEngine_LIBRARIES} opencv_c_wraper)
|
||||
|
||||
if(COMMAND add_cpplint_target)
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
endif()
|
||||
ie_add_sample(NAME object_detection_sample_ssd_c
|
||||
SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/main.c"
|
||||
HEADERS "${CMAKE_CURRENT_SOURCE_DIR}/object_detection_sample_ssd.h"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/c_w_dirent.h"
|
||||
DEPENDENCIES opencv_c_wraper)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier : Apache-2.0
|
||||
//
|
||||
|
||||
#include <stdlib.h>
|
||||
@@ -18,7 +18,7 @@
|
||||
|
||||
#define MAX_IMAGE 20
|
||||
|
||||
static const char *img_msg = NULL;
|
||||
static const char *img_msg = NULL;
|
||||
static const char *input_model = NULL;
|
||||
static const char *device_name = "CPU";
|
||||
static const char *custom_cldnn_msg = NULL;
|
||||
@@ -38,7 +38,7 @@ int ParseAndCheckCommandLine(int argc, char *argv[]) {
|
||||
printf("%sParsing input parameters\n", info);
|
||||
|
||||
while ((opt = getopt(argc, argv, string)) != -1) {
|
||||
switch(opt) {
|
||||
switch (opt) {
|
||||
case 'h':
|
||||
showUsage();
|
||||
help = 1;
|
||||
@@ -105,9 +105,9 @@ void readInputFilesArgument(const char *arg) {
|
||||
const char *fileName = ep->d_name;
|
||||
if (strcmp(fileName, ".") == 0 || strcmp(fileName, "..") == 0) continue;
|
||||
char *file_path = (char *)calloc(strlen(arg) + strlen(ep->d_name) + 2, sizeof(char));
|
||||
strcpy(file_path, arg);
|
||||
strcat(file_path, "/");
|
||||
strcat(file_path, ep->d_name);
|
||||
memcpy(file_path, arg, strlen(arg));
|
||||
memcpy(file_path + strlen(arg), "/", strlen("/"));
|
||||
memcpy(file_path + strlen(arg) + strlen("/"), ep->d_name, strlen(ep->d_name) + 1);
|
||||
|
||||
if (file_num == 0) {
|
||||
file_paths = (char **)calloc(1, sizeof(char *));
|
||||
@@ -131,7 +131,7 @@ void readInputFilesArgument(const char *arg) {
|
||||
dp = NULL;
|
||||
} else {
|
||||
char *file_path = (char *)calloc(strlen(arg) + 1, sizeof(char));
|
||||
strcpy(file_path, arg);
|
||||
memcpy(file_path, arg, strlen(arg) + 1);
|
||||
if (file_num == 0) {
|
||||
file_paths = (char **)calloc(1, sizeof(char *));
|
||||
}
|
||||
@@ -183,12 +183,12 @@ ie_config_t *parseConfig(const char *config_file, char comment) {
|
||||
|
||||
ie_config_t *cfg = NULL;
|
||||
char key[256], value[256];
|
||||
|
||||
|
||||
if (fscanf(file, "%s", key)!= EOF && fscanf(file, "%s", value) != EOF) {
|
||||
char *cfg_name = (char *)calloc(strlen(key) + 1, sizeof(char));
|
||||
char *cfg_value = (char *)calloc(strlen(value) + 1, sizeof(char));
|
||||
strcpy(cfg_name, key);
|
||||
strcpy(cfg_value, value);
|
||||
memcpy(cfg_name, key, strlen(key) + 1);
|
||||
memcpy(cfg_value, value, strlen(value) + 1);
|
||||
ie_config_t *cfg_t = (ie_config_t *)calloc(1, sizeof(ie_config_t));
|
||||
cfg_t->name = cfg_name;
|
||||
cfg_t->value = cfg_value;
|
||||
@@ -203,8 +203,8 @@ ie_config_t *parseConfig(const char *config_file, char comment) {
|
||||
}
|
||||
char *cfg_name = (char *)calloc(strlen(key) + 1, sizeof(char));
|
||||
char *cfg_value = (char *)calloc(strlen(value) + 1, sizeof(char));
|
||||
strcpy(cfg_name, key);
|
||||
strcpy(cfg_value, value);
|
||||
memcpy(cfg_name, key, strlen(key) + 1);
|
||||
memcpy(cfg_value, value, strlen(value) + 1);
|
||||
ie_config_t *cfg_t = (ie_config_t *)calloc(1, sizeof(ie_config_t));
|
||||
cfg_t->name = cfg_name;
|
||||
cfg_t->value = cfg_value;
|
||||
@@ -213,7 +213,7 @@ ie_config_t *parseConfig(const char *config_file, char comment) {
|
||||
cfg_temp = cfg_temp->next;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return cfg;
|
||||
}
|
||||
|
||||
@@ -229,11 +229,11 @@ void config_free(ie_config_t *config) {
|
||||
free((char *)config->name);
|
||||
config->name = NULL;
|
||||
}
|
||||
if(config->value) {
|
||||
if (config->value) {
|
||||
free((char *)config->value);
|
||||
config->value = NULL;
|
||||
}
|
||||
if(config->next) {
|
||||
if (config->next) {
|
||||
config = config->next;
|
||||
}
|
||||
|
||||
@@ -345,8 +345,8 @@ int main(int argc, char **argv) {
|
||||
|
||||
// --------------------------- 4. Read IR Generated by ModelOptimizer (.xml and .bin files) ------------
|
||||
input_weight = (char *)calloc(strlen(input_model) + 1, sizeof(char));
|
||||
strncpy(input_weight, input_model, strlen(input_model)-4);
|
||||
strcat(input_weight, ".bin");
|
||||
memcpy(input_weight, input_model, strlen(input_model) - 4);
|
||||
memcpy(input_weight + strlen(input_model) - 4, ".bin", strlen(".bin") + 1);
|
||||
printf("%sLoading network files:\n", info);
|
||||
printf("\t%s\n", input_model);
|
||||
printf("\t%s\n", input_weight);
|
||||
@@ -388,7 +388,7 @@ int main(int argc, char **argv) {
|
||||
goto err;
|
||||
|
||||
/** Working with first input tensor that stores image **/
|
||||
if(input_dim.ranks == 4) {
|
||||
if (input_dim.ranks == 4) {
|
||||
imageInputName = name;
|
||||
input_height = input_dim.dims[2];
|
||||
input_width = input_dim.dims[3];
|
||||
@@ -399,9 +399,9 @@ int main(int argc, char **argv) {
|
||||
goto err;
|
||||
} else if (input_dim.ranks == 2) {
|
||||
imInfoInputName = name;
|
||||
|
||||
|
||||
status = ie_network_set_input_precision(network, name, FP32);
|
||||
if(status !=OK || (input_dim.dims[1] != 3 && input_dim.dims[1] != 6)) {
|
||||
if (status !=OK || (input_dim.dims[1] != 3 && input_dim.dims[1] != 6)) {
|
||||
printf("Invalid input info. Should be 3 or 6 values length\n");
|
||||
goto err;
|
||||
}
|
||||
@@ -590,7 +590,7 @@ int main(int argc, char **argv) {
|
||||
|
||||
dimensions_t imInfoDim;
|
||||
status |= ie_blob_get_dims(input2, &imInfoDim);
|
||||
//Fill input tensor with values
|
||||
//Fill input tensor with values
|
||||
ie_blob_buffer_t info_blob_buffer;
|
||||
status |= ie_blob_get_buffer(input2, &info_blob_buffer);
|
||||
if (status != OK) {
|
||||
@@ -601,7 +601,7 @@ int main(int argc, char **argv) {
|
||||
for (image_id = 0; image_id < batchSize; ++image_id) {
|
||||
p[image_id * imInfoDim.dims[1] + 0] = (float)input_height;
|
||||
p[image_id * imInfoDim.dims[1] + 1] = (float)input_width;
|
||||
|
||||
|
||||
for (k = 2; k < imInfoDim.dims[1]; k++) {
|
||||
p[image_id * imInfoDim.dims[1] + k] = 1.0f; // all scale factors are set to 1.0
|
||||
}
|
||||
@@ -616,7 +616,7 @@ int main(int argc, char **argv) {
|
||||
status |= ie_infer_request_wait(infer_request, -1);
|
||||
if (status != OK)
|
||||
goto err;
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
|
||||
// --------------------------- 11. Process output -------------------------------------------------------
|
||||
printf("%sProcessing output blobs\n", info);
|
||||
@@ -634,7 +634,7 @@ int main(int argc, char **argv) {
|
||||
int **classes = (int **)calloc(image_num, sizeof(int *));
|
||||
rectangle_t **boxes = (rectangle_t **)calloc(image_num, sizeof(rectangle_t *));
|
||||
int *object_num = (int *)calloc(image_num, sizeof(int));
|
||||
for ( i = 0; i < image_num; ++i) {
|
||||
for (i = 0; i < image_num; ++i) {
|
||||
classes[i] = (int *)calloc(maxProposalCount, sizeof(int));
|
||||
boxes[i] = (rectangle_t *)calloc(maxProposalCount, sizeof(rectangle_t));
|
||||
object_num[i] = 0;
|
||||
@@ -678,11 +678,11 @@ int main(int argc, char **argv) {
|
||||
}
|
||||
const char *out = "out_";
|
||||
char str_num[16] = {0};
|
||||
int2str(str_num, batch_id);
|
||||
int2str(str_num, batch_id);
|
||||
char *img_path = (char *)calloc(strlen(out) + strlen(str_num) + strlen(".bmp") + 1, sizeof(char));
|
||||
strcpy(img_path, out);
|
||||
strcat(img_path, str_num);
|
||||
strcat(img_path, ".bmp");
|
||||
memcpy(img_path, out, strlen(out));
|
||||
memcpy(img_path + strlen(out), str_num, strlen(str_num));
|
||||
memcpy(img_path + strlen(out) + strlen(str_num), ".bmp", strlen(".bmp") + 1);
|
||||
image_save(img_path, &originalImages[batch_id]);
|
||||
printf("%sImage %s created!\n", info, img_path);
|
||||
free(img_path);
|
||||
|
||||
@@ -21,6 +21,12 @@ target_include_directories(${TARGET_NAME} PUBLIC "${InferenceEngine_C_API_SOURCE
|
||||
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
|
||||
# Workaround to avoid warnings caused with bug in the avx512intrin.h of GCC5
|
||||
if((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND
|
||||
(CMAKE_CXX_COMPILER_VERSION VERSION_LESS_EQUAL 5.5))
|
||||
set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS_RELEASE "-Wno-error=maybe-uninitialized -Wno-maybe-uninitialized")
|
||||
endif()
|
||||
|
||||
# export
|
||||
|
||||
export(TARGETS ${TARGET_NAME} NAMESPACE IE:: APPEND FILE "${CMAKE_BINARY_DIR}/targets.cmake")
|
||||
@@ -28,8 +34,8 @@ export(TARGETS ${TARGET_NAME} NAMESPACE IE:: APPEND FILE "${CMAKE_BINARY_DIR}/ta
|
||||
# install
|
||||
|
||||
install(TARGETS ${TARGET_NAME}
|
||||
RUNTIME DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT core
|
||||
ARCHIVE DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT core
|
||||
RUNTIME DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT core
|
||||
ARCHIVE DESTINATION ${IE_CPACK_ARCHIVE_PATH} COMPONENT core
|
||||
LIBRARY DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT core)
|
||||
|
||||
install(DIRECTORY ${InferenceEngine_C_API_SOURCE_DIR}/include/
|
||||
|
||||
@@ -310,6 +310,28 @@ IEStatusCode ie_core_read_network(ie_core_t *core, const char *xml, const char *
|
||||
return status;
|
||||
}
|
||||
|
||||
IEStatusCode ie_core_read_network_from_memory(ie_core_t *core, const uint8_t *xml_content, size_t xml_content_size, \
|
||||
const ie_blob_t *weight_blob, ie_network_t **network) {
|
||||
if (core == nullptr || xml_content == nullptr || network == nullptr || weight_blob == nullptr) {
|
||||
return IEStatusCode::GENERAL_ERROR;
|
||||
}
|
||||
|
||||
IEStatusCode status = IEStatusCode::OK;
|
||||
|
||||
try {
|
||||
std::unique_ptr<ie_network_t> network_result(new ie_network_t);
|
||||
network_result->object = core->object.ReadNetwork(std::string(reinterpret_cast<const char *>(xml_content),
|
||||
reinterpret_cast<const char *>(xml_content + xml_content_size)), weight_blob->object);
|
||||
*network = network_result.release();
|
||||
} catch (const IE::details::InferenceEngineException& e) {
|
||||
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
|
||||
} catch (...) {
|
||||
return IEStatusCode::UNEXPECTED;
|
||||
}
|
||||
|
||||
return status;
|
||||
}
|
||||
|
||||
IEStatusCode ie_core_load_network(ie_core_t *core, const ie_network_t *network, const char *device_name, \
|
||||
const ie_config_t *config, ie_executable_network_t **exe_network) {
|
||||
IEStatusCode status = IEStatusCode::OK;
|
||||
|
||||
40
inference-engine/ie_bridges/c/tests/CMakeLists.txt
Normal file
40
inference-engine/ie_bridges/c/tests/CMakeLists.txt
Normal file
@@ -0,0 +1,40 @@
|
||||
# Copyright (C) 2018-2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
set(TARGET_NAME "InferenceEngineCAPITests")
|
||||
|
||||
# Find OpenCV components if exist
|
||||
find_package(OpenCV COMPONENTS imgcodecs videoio imgproc QUIET)
|
||||
if(NOT OpenCV_FOUND)
|
||||
message(WARNING "OPENCV is disabled or not found, " ${TARGET_NAME} " is built without OPENCV support")
|
||||
else()
|
||||
add_definitions(-DUSE_OPENCV)
|
||||
endif()
|
||||
|
||||
add_executable(${TARGET_NAME} ie_c_api_test.cpp test_model_repo.hpp)
|
||||
|
||||
target_link_libraries(${TARGET_NAME}
|
||||
PRIVATE
|
||||
inference_engine
|
||||
inference_engine_c_api
|
||||
${OpenCV_LIBRARIES}
|
||||
commonTestUtils
|
||||
)
|
||||
|
||||
target_compile_definitions(${TARGET_NAME}
|
||||
PUBLIC ${ARGV}
|
||||
DATA_PATH=\"${DATA_PATH}\"
|
||||
MODELS_PATH=\"${MODELS_PATH}\" )
|
||||
|
||||
add_dependencies(${TARGET_NAME} MultiDevicePlugin)
|
||||
|
||||
if(ENABLE_MKL_DNN)
|
||||
add_dependencies(${TARGET_NAME} MKLDNNPlugin)
|
||||
endif()
|
||||
|
||||
if(ENABLE_CLDNN)
|
||||
add_dependencies(${TARGET_NAME} clDNNPlugin)
|
||||
endif()
|
||||
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user