Merge remote-tracking branch 'upstream/master' into fix/achetver/redundant_results_splits

This commit is contained in:
achetver 2021-07-01 11:37:43 +03:00
commit 60ddfbc495
118 changed files with 961 additions and 581 deletions

View File

@ -96,9 +96,7 @@ jobs:
-DENABLE_SPEECH_DEMO=OFF
-DNGRAPH_ONNX_IMPORT_ENABLE=ON
-DNGRAPH_ONNX_EDITOR_ENABLE=ON
-DNGRAPH_INTERPRETER_ENABLE=ON
-DNGRAPH_DEBUG_ENABLE=OFF
-DNGRAPH_DYNAMIC_COMPONENTS_ENABLE=ON
$(REPO_DIR)
workingDirectory: $(BUILD_DIR)

View File

@ -70,9 +70,7 @@ RUN cmake .. \
-DPYTHON_EXECUTABLE=/usr/bin/python3 \
-DNGRAPH_ONNX_IMPORT_ENABLE=ON \
-DNGRAPH_ONNX_EDITOR_ENABLE=ON \
-DNGRAPH_INTERPRETER_ENABLE=ON \
-DNGRAPH_DEBUG_ENABLE=OFF \
-DNGRAPH_DYNAMIC_COMPONENTS_ENABLE=ON \
-DCMAKE_INSTALL_PREFIX=/openvino/dist \
-DNGRAPH_USE_PROTOBUF_LITE=${PROTOBUF_LITE}
RUN make -j $(nproc) install

View File

@ -28,7 +28,6 @@ endif()
message (STATUS "PROJECT ............................... " ${PROJECT_NAME})
message (STATUS "CMAKE_BINARY_DIR ...................... " ${CMAKE_BINARY_DIR})
message (STATUS "OpenVINO_MAIN_SOURCE_DIR .............. " ${OpenVINO_MAIN_SOURCE_DIR})
message (STATUS "IE_MAIN_SOURCE_DIR .................... " ${IE_MAIN_SOURCE_DIR})
message (STATUS "CMAKE_GENERATOR ....................... " ${CMAKE_GENERATOR})
message (STATUS "CMAKE_C_COMPILER_ID ................... " ${CMAKE_C_COMPILER_ID})
message (STATUS "CMAKE_BUILD_TYPE ...................... " ${CMAKE_BUILD_TYPE})
@ -64,7 +63,6 @@ function(build_ngraph)
ngraph_set(NGRAPH_ONNX_IMPORT_ENABLE OFF)
ngraph_set(NGRAPH_PDPD_FRONTEND_ENABLE OFF)
endif()
ngraph_set(NGRAPH_INTERPRETER_ENABLE ON)
if(ENABLE_PYTHON)
ngraph_set(NGRAPH_PYTHON_BUILD_ENABLE ON)
@ -132,72 +130,18 @@ add_subdirectory(thirdparty)
add_subdirectory(openvino)
build_ngraph()
add_subdirectory(inference-engine)
# for Template plugin
openvino_developer_export_targets(COMPONENT ngraph TARGETS ngraph_backend interpreter_backend)
include(cmake/extra_modules.cmake)
add_subdirectory(model-optimizer)
add_subdirectory(docs)
add_subdirectory(tools)
add_subdirectory(scripts)
#
# Shellcheck
# CPack
#
ie_shellcheck_process(DIRECTORY "${OpenVINO_MAIN_SOURCE_DIR}"
SKIP "${OpenVINO_MAIN_SOURCE_DIR}/bin"
"${OpenVINO_MAIN_SOURCE_DIR}/build"
"${OpenVINO_MAIN_SOURCE_DIR}/thirdparty"
"${IE_MAIN_SOURCE_DIR}/tests/ie_test_utils/common_test_utils/gtest"
"${IE_MAIN_SOURCE_DIR}/samples/thirdparty"
"${IE_MAIN_SOURCE_DIR}/thirdparty"
"${IE_MAIN_SOURCE_DIR}/temp"
# TODO fix and enable back:
"${OpenVINO_MAIN_SOURCE_DIR}/inference-engine/scripts/dependencies.sh"
"${OpenVINO_MAIN_SOURCE_DIR}/scripts/install_dependencies/install_NEO_OCL_driver.sh"
"${OpenVINO_MAIN_SOURCE_DIR}/scripts/install_dependencies/install_openvino_dependencies.sh"
"${OpenVINO_MAIN_SOURCE_DIR}/ngraph/python/tests/test_onnx/model_zoo_preprocess.sh"
)
#
# cpack
#
# install setupvars
ie_cpack_add_component(setupvars REQUIRED)
if(UNIX)
install(PROGRAMS scripts/setupvars/setupvars.sh
DESTINATION bin
COMPONENT setupvars)
elseif(WIN32)
install(PROGRAMS scripts/setupvars/setupvars.bat
DESTINATION bin
COMPONENT setupvars)
endif()
# install install_dependencies
if(UNIX)
ie_cpack_add_component(install_dependencies REQUIRED)
install(DIRECTORY scripts/install_dependencies/
DESTINATION install_dependencies
COMPONENT install_dependencies)
endif()
# install files for demo
ie_cpack_add_component(demo_scripts DEPENDS core)
if(UNIX)
install(DIRECTORY scripts/demo/
DESTINATION deployment_tools/demo
COMPONENT demo_scripts
USE_SOURCE_PERMISSIONS
PATTERN *.bat EXCLUDE)
elseif(WIN32)
install(DIRECTORY scripts/demo/
DESTINATION deployment_tools/demo
COMPONENT demo_scripts
USE_SOURCE_PERMISSIONS
PATTERN *.sh EXCLUDE)
endif()
ie_cpack(${IE_CPACK_COMPONENTS_ALL})

View File

@ -67,15 +67,15 @@ function(ie_sse42_optimization_flags flags)
if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# No such option for MSVC 2019
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
set(${flags} "/arch:SSE4.2 /QxSSE4.2" PARENT_SCOPE)
set(${flags} /arch:SSE4.2 /QxSSE4.2 PARENT_SCOPE)
else()
message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}")
endif()
else()
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
set(${flags} "-msse4.2 -xSSE4.2" PARENT_SCOPE)
set(${flags} -msse4.2 -xSSE4.2 PARENT_SCOPE)
else()
set(${flags} "-msse4.2" PARENT_SCOPE)
set(${flags} -msse4.2 PARENT_SCOPE)
endif()
endif()
endfunction()
@ -86,17 +86,17 @@ endfunction()
function(ie_avx2_optimization_flags flags)
if(WIN32)
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
set(${flags} "/QxCORE-AVX2" PARENT_SCOPE)
set(${flags} /QxCORE-AVX2 PARENT_SCOPE)
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
set(${flags} "/arch:AVX2" PARENT_SCOPE)
set(${flags} /arch:AVX2 PARENT_SCOPE)
else()
message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}")
endif()
else()
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
set(${flags} "-march=core-avx2 -xCORE-AVX2 -mtune=core-avx2" PARENT_SCOPE)
set(${flags} -march=core-avx2 -xCORE-AVX2 -mtune=core-avx2 PARENT_SCOPE)
else()
set(${flags} "-mavx2 -mfma" PARENT_SCOPE)
set(${flags} -mavx2 -mfma PARENT_SCOPE)
endif()
endif()
endfunction()
@ -108,21 +108,21 @@ endfunction()
function(ie_avx512_optimization_flags flags)
if(WIN32)
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
set(${flags} "/QxCOMMON-AVX512" PARENT_SCOPE)
set(${flags} /QxCOMMON-AVX512 PARENT_SCOPE)
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
set(${flags} "/arch:AVX512" PARENT_SCOPE)
set(${flags} /arch:AVX512 PARENT_SCOPE)
else()
message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}")
endif()
else()
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
set(${flags} "-xCOMMON-AVX512" PARENT_SCOPE)
set(${flags} -xCOMMON-AVX512 PARENT_SCOPE)
endif()
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set(${flags} "-mavx512f -mfma" PARENT_SCOPE)
set(${flags} -mavx512f -mfma PARENT_SCOPE)
endif()
if(CMAKE_CXX_COMPILER_ID MATCHES "^(Clang|AppleClang)$")
set(${flags} "-mavx512f -mfma" PARENT_SCOPE)
set(${flags} -mavx512f -mfma PARENT_SCOPE)
endif()
endif()
endfunction()
@ -134,19 +134,19 @@ function(ie_arm_neon_optimization_flags flags)
# nothing
elseif(ANDROID)
if(ANDROID_ABI STREQUAL "arm64-v8a")
set(${flags} "-mfpu=neon" PARENT_SCOPE)
set(${flags} -mfpu=neon PARENT_SCOPE)
elseif(ANDROID_ABI STREQUAL "armeabi-v7a-hard with NEON")
set(${flags} "-march=armv7-a -mfloat-abi=hard -mhard-float -D_NDK_MATH_NO_SOFTFP=1 -mfpu=neon" PARENT_SCOPE)
set(${flags} -march=armv7-a -mfloat-abi=hard -mhard-float -D_NDK_MATH_NO_SOFTFP=1 -mfpu=neon PARENT_SCOPE)
elseif((ANDROID_ABI STREQUAL "armeabi-v7a with NEON") OR
(ANDROID_ABI STREQUAL "armeabi-v7a" AND
DEFINED CMAKE_ANDROID_ARM_NEON AND CMAKE_ANDROID_ARM_NEON))
set(${flags} "-march=armv7-a -mfloat-abi=softfp -mfpu=neon" PARENT_SCOPE)
set(${flags} -march=armv7-a -mfloat-abi=softfp -mfpu=neon PARENT_SCOPE)
endif()
else()
if(AARCH64)
set(${flags} "-O2 -ftree-vectorize" PARENT_SCOPE)
set(${flags} -O2 -ftree-vectorize PARENT_SCOPE)
elseif(ARM)
set(${flags} "-mfpu=neon" PARENT_SCOPE)
set(${flags} -mfpu=neon PARENT_SCOPE)
endif()
endif()
endfunction()

View File

@ -120,8 +120,8 @@ function(_clone_source_to_target TARGET SOURCE ARCH_SET)
VERBATIM
)
set_property(SOURCE ${ARCH_SOURCE} APPEND_STRING PROPERTY COMPILE_FLAGS
" ${_FLAGS_${_arch}}")
set_property(SOURCE ${ARCH_SOURCE} APPEND_STRING PROPERTY COMPILE_OPTIONS
"${_FLAGS_${_arch}}")
set_property(SOURCE ${ARCH_SOURCE} APPEND PROPERTY COMPILE_DEFINITIONS
${_DEFINE_${_arch}}

View File

@ -42,7 +42,7 @@ macro(ie_parse_ci_build_number)
message(FATAL_ERROR "repo_root is not defined")
endif()
if(DEFINED IEDevScripts_DIR AND DEFINED IE_MAIN_SOURCE_DIR AND NOT DEFINED custom_build)
if(DEFINED IEDevScripts_DIR AND DEFINED OpenVINO_SOURCE_DIR AND NOT DEFINED custom_build)
set(ie_version_hpp "${IE_MAIN_SOURCE_DIR}/include/ie_version.hpp")
if(NOT EXISTS ${ie_version_hpp})
message(FATAL_ERROR "File ie_version.hpp with IE_VERSION definitions is not found")
@ -67,7 +67,7 @@ macro(ie_parse_ci_build_number)
endif()
endforeach()
elseif(has_ci_version)
message(WARNING "IE_MAIN_SOURCE_DIR is not defined. No way to compare versions")
message(WARNING "OpenVINO_SOURCE_DIR is not defined. No way to compare versions")
else()
message(WARNING "No way to detect OpenVINO version. Supposing 0.0.0.0")
endif()

88
cmake/extra_modules.cmake Normal file
View File

@ -0,0 +1,88 @@
# Generates InferenceEngineDeveloperPackageConfig.cmake (plus a matching
# version file) in the build tree so that code built against the build tree
# can consume in-tree targets under the IE:: namespace.
# Reads the global `openvino_export_components` list; each element is expected
# to be the NAME of a variable holding that component's target list.
function(ie_generate_dev_package_config)
# dummy check that OpenCV is here
find_package(OpenCV QUIET)
# seed the dependency list for the ie_dev_targets umbrella target below
set(all_dev_targets gflags ie_libraries)
foreach(component IN LISTS openvino_export_components)
# ${${component}} is a double expansion: `component` holds a variable name
# whose value is the list of targets belonging to that component
export(TARGETS ${${component}} NAMESPACE IE::
APPEND FILE "${CMAKE_BINARY_DIR}/${component}_dev_targets.cmake")
list(APPEND all_dev_targets ${${component}})
endforeach()
# umbrella target ensuring all exported targets get built by default
add_custom_target(ie_dev_targets ALL DEPENDS ${all_dev_targets})
# INSTALL_DESTINATION is mandatory for configure_package_config_file, but the
# generated config is only consumed from the build tree (hence "not used")
configure_package_config_file("${OpenVINO_MAIN_SOURCE_DIR}/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in"
"${CMAKE_BINARY_DIR}/InferenceEngineDeveloperPackageConfig.cmake"
INSTALL_DESTINATION share # not used
PATH_VARS "OpenVINO_MAIN_SOURCE_DIR;gflags_BINARY_DIR"
NO_CHECK_REQUIRED_COMPONENTS_MACRO)
# @ONLY keeps literal ${...} in the template intact; only @VAR@ is substituted
configure_file("${OpenVINO_MAIN_SOURCE_DIR}/cmake/templates/InferenceEngineConfig-version.cmake.in"
"${CMAKE_BINARY_DIR}/InferenceEngineDeveloperPackageConfig-version.cmake"
@ONLY)
endfunction()
ie_generate_dev_package_config()
#
# Add extra modules
#
# Registers "extra" modules (out-of-tree plugins plus the template plugin) so
# they are built via add_subdirectory under build-modules/<name>.
# Inputs: IE_EXTRA_MODULES (user-supplied list of module directories), the
# auto-discovered "runtime/plugins/*" folders, and per-module BUILD_<name>
# cache switches.
function(register_extra_modules)
# post export
openvino_developer_export_targets(COMPONENT inference_engine TARGETS inference_engine)
openvino_developer_export_targets(COMPONENT ngraph TARGETS ngraph)
# presumably the <Pkg>_DIR hint so find_package(InferenceEngineDeveloperPackage)
# inside extra modules resolves to the stub written below — TODO confirm
set(InferenceEngineDeveloperPackage_DIR "${CMAKE_CURRENT_BINARY_DIR}/runtime")
# Writes a stub InferenceEngineDeveloperPackageConfig.cmake that aliases the
# in-tree exported targets as IE::<target>.
function(generate_fake_dev_package)
set(iedevconfig_file "${InferenceEngineDeveloperPackage_DIR}/InferenceEngineDeveloperPackageConfig.cmake")
file(REMOVE "${iedevconfig_file}")
file(WRITE "${iedevconfig_file}" "\# !! AUTOGENERATED: DON'T EDIT !!\n\n")
file(APPEND "${iedevconfig_file}" "ie_deprecated_no_errors()\n")
# NOTE(review): `IN LISTS ${openvino_export_components}` double-expands — it
# iterates the union of the lists NAMED by the elements of
# openvino_export_components (i.e. the exported targets themselves).
# Compare the single expansion used in ie_generate_dev_package_config;
# looks intentional here, but worth confirming.
foreach(target IN LISTS ${openvino_export_components})
if(target)
file(APPEND "${iedevconfig_file}" "add_library(IE::${target} ALIAS ${target})\n")
endif()
endforeach()
endfunction()
generate_fake_dev_package()
# automatically import plugins from the 'plugins' folder
file(GLOB local_extra_modules "runtime/plugins/*")
# add template plugin
list(APPEND local_extra_modules "${OpenVINO_MAIN_SOURCE_DIR}/docs/template_plugin")
# detect where IE_EXTRA_MODULES contains folders with CMakeLists.txt
# other folders are supposed to have sub-folders with CMakeLists.txt
foreach(module_path IN LISTS IE_EXTRA_MODULES)
if(EXISTS "${module_path}/CMakeLists.txt")
list(APPEND extra_modules "${module_path}")
elseif(module_path)
# expand one level of sub-folders; the GLOB re-lists ${extra_modules} so
# entries collected on earlier iterations are preserved
file(GLOB extra_modules ${extra_modules} "${module_path}/*")
endif()
endforeach()
# add each extra module
foreach(module_path IN LISTS extra_modules local_extra_modules)
if(module_path)
get_filename_component(module_name "${module_path}" NAME)
set(build_module ON)
if(NOT EXISTS "${module_path}/CMakeLists.txt") # if module is built not using cmake
set(build_module OFF)
endif()
# per-module opt-out switch; FORCE only runs when the variable is not yet
# defined, so a user-provided BUILD_<name> value is not overwritten
if(NOT DEFINED BUILD_${module_name})
set(BUILD_${module_name} ${build_module} CACHE BOOL "Build ${module_name} extra module" FORCE)
endif()
if(BUILD_${module_name})
message(STATUS "Register ${module_name} to be built in build-modules/${module_name}")
add_subdirectory("${module_path}" "build-modules/${module_name}")
endif()
endif()
endforeach()
endfunction()
register_extra_modules()

View File

@ -46,10 +46,6 @@ if(NOT ENABLE_DOCKER)
install(TARGETS templatePlugin template_extension
LIBRARY DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT tests EXCLUDE_FROM_ALL)
if(ENABLE_FUNCTIONAL_TESTS)
install(TARGETS templateFuncTests
RUNTIME DESTINATION tests COMPONENT tests EXCLUDE_FROM_ALL)
endif()
endif()
set(LINKCHECKER_PY "" CACHE FILEPATH "Path to linkchecker.py for documentation check")

View File

@ -174,11 +174,24 @@ cd darkflow
#### <a name="yolov1-v2-to-tf"></a>Convert DarkNet\* YOLOv1 or YOLOv2 Model to TensorFlow\*
To convert a YOLOv1 or YOLOv2 model to TensorFlow, go to the root directory of the cloned DarkFlow repository and run the following command:<br>
For Yolo V1:
```sh
python3 ./flow --model <path_to_model>/<model_name>.cfg --load <path_to_model>/<model_name>.weights --savepb
python3 flow --model yolov1.cfg --load yolov1.weights --savepb
```
If the model was successfully converted, you can find the `<model_name>.meta` and `<model_name>.pb` files
For Yolo V2 with the VOC dataset, the `--labels` argument must be specified, and additional changes to the original exporting script are required.
In the file [https://github.com/thtrieu/darkflow/blob/b187c65/darkflow/utils/loader.py#L121](https://github.com/thtrieu/darkflow/blob/b187c65630f9aa1bb8b809c33ec67c8cc5d60124/darkflow/utils/loader.py#L121)
change line 121 from `self.offset = 16` to `self.offset = 20`. Then run:
```sh
python3 flow --model yolov2-voc.cfg --load yolov2-voc.weights --labels voc-labels.txt --savepb
```
The `voc-labels` file can be found at the following link: https://raw.githubusercontent.com/szaza/android-yolo-v2/master/assets/tiny-yolo-voc-labels.txt
General conversion command is:
```sh
python3 flow --model <path_to_model>/<model_name>.cfg --load <path_to_model>/<model_name>.weights --labels <path_to_dataset_labels_file> --savepb
```
Where argument `--labels` for Yolo V1 can be skipped. If the model was successfully converted, you can find the `<model_name>.meta` and `<model_name>.pb` files
in `built_graph` subdirectory of the cloned DarkFlow repository.
File `<model_name>.pb` is a TensorFlow representation of the YOLO model.

View File

@ -2,35 +2,32 @@
**Versioned name**: *Cos-1*
**Category**: Arithmetic unary operation
**Category**: Arithmetic unary operation
**Short description**: *Cos* performs element-wise cosine operation with given tensor.
**Short description**: *Cos* performs element-wise cosine operation on a given input tensor.
**Attributes**:
No attributes available.
**Inputs**
* **1**: An tensor of type T. **Required.**
**Outputs**
* **1**: The result of element-wise cos operation. A tensor of type T.
**Types**
* *T*: any numeric type.
*Cos* does the following with the input tensor *a*:
**Detailed description**: *Cos* performs element-wise cosine operation on a given input tensor, based on the following mathematical formula:
\f[
a_{i} = cos(a_{i})
\f]
**Examples**
**Attributes**: *Cos* operation has no attributes.
*Example 1*
**Inputs**
* **1**: A tensor of type *T* and arbitrary shape. **Required.**
**Outputs**
* **1**: The result of element-wise *Cos* operation. A tensor of type *T* and the same shape as the input tensor.
**Types**
* *T*: any numeric type.
**Example**
```xml
<layer ... type="Cos">

View File

@ -25,9 +25,8 @@ if(ENABLE_TESTS)
add_subdirectory(tests/functional)
endif()
endif()
# [cmake:main]
# install
# ATTENTION: uncomment to install component
# ie_cpack(template)
ie_cpack(template)
# [cmake:main]

View File

@ -15,6 +15,7 @@ ie_add_plugin(NAME ${TARGET_NAME}
SKIP_INSTALL # ATTENTION: uncomment to install component
VERSION_DEFINES_FOR template_plugin.cpp
ADD_CLANG_FORMAT)
# Enable support of CC for the plugin
ie_mark_target_as_cc(${TARGET_NAME})

View File

@ -15,6 +15,8 @@ endif()
add_subdirectory(tools)
add_subdirectory(samples)
openvino_developer_export_targets(COMPONENT openvino_common TARGETS format_reader ie_samples_utils)
if(ENABLE_TESTS)
add_subdirectory(tests_deprecated)
add_subdirectory(tests)
@ -50,6 +52,11 @@ install(TARGETS format_reader
RUNTIME DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT tests EXCLUDE_FROM_ALL
LIBRARY DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT tests EXCLUDE_FROM_ALL)
if(TARGET benchmark_app)
install(TARGETS benchmark_app
RUNTIME DESTINATION tests COMPONENT tests EXCLUDE_FROM_ALL)
endif()
# install C samples
ie_cpack_add_component(c_samples DEPENDS core_c)
@ -85,103 +92,3 @@ if(SPEECH_LIBS_AND_DEMOS)
USE_SOURCE_PERMISSIONS
COMPONENT speech_demo_files)
endif()
#
# Developer package
#
openvino_developer_export_targets(COMPONENT openvino_common TARGETS format_reader ie_samples_utils)
# for Template plugin
if(NGRAPH_INTERPRETER_ENABLE)
openvino_developer_export_targets(COMPONENT ngraph TARGETS ngraph_backend interpreter_backend)
endif()
function(ie_generate_dev_package_config)
# dummy check that OpenCV is here
find_package(OpenCV QUIET)
set(all_dev_targets gflags ie_libraries)
foreach(component IN LISTS openvino_export_components)
export(TARGETS ${${component}} NAMESPACE IE::
APPEND FILE "${CMAKE_BINARY_DIR}/${component}_dev_targets.cmake")
list(APPEND all_dev_targets ${${component}})
endforeach()
add_custom_target(ie_dev_targets ALL DEPENDS ${all_dev_targets})
configure_package_config_file("${OpenVINO_MAIN_SOURCE_DIR}/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in"
"${CMAKE_BINARY_DIR}/InferenceEngineDeveloperPackageConfig.cmake"
INSTALL_DESTINATION share # not used
PATH_VARS "OpenVINO_MAIN_SOURCE_DIR;IE_MAIN_SOURCE_DIR;gflags_BINARY_DIR"
NO_CHECK_REQUIRED_COMPONENTS_MACRO)
configure_file("${OpenVINO_MAIN_SOURCE_DIR}/cmake/templates/InferenceEngineConfig-version.cmake.in"
"${CMAKE_BINARY_DIR}/InferenceEngineDeveloperPackageConfig-version.cmake"
@ONLY)
endfunction()
ie_generate_dev_package_config()
#
# Add extra modules
#
function(register_extra_modules)
# post export
openvino_developer_export_targets(COMPONENT inference_engine TARGETS inference_engine)
openvino_developer_export_targets(COMPONENT ngraph TARGETS ${NGRAPH_LIBRARIES})
set(InferenceEngineDeveloperPackage_DIR "${CMAKE_CURRENT_BINARY_DIR}/build-modules")
function(generate_fake_dev_package)
set(iedevconfig_file "${InferenceEngineDeveloperPackage_DIR}/InferenceEngineDeveloperPackageConfig.cmake")
file(REMOVE "${iedevconfig_file}")
file(WRITE "${iedevconfig_file}" "\# !! AUTOGENERATED: DON'T EDIT !!\n\n")
file(APPEND "${iedevconfig_file}" "ie_deprecated_no_errors()\n")
foreach(target IN LISTS ${openvino_export_components})
if(target)
file(APPEND "${iedevconfig_file}" "add_library(IE::${target} ALIAS ${target})\n")
endif()
endforeach()
endfunction()
generate_fake_dev_package()
# automatically import plugins from the 'plugins' folder
file(GLOB local_extra_modules "plugins/*")
if(NGRAPH_INTERPRETER_ENABLE)
list(APPEND local_extra_modules "${OpenVINO_MAIN_SOURCE_DIR}/docs/template_plugin")
endif()
# detect where IE_EXTRA_MODULES contains folders with CMakeLists.txt
# other folders are supposed to have sub-folders with CMakeLists.txt
foreach(module_path IN LISTS IE_EXTRA_MODULES)
if(EXISTS "${module_path}/CMakeLists.txt")
list(APPEND extra_modules "${module_path}")
elseif(module_path)
file(GLOB extra_modules ${extra_modules} "${module_path}/*")
endif()
endforeach()
# add each extra module
foreach(module_path IN LISTS extra_modules local_extra_modules)
if(module_path)
get_filename_component(module_name "${module_path}" NAME)
set(build_module ON)
if(NOT EXISTS "${module_path}/CMakeLists.txt") # if module is built not using cmake
set(build_module OFF)
endif()
if(NOT DEFINED BUILD_${module_name})
set(BUILD_${module_name} ${build_module} CACHE BOOL "Build ${module_name} extra module" FORCE)
endif()
if(BUILD_${module_name})
message(STATUS "Register ${module_name} to be built in build-modules/${module_name}")
add_subdirectory("${module_path}" "build-modules/${module_name}")
endif()
endif()
endforeach()
endfunction()
register_extra_modules()

View File

@ -83,6 +83,14 @@ foreach(firmware_name IN LISTS VPU_SUPPORTED_FIRMWARES)
install(FILES ${${var_name}}
DESTINATION ${IE_CPACK_RUNTIME_PATH}
COMPONENT myriad)
if(ENABLE_MYRIAD AND ENABLE_BEH_TESTS)
# for MyriadBehaviorTests
install(FILES ${${var_name}}
DESTINATION tests
COMPONENT tests
EXCLUDE_FROM_ALL)
endif()
endforeach()
add_custom_target(vpu_copy_firmware

View File

@ -14,7 +14,7 @@ Hello Classification C sample application demonstrates how to use the following
| Options | Values |
|:--- |:---
| Validated Models | AlexNet and GoogLeNet (image classification networks)
| Validated Models | [alexnet](@ref omz_models_model_alexnet), [googlenet-v1](@ref omz_models_model_googlenet_v1)
| Model Format | Inference Engine Intermediate Representation (\*.xml + \*.bin), ONNX (\*.onnx)
| Validated images | The sample uses OpenCV\* to [read input image](https://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga288b8b3da0892bd651fce07b3bbd3a56) (\*.bmp, \*.png)
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -12,7 +12,7 @@ Basic Inference Engine API is covered by [Hello Classification C sample](../hell
| Options | Values |
|:--- |:---
| Validated Models | AlexNet (image classification network)
| Validated Models | [alexnet](@ref omz_models_model_alexnet)
| Model Format | Inference Engine Intermediate Representation (\*.xml + \*.bin), ONNX (\*.onnx)
| Validated images | An uncompressed image in the NV12 color format - \*.yuv
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -20,7 +20,7 @@ Basic Inference Engine API is covered by [Hello Classification C sample](../hell
| Options | Values |
|:--- |:---
| Validated Models | Person detection SSD (object detection network)
| Validated Models | [person-detection-retail-0013](@ref omz_models_model_person_detection_retail_0013)
| Model Format | Inference Engine Intermediate Representation (.xml + .bin), ONNX (.onnx)
| Validated images | The sample uses OpenCV* to [read input image](https://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga288b8b3da0892bd651fce07b3bbd3a56) (.bmp, .png, .jpg)
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -8,7 +8,7 @@ cmake_minimum_required (VERSION 3.13)
# Set the project name
project (ie_python_api)
if(DEFINED IE_MAIN_SOURCE_DIR)
if(DEFINED OpenVINO_SOURCE_DIR)
set(InferenceEngine_LIBRARIES inference_engine)
else()
find_package(InferenceEngineDeveloperPackage REQUIRED)

View File

@ -14,10 +14,10 @@ Basic Inference Engine API is covered by [Hello Classification Python* Sample](.
| Options | Values |
| :------------------------- | :-------------------------------------------------------------------------------------------------------- |
| Validated Models | [alexnet](https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/public/alexnet/alexnet.md) |
| Validated Models | [alexnet](@ref omz_models_model_alexnet) |
| Model Format | Inference Engine Intermediate Representation (.xml + .bin), ONNX (.onnx) |
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |
| Other language realization | [C++](../../../../samples/classification_sample_async) |
| Other language realization | [C++](../../../../samples/classification_sample_async/README.md) |
## How It Works

View File

@ -13,10 +13,10 @@ The following Inference Engine Python API is used in the application:
| Options | Values |
| :------------------------- | :-------------------------------------------------------------------------------------------------------- |
| Validated Models | [alexnet](https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/public/alexnet/alexnet.md) |
| Validated Models | [alexnet](@ref omz_models_model_alexnet) |
| Model Format | Inference Engine Intermediate Representation (.xml + .bin), ONNX (.onnx) |
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |
| Other language realization | [C++](../../../../samples/hello_classification), [C](../../../c/samples/hello_classification) |
| Other language realization | [C++](../../../../samples/hello_classification/README.md), [C](../../../c/samples/hello_classification/README.md) |
## How It Works

View File

@ -12,7 +12,7 @@ The following Inference Engine Python API is used in the application:
| Options | Values |
| :------------------------- | :---------------------------------------------------------------------- |
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |
| Other language realization | [C++](../../../../samples/hello_query_device) |
| Other language realization | [C++](../../../../samples/hello_query_device/README.md) |
## How It Works

View File

@ -14,10 +14,10 @@ Basic Inference Engine API is covered by [Hello Classification Python* Sample](.
| Options | Values |
| :------------------------- | :-------------------------------------------------------------------------------------------------------------------------- |
| Validated Models | [mobilenet-ssd](https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/public/mobilenet-ssd/mobilenet-ssd.md) |
| Validated Models | [mobilenet-ssd](@ref omz_models_model_mobilenet_ssd) |
| Model Format | Inference Engine Intermediate Representation (.xml + .bin), ONNX (.onnx) |
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |
| Other language realization | [C++](../../../../samples/hello_reshape_ssd) |
| Other language realization | [C++](../../../../samples/hello_reshape_ssd/README.md) |
## How It Works

View File

@ -15,11 +15,11 @@ Basic Inference Engine API is covered by [Hello Classification Python* Sample](.
| Options | Values |
| :------------------------- | :---------------------------------------------------------------------- |
| Validated Models | LeNet (image classification network) |
| Validated Models | LeNet |
| Model Format | Network weights file (\*.bin) |
| Validated images | The sample uses OpenCV\* to [read input grayscale image](https://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga288b8b3da0892bd651fce07b3bbd3a56) (\*.bmp, \*.png) or single-channel `ubyte` image |
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |
| Other language realization | [C++](../../../../samples/ngraph_function_creation_sample) |
| Other language realization | [C++](../../../../samples/ngraph_function_creation_sample/README.md) |
## How It Works

View File

@ -14,10 +14,10 @@ Basic Inference Engine API is covered by [Hello Classification Python* Sample](.
| Options | Values |
| :------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| Validated Models | [mobilenet-ssd](https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/public/mobilenet-ssd/mobilenet-ssd.md), [face-detection-0206](https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/intel/face-detection-0206/description/face-detection-0206.md) |
| Validated Models | [mobilenet-ssd](@ref omz_models_model_mobilenet_ssd), [face-detection-0206](@ref omz_models_model_face_detection_0206) |
| Model Format | Inference Engine Intermediate Representation (.xml + .bin), ONNX (.onnx) |
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |
| Other language realization | [C++](../../../../samples/object_detection_sample_ssd), [C](../../../c/samples/object_detection_sample_ssd) |
| Other language realization | [C++](../../../../samples/object_detection_sample_ssd/README.md), [C](../../../c/samples/object_detection_sample_ssd/README.md) |
## How It Works

View File

@ -20,7 +20,7 @@ Basic Inference Engine API is covered by [Hello Classification Python* Sample](.
| Validated Models | Acoustic model based on Kaldi* neural networks (see [Model Preparation](#model-preparation) section) |
| Model Format | Inference Engine Intermediate Representation (.xml + .bin) |
| Supported devices | See [Execution Modes](#execution-modes) section below and [List Supported Devices](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |
| Other language realization | [C++](../../../../samples/speech_sample) |
| Other language realization | [C++](../../../../samples/speech_sample/README.md) |
## How It Works

View File

@ -15,10 +15,10 @@ Basic Inference Engine API is covered by [Hello Classification Python* Sample](.
| Options | Values |
| :------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| Validated Models | [fast-neural-style-mosaic-onnx](https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/public/fast-neural-style-mosaic-onnx/fast-neural-style-mosaic-onnx.md) |
| Validated Models | [fast-neural-style-mosaic-onnx](@ref omz_models_model_fast_neural_style_mosaic_onnx) |
| Model Format | Inference Engine Intermediate Representation (.xml + .bin), ONNX (.onnx) |
| Supported devices | [All](../../../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |
| Other language realization | [C++](../../../../samples/style_transfer_sample) |
| Other language realization | [C++](../../../../samples/style_transfer_sample/README.md) |
## How It Works

View File

@ -44,7 +44,7 @@ endif()
function(python_disable_deprecated_warnings)
disable_deprecated_warnings()
set(pyx_file "${CMAKE_CURRENT_BINARY_DIR}/ie_api.cxx" "${CMAKE_CURRENT_BINARY_DIR}/constants.cxx")
set_source_files_properties(${pyx_file} PROPERTIES COMPILE_FLAGS ${ie_c_cxx_deprecated})
set_source_files_properties(${pyx_file} PROPERTIES COMPILE_OPTIONS ${ie_c_cxx_deprecated})
endfunction()
python_disable_deprecated_warnings()

View File

@ -126,11 +126,6 @@ public:
* @return NV12 remote blob
*/
static inline Blob::Ptr make_shared_blob_nv12(size_t height, size_t width, RemoteContext::Ptr ctx, ID3D11Texture2D* nv12_surf) {
auto casted = std::dynamic_pointer_cast<D3DContext>(ctx);
if (nullptr == casted) {
IE_THROW() << "Invalid remote context passed";
}
// regardless of layout, blob dimensions always follow in N,C,H,W order
TensorDesc desc(Precision::U8, { 1, 1, height, width }, Layout::NHWC);
@ -139,12 +134,12 @@ static inline Blob::Ptr make_shared_blob_nv12(size_t height, size_t width, Remot
{ GPU_PARAM_KEY(DEV_OBJECT_HANDLE), static_cast<gpu_handle_param>(nv12_surf) },
{ GPU_PARAM_KEY(VA_PLANE), uint32_t(0) }
};
Blob::Ptr y_blob = std::dynamic_pointer_cast<Blob>(casted->CreateBlob(desc, blobParams));
Blob::Ptr y_blob = std::dynamic_pointer_cast<Blob>(ctx->CreateBlob(desc, blobParams));
TensorDesc uvdesc(Precision::U8, { 1, 2, height / 2, width / 2 }, Layout::NHWC);
blobParams[GPU_PARAM_KEY(MEM_HANDLE)] = static_cast<gpu_handle_param>(nv12_surf);
blobParams[GPU_PARAM_KEY(VA_PLANE)] = uint32_t(1);
Blob::Ptr uv_blob = std::dynamic_pointer_cast<Blob>(casted->CreateBlob(uvdesc, blobParams));
Blob::Ptr uv_blob = std::dynamic_pointer_cast<Blob>(ctx->CreateBlob(uvdesc, blobParams));
return InferenceEngine::make_shared_blob<NV12Blob>(y_blob, uv_blob);
}

View File

@ -90,11 +90,6 @@ public:
* The resulting compound contains two remote blobs for Y and UV planes of the surface.
*/
static inline Blob::Ptr make_shared_blob_nv12(size_t height, size_t width, RemoteContext::Ptr ctx, VASurfaceID nv12_surf) {
auto casted = std::dynamic_pointer_cast<VAContext>(ctx);
if (nullptr == casted) {
IE_THROW() << "Invalid remote context passed";
}
// regardless of layout, blob dimensions always follow in N,C,H,W order
TensorDesc ydesc(Precision::U8, { 1, 1, height, width }, Layout::NHWC);
ParamMap blobParams = {
@ -102,11 +97,11 @@ static inline Blob::Ptr make_shared_blob_nv12(size_t height, size_t width, Remot
{ GPU_PARAM_KEY(DEV_OBJECT_HANDLE), nv12_surf },
{ GPU_PARAM_KEY(VA_PLANE), uint32_t(0) }
};
Blob::Ptr y_blob = std::dynamic_pointer_cast<Blob>(casted->CreateBlob(ydesc, blobParams));
Blob::Ptr y_blob = std::dynamic_pointer_cast<Blob>(ctx->CreateBlob(ydesc, blobParams));
TensorDesc uvdesc(Precision::U8, { 1, 2, height / 2, width / 2 }, Layout::NHWC);
blobParams[GPU_PARAM_KEY(VA_PLANE)] = uint32_t(1);
Blob::Ptr uv_blob = std::dynamic_pointer_cast<Blob>(casted->CreateBlob(uvdesc, blobParams));
Blob::Ptr uv_blob = std::dynamic_pointer_cast<Blob>(ctx->CreateBlob(uvdesc, blobParams));
return InferenceEngine::make_shared_blob<NV12Blob>(y_blob, uv_blob);
}

View File

@ -31,7 +31,7 @@ if (NOT BIN_FOLDER)
endif()
endif()
if(IE_MAIN_SOURCE_DIR)
if(OpenVINO_SOURCE_DIR)
# in case if samples are built from IE repo
set(IE_MAIN_SAMPLES_DIR ${OpenVINO_MAIN_SOURCE_DIR})
# hint for find_package(InferenceEngine in the samples folder)
@ -111,7 +111,7 @@ endif()
####################################
if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/thirdparty/gflags" AND
NOT DEFINED IE_MAIN_SOURCE_DIR)
NOT DEFINED OpenVINO_SOURCE_DIR)
function(add_gflags)
# common gflags settings
set(GFLAGS_IS_SUBPROJECT TRUE)
@ -152,7 +152,7 @@ elseif(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/common/opencv_c_wrapper")
endif()
# samples build can be switched off during whole IE build
if (DEFINED IE_MAIN_SOURCE_DIR AND NOT ENABLE_SAMPLES)
if (DEFINED OpenVINO_SOURCE_DIR AND NOT ENABLE_SAMPLES)
return()
endif()

View File

@ -18,7 +18,7 @@ Basic Inference Engine API is covered by [Hello Classification C++ sample](../he
| Options | Values |
|:--- |:---
| Validated Models | AlexNet and GoogLeNet (image classification networks)
| Validated Models | [alexnet](@ref omz_models_model_alexnet), [googlenet-v1](@ref omz_models_model_googlenet_v1)
| Model Format | Inference Engine Intermediate Representation (\*.xml + \*.bin), ONNX (\*.onnx)
| Validated images | The sample uses OpenCV\* to [read input image](https://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga288b8b3da0892bd651fce07b3bbd3a56) (\*.bmp, \*.png), single-channel `ubyte` images.
| Supported devices | [All](../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -14,7 +14,7 @@ Hello Classification C++ sample application demonstrates how to use the followin
| Options | Values |
|:--- |:---
| Validated Models | AlexNet and GoogLeNet (image classification networks)
| Validated Models | [alexnet](@ref omz_models_model_alexnet), [googlenet-v1](@ref omz_models_model_googlenet_v1)
| Model Format | Inference Engine Intermediate Representation (\*.xml + \*.bin), ONNX (\*.onnx)
| Validated images | The sample uses OpenCV\* to [read input image](https://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga288b8b3da0892bd651fce07b3bbd3a56) (\*.bmp, \*.png)
| Supported devices | [All](../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -15,7 +15,7 @@ Basic Inference Engine API is covered by [Hello Classification C++ sample](../he
| Options | Values |
|:--- |:---
| Validated Models | AlexNet (image classification network)
| Validated Models | [alexnet](@ref omz_models_model_alexnet)
| Model Format | Inference Engine Intermediate Representation (\*.xml + \*.bin), ONNX (\*.onnx)
| Validated images | An uncompressed image in the NV12 color format - \*.yuv
| Supported devices | [All](../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -16,7 +16,7 @@ Basic Inference Engine API is covered by [Hello Classification C++ sample](../he
| Options | Values |
|:--- |:---
| Validated Models | Person detection SSD (object detection network)
| Validated Models | [person-detection-retail-0013](@ref omz_models_model_person_detection_retail_0013)
| Model Format | Inference Engine Intermediate Representation (\*.xml + \*.bin), ONNX (\*.onnx)
| Validated images | The sample uses OpenCV\* to [read input image](https://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga288b8b3da0892bd651fce07b3bbd3a56) (\*.bmp, \*.png)
| Supported devices | [All](../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -19,7 +19,7 @@ Basic Inference Engine API is covered by [Hello Classification C++ sample](../he
| Options | Values |
|:--- |:---
| Validated Models | LeNet (image classification network)
| Validated Models | LeNet
| Model Format | Network weights file (\*.bin)
| Validated images | single-channel `ubyte` images
| Supported devices | [All](../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -16,7 +16,7 @@ Basic Inference Engine API is covered by [Hello Classification C++ sample](../he
| Options | Values |
|:--- |:---
| Validated Models | Person detection SSD (object detection network)
| Validated Models | [person-detection-retail-0013](@ref omz_models_model_person_detection_retail_0013)
| Model Format | Inference Engine Intermediate Representation (\*.xml + \*.bin), ONNX (\*.onnx)
| Validated images | The sample uses OpenCV\* to [read input image](https://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga288b8b3da0892bd651fce07b3bbd3a56) (\*.bmp, \*.png)
| Supported devices | [All](../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -15,7 +15,7 @@ Basic Inference Engine API is covered by [Hello Classification C++ sample](../he
| Options | Values |
|:--- |:---
| Validated Models | [fast-neural-style-mosaic-onnx](https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/public/fast-neural-style-mosaic-onnx/fast-neural-style-mosaic-onnx.md) (style transfer models)
| Validated Models | [fast-neural-style-mosaic-onnx](@ref omz_models_model_fast_neural_style_mosaic_onnx)
| Model Format | Inference Engine Intermediate Representation (\*.xml + \*.bin), ONNX (\*.onnx)
| Validated images | The sample uses OpenCV\* to [read input image](https://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga288b8b3da0892bd651fce07b3bbd3a56) (\*.bmp, \*.png)
| Supported devices | [All](../../../docs/IE_DG/supported_plugins/Supported_Devices.md) |

View File

@ -32,11 +32,10 @@ target_link_libraries(${TARGET_NAME} PRIVATE clDNN_lib pugixml
inference_engine_lp_transformations
${NGRAPH_LIBRARIES})
set(CLDNN_TOP_FOLDER "${IE_MAIN_SOURCE_DIR}/thirdparty/clDNN")
target_include_directories(${TARGET_NAME} PRIVATE
${CMAKE_CURRENT_SOURCE_DIR}
$<TARGET_PROPERTY:inference_engine_transformations,INTERFACE_INCLUDE_DIRECTORIES>
${CLDNN_TOP_FOLDER}/api)
${clDNN_SOURCE_DIR}/api)
set_target_properties(${TARGET_NAME} PROPERTIES INTERPROCEDURAL_OPTIMIZATION_RELEASE ${ENABLE_LTO})

View File

@ -67,7 +67,7 @@ if(ENABLE_SSE42)
list(APPEND LIBRARY_SRC ${SSE_SRC})
ie_sse42_optimization_flags(sse4_2_flags)
set_source_files_properties(${SSE_SRC} PROPERTIES COMPILE_FLAGS "${sse4_2_flags}")
set_source_files_properties(${SSE_SRC} PROPERTIES COMPILE_OPTIONS "${sse4_2_flags}")
add_definitions(-DHAVE_SSE=1)
if(CMAKE_VERSION VERSION_GREATER_EQUAL "3.16")
@ -261,7 +261,7 @@ endif()
ie_cpack_add_component(core REQUIRED DEPENDS ${core_components})
ie_cpack_add_component(core_dev REQUIRED core ngraph_dev)
install(DIRECTORY "${IE_MAIN_SOURCE_DIR}/include" DESTINATION ${IE_CPACK_IE_DIR}
install(DIRECTORY "${PUBLIC_HEADERS_DIR}" DESTINATION ${IE_CPACK_IE_DIR}
COMPONENT core_dev)
install(TARGETS ${TARGET_NAME} EXPORT InferenceEngineTargets

View File

@ -14,32 +14,21 @@ file(GLOB LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
function(ie_avx512_core_optimization_flags flags)
if(WIN32)
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
set(${flags} "/QxCORE-AVX512" PARENT_SCOPE)
set(${flags} /QxCORE-AVX512 PARENT_SCOPE)
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
set(${flags} "/arch:AVX512" PARENT_SCOPE)
set(${flags} /arch:AVX512 PARENT_SCOPE)
else()
message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}")
endif()
else()
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
set(${flags} "-xCORE-AVX512" PARENT_SCOPE)
set(${flags} -xCORE-AVX512 PARENT_SCOPE)
else()
set(${flags} "-mavx512f -mavx512bw -mavx512dq -mfma" PARENT_SCOPE)
set(${flags} -mavx512f -mavx512bw -mavx512dq -mfma PARENT_SCOPE)
endif()
endif()
endfunction()
function(set_ie_neon_optimization_flags neon_flags)
file(GLOB NEON_SRC ${CMAKE_CURRENT_SOURCE_DIR}/arm_neon/*.cpp)
file(GLOB NEON_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/arm_neon/*.hpp)
list(APPEND LIBRARY_HEADERS ${NEON_HEADERS})
list(APPEND LIBRARY_SRC ${NEON_SRC})
set_source_files_properties(${NEON_SRC} PROPERTIES COMPILE_FLAGS "${neon_flags}")
add_definitions(-DHAVE_NEON=1)
endfunction()
if(ENABLE_SSE42)
file(GLOB SSE_SRC ${CMAKE_CURRENT_SOURCE_DIR}/cpu_x86_sse42/*.cpp)
file(GLOB SSE_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/cpu_x86_sse42/*.hpp)
@ -48,7 +37,7 @@ if(ENABLE_SSE42)
list(APPEND LIBRARY_SRC ${SSE_SRC})
ie_sse42_optimization_flags(sse4_2_flags)
set_source_files_properties(${SSE_SRC} PROPERTIES COMPILE_FLAGS "${sse4_2_flags}")
set_source_files_properties(${SSE_SRC} PROPERTIES COMPILE_OPTIONS "${sse4_2_flags}")
add_definitions(-DHAVE_SSE=1)
endif()
@ -60,7 +49,7 @@ if(ENABLE_AVX2)
list(APPEND LIBRARY_SRC ${AVX2_SRC})
ie_avx2_optimization_flags(avx2_flags)
set_source_files_properties(${AVX2_SRC} PROPERTIES COMPILE_FLAGS "${avx2_flags}")
set_source_files_properties(${AVX2_SRC} PROPERTIES COMPILE_OPTIONS "${avx2_flags}")
add_definitions(-DHAVE_AVX2=1)
endif()
@ -80,7 +69,7 @@ if(ENABLE_AVX512F AND NOT GNU_5_DEBUG_CASE)
list(APPEND LIBRARY_SRC ${AVX512_SRC})
ie_avx512_core_optimization_flags(avx512_flags)
set_source_files_properties(${AVX512_SRC} PROPERTIES COMPILE_FLAGS "${avx512_flags}")
set_source_files_properties(${AVX512_SRC} PROPERTIES COMPILE_OPTIONS "${avx512_flags}")
add_definitions(-DHAVE_AVX512=1)
endif()
@ -94,7 +83,7 @@ if(ARM OR AARCH64)
list(APPEND LIBRARY_HEADERS ${NEON_HEADERS})
list(APPEND LIBRARY_SRC ${NEON_SRC})
set_source_files_properties(${NEON_SRC} PROPERTIES COMPILE_FLAGS "${neon_flags}")
set_source_files_properties(${NEON_SRC} PROPERTIES COMPILE_OPTIONS "${neon_flags}")
add_definitions(-DHAVE_NEON=1)
endif()
endif()

View File

@ -26,8 +26,7 @@ ie_add_vs_version_file(NAME ${TARGET_NAME}
target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_INFERENCE_ENGINE_PLUGIN
IR_READER_V10)
target_include_directories(${TARGET_NAME} PRIVATE "${IE_MAIN_SOURCE_DIR}/src/inference_engine" # for CNNNetworkNgraphImpl
"${CMAKE_CURRENT_SOURCE_DIR}")
target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}")
target_link_libraries(${TARGET_NAME} PRIVATE ${NGRAPH_LIBRARIES}
inference_engine_reader_api

View File

@ -28,7 +28,6 @@ ie_add_vs_version_file(NAME ${TARGET_NAME}
target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_INFERENCE_ENGINE_PLUGIN)
target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/"
"${IE_MAIN_SOURCE_DIR}/src/inference_engine"
"${IE_MAIN_SOURCE_DIR}/src/readers/ir_reader")
target_link_libraries(${TARGET_NAME} PRIVATE inference_engine_reader_api inference_engine_plugin_api

View File

@ -39,8 +39,7 @@ function(add_graph_transformer_target TARGET_NAME STATIC_IE)
target_include_directories(${TARGET_NAME}
SYSTEM PUBLIC
$<TARGET_PROPERTY:pugixml,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:inference_engine_plugin_api,INTERFACE_INCLUDE_DIRECTORIES>
"${IE_MAIN_SOURCE_DIR}/thirdparty/movidius/mvnc/include")
$<TARGET_PROPERTY:mvnc,INTERFACE_INCLUDE_DIRECTORIES>)
if(STATIC_IE)
target_link_libraries(${TARGET_NAME} PUBLIC vpu_common_lib_test_static)
@ -51,6 +50,7 @@ function(add_graph_transformer_target TARGET_NAME STATIC_IE)
target_link_libraries(${TARGET_NAME}
PUBLIC
${NGRAPH_LIBRARIES}
inference_engine_plugin_api
PRIVATE
openvino::itt
mvnc # TODO: remove once all options are migrated
@ -58,13 +58,11 @@ function(add_graph_transformer_target TARGET_NAME STATIC_IE)
if(WIN32)
target_compile_definitions(${TARGET_NAME} PRIVATE NOMINMAX)
set_target_properties(${TARGET_NAME} PROPERTIES COMPILE_PDB_NAME ${TARGET_NAME})
endif()
if(NOT STATIC_IE)
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME} CUSTOM_FILTERS "+runtime/explicit")
openvino_developer_export_targets(COMPONENT inference_engine_vpu TARGETS ${TARGET_NAME})
endif()

View File

@ -90,9 +90,11 @@ function(ie_headers_compilation_with_custom_flags)
cmake_parse_arguments(IE_TEST "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if(IE_TEST_PLUGIN_API)
set(IE_TEST_INCLUDE_DIRECTORY "${IE_MAIN_SOURCE_DIR}/src/plugin_api")
set(IE_TEST_INCLUDE_DIRECTORY
$<TARGET_PROPERTY:inference_engine_plugin_api,INTERFACE_INCLUDE_DIRECTORIES>)
else()
set(IE_TEST_INCLUDE_DIRECTORY "${IE_MAIN_SOURCE_DIR}/include")
set(IE_TEST_INCLUDE_DIRECTORY
$<TARGET_PROPERTY:inference_engine,INTERFACE_INCLUDE_DIRECTORIES>)
endif()
file(GLOB_RECURSE header_files RELATIVE "${IE_TEST_INCLUDE_DIRECTORY}"
@ -177,7 +179,7 @@ function(ie_headers_compilation_with_custom_flags)
if(IE_TEST_FLAGS)
set_target_properties(${target_name} PROPERTIES
COMPILE_FLAGS ${IE_TEST_FLAGS})
COMPILE_OPTIONS "${IE_TEST_FLAGS}")
endif()
if(IE_TEST_DEFINITIONS)
@ -201,16 +203,16 @@ ie_headers_compilation_with_custom_flags(TEST_SUFFIX Cxx17
if(UNIX)
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
ie_headers_compilation_with_custom_flags(TEST_SUFFIX WarningsAreErrors
FLAGS "-Werror-all -Werror -Wall")
FLAGS -Werror-all -Werror -Wall)
else()
ie_headers_compilation_with_custom_flags(TEST_SUFFIX Pedantic FLAGS "-Wpedantic")
ie_headers_compilation_with_custom_flags(TEST_SUFFIX Pedantic FLAGS -Wpedantic)
endif()
else()
ie_headers_compilation_with_custom_flags(TEST_SUFFIX WindowsAreErrors
HEADERS_TO_SKIP "gpu/gpu_ocl_wrapper.hpp"
"gpu/gpu_context_api_ocl.hpp"
"gpu/gpu_context_api_dx.hpp"
FLAGS "/we4996 /W4 /WX")
FLAGS /we4996 /W4 /WX)
endif()
# compilation with -Wweak-vtables
@ -228,15 +230,15 @@ ie_headers_compilation_with_custom_flags(TEST_SUFFIX PluginApiCxx17
if(UNIX)
if(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
ie_headers_compilation_with_custom_flags(TEST_SUFFIX PluginApiWarningsAreErrors
FLAGS "-Werror-all -Werror -Wall"
FLAGS -Werror-all -Werror -Wall
PLUGIN_API)
else()
ie_headers_compilation_with_custom_flags(TEST_SUFFIX PluginApiPedantic FLAGS "-Wpedantic"
ie_headers_compilation_with_custom_flags(TEST_SUFFIX PluginApiPedantic FLAGS -Wpedantic
PLUGIN_API)
endif()
else()
ie_headers_compilation_with_custom_flags(TEST_SUFFIX PluginApiWindowsAreErrors
FLAGS "/we4996 /W4 /WX"
FLAGS /we4996 /W4 /WX
PLUGIN_API)
endif()

View File

@ -19,8 +19,8 @@ std::vector<InferenceEngine::SizeVector> inputDims = {
{1, 1000}, {223, 217, 21}, {3, 4, 5, 1}, {3, 4, 1, 5, 1}};
std::vector<InferenceEngine::Precision> inputPrecisions = {
InferenceEngine::Precision::U8, InferenceEngine::Precision::FP32,
InferenceEngine::Precision::I32,
InferenceEngine::Precision::BOOL, InferenceEngine::Precision::U8,
InferenceEngine::Precision::FP32, InferenceEngine::Precision::I32,
};
ConfigMap config;

View File

@ -67,6 +67,7 @@ const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes
const std::map<ActivationTypes, std::vector<std::vector<float>>> intActivationTypes = {
{Negative, {}},
{Ceiling, {}},
{Cos, {}},
{Sqrt, {}},
{Tanh, {}},
};

View File

@ -0,0 +1,36 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "single_layer_tests/nonzero.hpp"
#include "common_test_utils/test_constants.hpp"
#include <vector>
using namespace ngraph::helpers;
using namespace LayerTestsDefinitions;
namespace {
// Input shapes covering tensor ranks 1 through 5; the innermost dimension is
// kept at 1000 so every case produces a non-trivial number of candidate
// non-zero elements.
std::vector<std::vector<size_t>> inShapes = {
        {1000},
        {4, 1000},
        {2, 4, 1000},
        {2, 4, 4, 1000},
        {2, 4, 4, 2, 1000},
};

// Input precisions the NonZero layer test is instantiated for.
const std::vector<InferenceEngine::Precision> inputPrecisions = {
        InferenceEngine::Precision::I32,
        InferenceEngine::Precision::FP16,
        InferenceEngine::Precision::U8,
};

// Plugin configuration; left empty so the test runs with default settings.
ConfigMap config;

// Instantiate NonZeroLayerTest over the cross product of the shapes and
// precisions above, targeting the CPU device.
INSTANTIATE_TEST_CASE_P(smoke_nonzero, NonZeroLayerTest,
                        ::testing::Combine(
                                ::testing::ValuesIn(inShapes),
                                ::testing::ValuesIn(inputPrecisions),
                                ::testing::Values(CommonTestUtils::DEVICE_CPU),
                                ::testing::Values(config)),
                        NonZeroLayerTest::getTestCaseName);
}  // namespace

View File

@ -69,6 +69,8 @@ std::vector<std::string> disabledTestPatterns() {
R"(.*smoke_SetBlobOfKindAUTO.*SetBlobOfKindTest.CompareWithRefs.*)",
// reference doesn't cover I8, U8 cases. Issue: 55842
R"(.*Gather7LayerTest.*netPRC=I8.*)",
// TODO: 57562 No dynamic output shape support
R"(.*NonZeroLayerTest.*)",
// need to implement Export / Import
R"(.*IEClassImportExportTestP.*)"
};

View File

@ -0,0 +1,50 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "common_test_utils/test_common.hpp"
#include "ngraph_functions/utils/ngraph_helpers.hpp"
#include "functional_test_utils/plugin_cache.hpp"
#include "ngraph_functions/subgraph_builders.hpp"
#include <ie_core.hpp>
#include <ie_plugin_config.hpp>
// Builds a small f16 test network: Parameter -> Gelu -> Swish(beta = 2.5)
// -> Reshape (flatten to {1, N}) -> Result.
//
// \param inputShape  dimensions of the single network input (default 1x1x32x32)
// \return            the constructed ngraph::Function
inline std::shared_ptr<ngraph::Function> makeTestModel(std::vector<size_t> inputShape = {1, 1, 32, 32}) {
    const auto precision = ngraph::element::Type_t::f16;

    auto param = std::make_shared<ngraph::opset1::Parameter>(precision, ngraph::Shape(inputShape));
    auto gelu_node = std::make_shared<ngraph::opset7::Gelu>(param);

    // Swish with a scalar beta constant of 2.5.
    auto beta = ngraph::op::Constant::create(precision, ngraph::Shape{}, {2.5f});
    auto swish_node = std::make_shared<ngraph::opset4::Swish>(gelu_node, beta);

    // Flatten the activation output into a {1, total_element_count} tensor.
    const ngraph::Shape act_shape = swish_node->outputs()[0].get_tensor().get_shape();
    std::vector<size_t> flat_dims = {1, ngraph::shape_size(act_shape)};
    auto target_shape = ngraph::opset1::Constant::create(ngraph::element::i64, ngraph::Shape{2}, flat_dims);
    auto reshape = std::make_shared<ngraph::opset1::Reshape>(swish_node, target_shape, false);

    auto result = std::make_shared<ngraph::opset1::Result>(reshape);
    return std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, ngraph::ParameterVector{param});
}
// Fixture that builds the test network once per test. The locale itself is
// switched inside the test body, not in the fixture.
class CustomLocaleTest : public CommonTestUtils::TestsCommon {
protected:
    // Name of the currently running test case, captured for diagnostics.
    std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name();
    // Network under test; created fresh in SetUp().
    std::shared_ptr<ngraph::Function> function;

    void SetUp() override {
        function = makeTestModel();
    }
};
// Verifies that network compilation is locale-independent: with a
// comma-as-decimal-separator locale ("ru_RU.UTF-8") installed globally,
// LoadNetwork for the GPU device must not throw.
TEST_F(CustomLocaleTest, CanLoadNetworkWithCustomLocale) {
    // Remember the locale active before the test so it can be restored.
    auto prev = std::locale();
    try {
        std::locale::global(std::locale("ru_RU.UTF-8"));
    } catch (...) {
        // The Russian UTF-8 locale is not installed on this machine; nothing
        // to verify, so skip rather than fail.
        GTEST_SKIP();
    }

    std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
    InferenceEngine::CNNNetwork cnnNet(function);
    ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, "GPU"));

    // Restore the original global locale for subsequent tests.
    std::locale::global(prev);
}

View File

@ -13,7 +13,6 @@ addIeTargetTest(
ROOT ${CMAKE_CURRENT_SOURCE_DIR}
INCLUDES
${CMAKE_CURRENT_SOURCE_DIR}
${IE_MAIN_SOURCE_DIR}/src/vpu/graph_transformer/include
${IE_MAIN_SOURCE_DIR}/tests_deprecated/behavior/vpu/myriad_tests/helpers
${XLINK_INCLUDE}
${XLINK_PLATFORM_INCLUDE}

View File

@ -21,6 +21,7 @@ VERIFIED_OP_REFERENCES = [
'ConvertLike-1',
'Convolution-1',
'Constant-1',
'Cos-1',
'DeformableConvolution-1',
'DeformablePSROIPooling-1',
'DetectionOutput-1',
@ -57,6 +58,7 @@ VERIFIED_OP_REFERENCES = [
'Negative-1',
'NonMaxSuppression-4',
'NonMaxSuppression-5',
'NonZero-3',
'PSROIPooling-1',
'Proposal-1',
'Proposal-4',

View File

@ -9,7 +9,6 @@ addIeTargetTest(
ROOT ${CMAKE_CURRENT_SOURCE_DIR}
INCLUDES
${IE_MAIN_SOURCE_DIR}/src/mkldnn_plugin
${IE_MAIN_SOURCE_DIR}/src/transformations/include
OBJECT_FILES
$<TARGET_OBJECTS:MKLDNNPlugin_obj>
LINK_LIBRARIES

View File

@ -14,8 +14,6 @@ addIeTargetTest(
"${IE_TESTS_ROOT}/ngraph_helpers/ngraph_functions/src"
INCLUDES
"${IE_MAIN_SOURCE_DIR}/src/vpu/myriad_plugin"
"${IE_MAIN_SOURCE_DIR}/thirdparty/movidius"
"${IE_MAIN_SOURCE_DIR}/thirdparty/movidius/mvnc/include/watchdog"
"${XLINK_INCLUDE}"
"${XLINK_PLATFORM_INCLUDE}"
"${CMAKE_CURRENT_SOURCE_DIR}/base"

View File

@ -20,8 +20,7 @@ target_link_libraries(${TARGET_NAME} PUBLIC
target_link_libraries(${TARGET_NAME} PUBLIC gmock)
target_include_directories(${TARGET_NAME} PUBLIC
${IE_MAIN_SOURCE_DIR}/src/vpu/graph_transformer/include
$<TARGET_PROPERTY:inference_engine_plugin_api,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:inference_engine,INTERFACE_INCLUDE_DIRECTORIES>
${CMAKE_CURRENT_SOURCE_DIR}/plugin_tests)
# developer package

View File

@ -55,11 +55,10 @@ function(enable_vpu TARGET_NAME FLAG_NAME PLUGIN_NAME)
target_compile_definitions(${TARGET_NAME} PRIVATE __PC__)
target_include_directories(${TARGET_NAME} PRIVATE
${IE_MAIN_SOURCE_DIR}/src/vpu/common/include
$<TARGET_PROPERTY:vpu_graph_transformer,INTERFACE_INCLUDE_DIRECTORIES>
${CMAKE_CURRENT_SOURCE_DIR}/myriad_tests
${IE_MAIN_SOURCE_DIR}/thirdparty/movidius
${IE_MAIN_SOURCE_DIR}/thirdparty/movidius/mvnc/include
${IE_MAIN_SOURCE_DIR}/thirdparty/movidius/mvnc/include/watchdog
${IE_MAIN_SOURCE_DIR}/src/inference_engine
${XLINK_INCLUDE}
${XLINK_PLATFORM_INCLUDE})
target_link_libraries(${TARGET_NAME} PRIVATE mvnc)

View File

@ -59,8 +59,7 @@ addIeTargetTest(
ADDITIONAL_SOURCE_DIRS
${CMAKE_CURRENT_SOURCE_DIR}/graph_transformer
INCLUDES
${IE_MAIN_SOURCE_DIR}/include/vpu
${IE_MAIN_SOURCE_DIR}/src/vpu/graph_transformer/include
$<TARGET_PROPERTY:vpu_graph_transformer,INTERFACE_INCLUDE_DIRECTORIES>
${CMAKE_CURRENT_BINARY_DIR}
LINK_LIBRARIES
IESharedTests

View File

@ -10,7 +10,7 @@
#include <iomanip> // std::setw
#include <vpu/utils/ie_helpers.hpp>
#include <graph_transformer/include/vpu/model/data_desc.hpp>
#include <vpu/model/data_desc.hpp>
typedef std::map<std::string, std::string> ParamsStruct;

View File

@ -12,7 +12,7 @@
#include "ie_memcpy.h"
#include "common_layers_params.hpp"
#include <common/include/vpu/utils/error.hpp>
#include <vpu/utils/error.hpp>
#include "blob_factory.hpp"
#include "debug.h"

View File

@ -19,17 +19,13 @@ message("configuring file: ${CMAKE_CURRENT_BINARY_DIR}/test_model_repo.h")
function(add_helpers target_name)
add_library(${target_name} STATIC ${HELPERS_SRC})
target_include_directories(${target_name} PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}"
"${IE_MAIN_SOURCE_DIR}/src/inference_engine"
$<TARGET_PROPERTY:pugixml,INTERFACE_INCLUDE_DIRECTORIES>
"${IE_MAIN_SOURCE_DIR}/src/vpu/"
"${IE_MAIN_SOURCE_DIR}/src/plugin_api"
"${IE_MAIN_SOURCE_DIR}/src/legacy_api/include"
PRIVATE "${CMAKE_CURRENT_BINARY_DIR}")
# TODO: eliminate dependency on samples
target_include_directories(${target_name} PUBLIC
"${IE_MAIN_SOURCE_DIR}/samples/common/utils/include/samples/os/windows")
target_include_directories(${target_name}
PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}"
$<TARGET_PROPERTY:ie_samples_utils,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:pugixml,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:inference_engine_legacy,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:inference_engine_plugin_api,INTERFACE_INCLUDE_DIRECTORIES>
PRIVATE "${CMAKE_CURRENT_BINARY_DIR}")
set_property(TARGET ${target_name} PROPERTY COMPILE_PDB_NAME ${target_name})

View File

@ -13,7 +13,7 @@
# include <libgen.h>
# include <dirent.h>
#else
# include <w_dirent.h>
# include <samples/os/windows/w_dirent.h>
#endif
#include <utility>

View File

@ -15,7 +15,6 @@ SET (CMAKE_SKIP_RPATH OFF)
file(GLOB
TEST_SRC
graph_tools/*.cpp
inference_engine_tests/*.cpp
)
@ -24,6 +23,7 @@ if (ENABLE_GNA)
GNA_TESTS
engines/gna/*cpp
engines/gna/layers/*cpp
engines/gna/graph_tools/*cpp
)
source_group("gna" FILES ${GNA_TESTS})
list(APPEND TEST_SRC ${GNA_TESTS})
@ -69,7 +69,6 @@ if (ENABLE_MYRIAD)
engines/vpu/myriad_tests/helpers
${IE_MAIN_SOURCE_DIR}/src/vpu/myriad_plugin
${IE_MAIN_SOURCE_DIR}/thirdparty/movidius
${IE_MAIN_SOURCE_DIR}/thirdparty/movidius/mvnc/include/watchdog
${XLINK_INCLUDE}
${XLINK_PLATFORM_INCLUDE})
@ -86,7 +85,6 @@ source_group("include" FILES ${TEST_INCLUDE})
add_executable(${TARGET_NAME} ${TEST_SRC} ${TEST_INCLUDE})
target_include_directories(${TARGET_NAME} PRIVATE
${IE_MAIN_SOURCE_DIR}/src/gna_plugin
${CMAKE_CURRENT_SOURCE_DIR})
set_target_properties(${TARGET_NAME} PROPERTIES COMPILE_PDB_NAME ${TARGET_NAME})

View File

@ -4,8 +4,8 @@
#include <gtest/gtest.h>
#include <legacy/graph_tools.hpp>
#include <gna_graph_tools.hpp>
#include "graph_test_base.hpp"
#include <gna_graph_tools.hpp>
#include <unordered_set>
#include <gmock/gmock.h>
#include "ie_common.h"

View File

@ -13,12 +13,14 @@ if (ENABLE_MYRIAD)
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_RELEASE ${ENABLE_LTO})
endif()
set(XLINK_DIR "${IE_MAIN_SOURCE_DIR}/thirdparty/movidius/XLink" CACHE PATH "path to Xlink")
add_subdirectory("${XLINK_DIR}" "${CMAKE_BINARY_DIR}/thirdparty/movidius/XLink")
set(XLINK_DIR "${CMAKE_CURRENT_SOURCE_DIR}/XLink" CACHE PATH "path to Xlink")
add_subdirectory(
"${XLINK_DIR}"
"${CMAKE_CURRENT_BINARY_DIR}/XLink")
add_subdirectory(
"${IE_MAIN_SOURCE_DIR}/thirdparty/movidius/mvnc"
"${CMAKE_BINARY_DIR}/thirdparty/movidius/mvnc")
"${CMAKE_CURRENT_SOURCE_DIR}/mvnc"
"${CMAKE_CURRENT_BINARY_DIR}/mvnc")
if(ENABLE_TESTS)
add_subdirectory(tests)

View File

@ -15,11 +15,11 @@ add_library(${TARGET_NAME} STATIC ${MVNC_SOURCES})
target_include_directories(${TARGET_NAME}
PUBLIC
"include"
${WATCHDOG_INCLUDE}
"${CMAKE_CURRENT_SOURCE_DIR}/include"
${WATCHDOG_INCLUDE}
PRIVATE
${XLINK_INCLUDE}
${XLINK_PLATFORM_INCLUDE})
${XLINK_INCLUDE}
${XLINK_PLATFORM_INCLUDE})
target_compile_definitions(${TARGET_NAME}
PRIVATE

View File

@ -16,8 +16,6 @@ file(GLOB MVNC_TESTS_COMMON_SOURCES
"cases/mvnc_common_test_cases.cpp")
set(PRIVATE_INCLUDE
${IE_MAIN_SOURCE_DIR}/tests_new/gtest/googletest/include
${IE_MAIN_SOURCE_DIR}/tests_new/gtest/googletest/
${XLINK_INCLUDE}
${XLINK_PLATFORM_INCLUDE}
${WATCHDOG_INCLUDE})

View File

@ -14,8 +14,6 @@ target_include_directories(${TARGET_NAME}
PRIVATE
helpers
cases
${IE_MAIN_SOURCE_DIR}/tests_new/gtest/googletest/include
${IE_MAIN_SOURCE_DIR}/tests_new/gtest/googletest/
${XLINK_INCLUDE}
${XLINK_PLATFORM_INCLUDE})

View File

@ -11,10 +11,8 @@ file(GLOB SRCS
add_executable(${TARGET_NAME} ${SRCS})
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE
${IE_MAIN_SOURCE_DIR}/include
${IE_MAIN_SOURCE_DIR}/src/vpu/graph_transformer/include
${IE_MAIN_SOURCE_DIR}/src/vpu/common/include
${IE_MAIN_SOURCE_DIR}/src/plugin_api
)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
@ -25,6 +23,7 @@ endif()
target_link_libraries(${TARGET_NAME} PRIVATE
inference_engine
inference_engine_plugin_api
gflags
ie_samples_utils
)

View File

@ -5,7 +5,6 @@
set(TARGET_NAME myriad_compile)
file(GLOB SRCS
${IE_MAIN_SOURCE_DIR}/tools/vpu/common/*.cpp
${CMAKE_CURRENT_SOURCE_DIR}/*.cpp
)
@ -17,13 +16,9 @@ if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
)
endif()
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE
${IE_MAIN_SOURCE_DIR}/tools/vpu/common
${IE_MAIN_SOURCE_DIR}/samples/common
)
target_link_libraries(${TARGET_NAME} PRIVATE
inference_engine vpu_graph_transformer
inference_engine
vpu_graph_transformer
gflags
ie_samples_utils
)

View File

@ -16,13 +16,13 @@ function(add_perfcheck_target TARGET_NAME PLUGIN_NAME)
target_include_directories(${TARGET_NAME}
SYSTEM PRIVATE
"${IE_MAIN_SOURCE_DIR}/src/vpu/graph_transformer/include"
"${IE_MAIN_SOURCE_DIR}/src/plugin_api"
"${IE_MAIN_SOURCE_DIR}/samples/common/format_reader")
$<TARGET_PROPERTY:vpu_graph_transformer,INTERFACE_INCLUDE_DIRECTORIES>)
target_link_libraries(${TARGET_NAME}
PRIVATE
inference_engine format_reader
inference_engine
inference_engine_plugin_api
format_reader
Threads::Threads
ie_samples_utils)

View File

@ -1,20 +1,37 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import logging as log
import os
import sys
import subprocess
import sys
from mo.utils.versions_checker import check_python_version # pylint: disable=no-name-in-module
def log_ie_not_found():
    """Log an actionable error when the Inference Engine / nGraph Python APIs
    cannot be located, pointing the user at the platform-specific
    install_prerequisites script.
    """
    # BUG FIX: sys.platform is "win32" on Windows (never "windows"), so the
    # original `sys.platform == "windows"` comparison was always False and
    # Windows users were told to run the ".sh" script.
    script_ext = "bat" if sys.platform == "win32" else "sh"
    log.error("Could not find the Inference Engine or nGraph Python API.\n"
              "Consider building the Inference Engine and nGraph Python APIs"
              " from sources or try to install OpenVINO (TM) Toolkit using \"install_prerequisites.{}\""
              .format(script_ext))
def setup_env():
ret_code = check_python_version()
if ret_code:
sys.exit(ret_code)
from mo.utils.find_ie_version import find_ie_version
find_ie_version(silent=True)
ie_found = True
try:
ie_found = find_ie_version(silent=True)
except Exception:
ie_found = False
if not ie_found:
log_ie_not_found()
sys.exit(1)
mo_root_path = os.path.join(os.path.dirname(__file__), os.pardir)
@ -23,6 +40,7 @@ def setup_env():
os.environ[python_path_key] = mo_root_path
else:
os.environ[python_path_key] = os.pathsep.join([os.environ[python_path_key], mo_root_path])
return True
def subprocess_main(framework=None):
@ -45,4 +63,4 @@ def subprocess_main(framework=None):
for arg in sys.argv[1:]:
args.append(arg)
status = subprocess.run(args, env=os.environ)
sys.exit(status.returncode)
sys.exit(status.returncode)

View File

@ -5,3 +5,4 @@ pyenchant==1.6.11
test-generator==0.1.1
defusedxml>=0.5.0
requests>=2.20.0
pytest>=6.2.4

View File

@ -1,11 +1,29 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import logging as log
import os
import subprocess
import sys
import unittest
from unittest.mock import patch
from mo.subprocess_main import setup_env, subprocess_main
import pytest
class TestNoInferenceEngine(unittest.TestCase):
    # Verifies that subprocess_main() exits with code 1 and logs the
    # "build from sources" hint when the IE/nGraph Python API lookup fails.
    @patch('mo.utils.find_ie_version.find_ie_version')
    def test_no_ie_ngraph(self, mock_find):
        # Simulate "Inference Engine not found".
        mock_find.return_value = False
        with pytest.raises(SystemExit) as e, self.assertLogs(log.getLogger(), level="ERROR") as cm:
            subprocess_main()
        assert e.value.code == 1
        # The ERROR log must contain the actionable hint emitted by
        # log_ie_not_found().
        res = [i for i in cm.output if
               'Consider building the Inference Engine and nGraph Python APIs from sources' in i]
        assert res
from mo.subprocess_main import setup_env
def test_frontends():
    # Smoke test: environment setup must complete without exiting when the
    # IE/nGraph Python APIs are available.
    setup_env()

View File

@ -19,13 +19,11 @@ project (ngraph)
option(NGRAPH_UNIT_TEST_ENABLE "Control the building of unit tests" ON)
option(NGRAPH_UNIT_TEST_BACKENDS_ENABLE "Control the building of unit tests using backends" ON)
option(NGRAPH_INTERPRETER_ENABLE "Control the building of the INTERPRETER backend" ON)
option(NGRAPH_DEBUG_ENABLE "Enable output for NGRAPH_DEBUG statements" OFF)
option(NGRAPH_ONNX_IMPORT_ENABLE "Enable ONNX importer" OFF)
option(NGRAPH_ONNX_EDITOR_ENABLE "Enable ONNX Editor" OFF)
option(NGRAPH_PDPD_FRONTEND_ENABLE "Enable PaddlePaddle FrontEnd" OFF)
option(NGRAPH_PYTHON_BUILD_ENABLE "Enable build nGraph python package wheel" OFF)
option(NGRAPH_DYNAMIC_COMPONENTS_ENABLE "Enable dynamic loading of components" ON)
option(NGRAPH_USE_PROTOBUF_LITE "Compiles and links with protobuf-lite" OFF)
if (NGRAPH_ONNX_IMPORT_ENABLE OR NGRAPH_PDPD_FRONTEND_ENABLE)
@ -36,8 +34,6 @@ if(NGRAPH_ONNX_EDITOR_ENABLE AND NOT NGRAPH_ONNX_IMPORT_ENABLE)
endif()
message(STATUS "NGRAPH_DEBUG_ENABLE: ${NGRAPH_DEBUG_ENABLE}")
message(STATUS "NGRAPH_DYNAMIC_COMPONENTS_ENABLE: ${NGRAPH_DYNAMIC_COMPONENTS_ENABLE}")
message(STATUS "NGRAPH_INTERPRETER_ENABLE: ${NGRAPH_INTERPRETER_ENABLE}")
message(STATUS "NGRAPH_ONNX_IMPORT_ENABLE: ${NGRAPH_ONNX_IMPORT_ENABLE}")
message(STATUS "NGRAPH_ONNX_EDITOR_ENABLE: ${NGRAPH_ONNX_EDITOR_ENABLE}")
message(STATUS "NGRAPH_PDPD_FRONTEND_ENABLE: ${NGRAPH_PDPD_FRONTEND_ENABLE}")
@ -95,19 +91,11 @@ else()
set(CMAKE_INSTALL_BINDIR "bin" CACHE STRING "User executables (bin)")
set(CMAKE_INSTALL_LIBDIR "lib" CACHE STRING "Object code libraries (lib)")
set(CMAKE_INSTALL_INCLUDEDIR "include" CACHE STRING "C header files (include)")
set(CMAKE_INSTALL_DOCDIR "doc" CACHE STRING "Document files (doc)")
mark_as_advanced(CMAKE_INSTALL_BINDIR CMAKE_INSTALL_LIBDIR CMAKE_INSTALL_INCLUDEDIR, CMAKE_INSTALL_DOCDIR)
endif()
if (DEFINED NGRAPH_INSTALL_PREFIX)
set(CMAKE_INSTALL_PREFIX ${NGRAPH_INSTALL_PREFIX})
endif()
message(STATUS "Installation directory: ${CMAKE_INSTALL_PREFIX}")
# Destinations
set(NGRAPH_INSTALL_LIB "deployment_tools/ngraph/${CMAKE_INSTALL_LIBDIR}")
set(NGRAPH_INSTALL_INCLUDE "deployment_tools/ngraph/${CMAKE_INSTALL_INCLUDEDIR}")
set(NGRAPH_INSTALL_DOC "deployment_tools/ngraph/${CMAKE_INSTALL_DOCDIR}")
set(NGRAPH_INSTALL_BIN "deployment_tools/ngraph/${CMAKE_INSTALL_BINDIR}")
#-----------------------------------------------------------------------------------------------
@ -118,19 +106,6 @@ if (WIN32)
string(REPLACE "/W3" "/W0" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
endif()
if (NOT CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Werror")
endif()
if(WIN32)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DNOMINMAX")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_CRT_SECURE_NO_WARNINGS")
endif()
if (NGRAPH_DYNAMIC_COMPONENTS_ENABLE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DNGRAPH_DYNAMIC_COMPONENTS_ENABLE")
endif()
add_definitions(-DPROJECT_ROOT_DIR="${CMAKE_CURRENT_SOURCE_DIR}")
#-----------------------------------------------------------------------------------------------

View File

@ -113,15 +113,17 @@ else()
set(_proto_libs ${Protobuf_LIBRARIES})
if(TARGET libprotoc)
list(APPEND _proto_libs libprotoc)
set_target_properties(libprotoc PROPERTIES
COMPILE_FLAGS "-Wno-all -Wno-unused-variable")
target_compile_options(libprotoc PRIVATE -Wno-all -Wno-unused-variable)
endif()
set_target_properties(${_proto_libs} PROPERTIES
CXX_VISIBILITY_PRESET default
C_VISIBILITY_PRESET default
VISIBILITY_INLINES_HIDDEN OFF)
set_target_properties(libprotobuf libprotobuf-lite PROPERTIES
COMPILE_FLAGS "-Wno-all -Wno-unused-variable -Wno-inconsistent-missing-override")
foreach(target libprotobuf libprotobuf-lite)
target_compile_options(${target}
PRIVATE -Wno-all -Wno-unused-variable -Wno-inconsistent-missing-override
PUBLIC -Wno-undef)
endforeach()
endif()
if(NGRAPH_USE_PROTOBUF_LITE)

View File

@ -16,8 +16,8 @@ namespace ngraph
class NGRAPH_API Cos : public util::UnaryElementwiseArithmetic
{
public:
static constexpr NodeTypeInfo type_info{"Cos", 0};
const NodeTypeInfo& get_type_info() const override { return type_info; }
NGRAPH_RTTI_DECLARATION;
/// \brief Constructs a cosine operation.
Cos() = default;
/// \brief Constructs a cosine operation.

View File

@ -23,8 +23,7 @@ namespace ngraph
class NGRAPH_API NonZero : public Op
{
public:
static constexpr NodeTypeInfo type_info{"NonZero", 3};
const NodeTypeInfo& get_type_info() const override { return type_info; }
NGRAPH_RTTI_DECLARATION;
/// \brief Constructs a NonZero operation.
NonZero() = default;
/// \brief Constructs a NonZero operation.

View File

@ -13,7 +13,8 @@ namespace ngraph
{
namespace reference
{
template <typename T>
template <typename T,
typename std::enable_if<!std::is_integral<T>::value, bool>::type = true>
void cos(const T* arg, T* out, size_t count)
{
for (size_t i = 0; i < count; i++)
@ -21,6 +22,16 @@ namespace ngraph
out[i] = std::cos(arg[i]);
}
}
template <typename T,
typename std::enable_if<std::is_integral<T>::value, bool>::type = true>
void cos(const T* arg, T* out, size_t count)
{
for (size_t i = 0; i < count; i++)
{
out[i] = std::roundl(std::cos(arg[i]));
}
}
} // namespace reference
} // namespace runtime
} // namespace ngraph

View File

@ -4,6 +4,10 @@
#pragma once
#if defined _WIN32 && !defined NOMINMAX
#define NOMINMAX
#endif
#include <functional>
#include <xbyak/xbyak.h>

View File

@ -3,6 +3,9 @@
//
#ifdef _WIN32
#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <windows.h>
#else
#include <dirent.h>

View File

@ -5,17 +5,16 @@
#include "itt.hpp"
#include "ngraph/op/cos.hpp"
#include "ngraph/op/multiply.hpp"
#include "ngraph/op/negative.hpp"
#include "ngraph/op/sin.hpp"
#include "ngraph/runtime/host_tensor.hpp"
#include "ngraph/runtime/reference/cos.hpp"
#include "ngraph/validation_util.hpp"
using namespace std;
using namespace ngraph;
constexpr NodeTypeInfo op::Cos::type_info;
NGRAPH_RTTI_DEFINITION(op::v0::Cos, "Cos", 0, util::UnaryElementwiseArithmetic);
op::Cos::Cos(const Output<Node>& arg)
: UnaryElementwiseArithmetic(arg)
@ -69,6 +68,7 @@ namespace cosop
bool op::Cos::evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const
{
NGRAPH_OP_SCOPE(v0_Cos_evaluate);
NGRAPH_CHECK(validate_host_tensor_vector(outputs, 1) && validate_host_tensor_vector(inputs, 1));
return cosop::evaluate_cos(inputs[0], outputs[0], shape_size(get_output_shape(0)));
}

View File

@ -15,7 +15,7 @@
using namespace ngraph;
using namespace std;
constexpr NodeTypeInfo op::v3::NonZero::type_info;
NGRAPH_RTTI_DEFINITION(op::v3::NonZero, "NonZero", 3);
op::v3::NonZero::NonZero(const Output<Node>& arg)
: Op({arg})
@ -47,25 +47,20 @@ bool ngraph::op::v3::NonZero::visit_attributes(AttributeVisitor& visitor)
void op::v3::NonZero::validate_and_infer_types()
{
NGRAPH_OP_SCOPE(v3_NonZero_validate_and_infer_types);
const PartialShape& input_shape = get_input_partial_shape(0);
NODE_VALIDATION_CHECK(this,
m_output_type == element::i64 || m_output_type == element::i32,
"Output type must be i32 or i64");
// For scalar non-zero value case, onnx test case expects output shape {1, 1}
if (input_shape.rank() == 0)
const PartialShape& input_shape = get_input_partial_shape(0);
if (input_shape.rank().compatible(0))
{
set_output_type(0, m_output_type, PartialShape{Dimension::dynamic(), Dimension::dynamic()});
}
else
{
const Dimension dim = input_shape.is_static()
? std::accumulate(begin(input_shape),
end(input_shape),
Dimension(0, 1),
std::multiplies<Dimension>())
: Dimension();
const Dimension dim = std::accumulate(
begin(input_shape), end(input_shape), Dimension(0, 1), std::multiplies<Dimension>());
set_output_type(0, m_output_type, PartialShape{input_shape.rank(), dim});
}

View File

@ -3,6 +3,9 @@
//
#ifdef _WIN32
#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <Windows.h>
#include <direct.h>
#else // _WIN32

View File

@ -224,7 +224,13 @@ namespace ngraph
const auto& scales_shape = scales.get_partial_shape();
auto attrs = get_resize_attrs(node);
if (attrs.mode == InterpolateMode::linear_onnx)
if (attrs.mode == InterpolateMode::nearest)
{
attrs.nearest_mode = Nearest_mode::floor;
attrs.coordinate_transformation_mode = Transform_mode::asymmetric;
}
else if (attrs.mode == InterpolateMode::linear_onnx)
{
attrs.coordinate_transformation_mode = Transform_mode::asymmetric;
}

View File

@ -52,7 +52,7 @@ add_custom_target(${TARGET_NAME}_proto DEPENDS ${PROTO_SRCS} ${PROTO_HDRS})
set_source_files_properties(${PROTO_SRCS} ${PROTO_HDRS} PROPERTIES GENERATED TRUE)
# Disable all warnings for generated code
set_source_files_properties(${PROTO_SRCS} ${PROTO_HDRS} PROPERTIES COMPILE_FLAGS -w)
set_source_files_properties(${PROTO_SRCS} ${PROTO_HDRS} PROPERTIES COMPILE_OPTIONS -w)
# Create shared library
add_library(${TARGET_NAME} SHARED ${LIBRARY_SRC} ${LIBRARY_HEADERS} ${LIBRARY_PUBLIC_HEADERS} ${PROTO_SRCS} ${PROTO_HDRS})

View File

@ -110,6 +110,7 @@ set(SRC
type_prop/convert.cpp
type_prop/convolution.cpp
type_prop/convolution_backprop_data.cpp
type_prop/cos.cpp
type_prop/ctc_greedy_decoder.cpp
type_prop/ctc_greedy_decoder_seq_len.cpp
type_prop/ctc_loss.cpp
@ -231,6 +232,7 @@ set(SRC
visitors/op/constant.cpp
visitors/op/convert.cpp
visitors/op/convolution_backprop.cpp
visitors/op/cos.cpp
visitors/op/cum_sum.cpp
visitors/op/deformable_psroi_pooling.cpp
visitors/op/depth_to_space.cpp
@ -257,6 +259,7 @@ set(SRC
visitors/op/mvn.cpp
visitors/op/negative.cpp
visitors/op/non_max_suppression.cpp
visitors/op/non_zero.cpp
visitors/op/normalize_l2.cpp
visitors/op/one_hot.cpp
visitors/op/pad.cpp
@ -326,7 +329,7 @@ if (ENABLE_CLDNN AND NGRAPH_UNIT_TEST_BACKENDS_ENABLE)
endif()
endif()
if (NGRAPH_INTERPRETER_ENABLE AND NGRAPH_UNIT_TEST_BACKENDS_ENABLE)
if (NGRAPH_UNIT_TEST_BACKENDS_ENABLE)
list(APPEND SRC
builder.cpp
backend_api.cpp)
@ -630,10 +633,7 @@ if (NGRAPH_ONNX_EDITOR_ENABLE)
target_link_libraries(unit-test PRIVATE onnx_editor)
endif()
if (NGRAPH_INTERPRETER_ENABLE)
target_compile_definitions(unit-test PRIVATE NGRAPH_INTERPRETER_ENABLE)
target_link_libraries(unit-test PRIVATE interpreter_backend)
endif()
target_link_libraries(unit-test PRIVATE interpreter_backend)
install(TARGETS unit-test
RUNTIME DESTINATION tests

View File

@ -2,23 +2,6 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <algorithm>
#include <cinttypes>
#include <cmath>
#include <cstdlib>
#include <random>
#include <string>
// clang-format off
#ifdef ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS
#define DEFAULT_FLOAT_TOLERANCE_BITS ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS
#endif
#ifdef ${BACKEND_NAME}_DOUBLE_TOLERANCE_BITS
#define DEFAULT_DOUBLE_TOLERANCE_BITS ${BACKEND_NAME}_DOUBLE_TOLERANCE_BITS
#endif
// clang-format on
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/engine/test_engines.hpp"
@ -31,7 +14,7 @@ using namespace ngraph;
static string s_manifest = "${MANIFEST}";
using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
NGRAPH_TEST(${BACKEND_NAME}, cos)
NGRAPH_TEST(${BACKEND_NAME}, cos_float)
{
Shape shape{11};
auto A = make_shared<op::Parameter>(element::f32, shape);
@ -53,3 +36,16 @@ NGRAPH_TEST(${BACKEND_NAME}, cos)
-0.65364362f});
test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, cos_int)
{
Shape shape{5};
auto A = make_shared<op::Parameter>(element::i32, shape);
auto f = make_shared<Function>(make_shared<op::Cos>(A), ParameterVector{A});
auto test_case = test::TestCase<TestEngine>(f);
test_case.add_input<int32_t>({1, 2, 3, 4, 5});
test_case.add_expected_output<int32_t>(shape,
{1, 0, -1, -1, 0});
test_case.run();
}

View File

@ -350,8 +350,6 @@ NGRAPH_TEST(${BACKEND_NAME}, reduce_sum_2d_to_scalar_int8)
EXPECT_EQ(std::vector<int8_t>{45}, read_vector<int8_t>(result));
}
#if NGRAPH_INTERPRETER_ENABLE
#ifndef _WIN32
NGRAPH_TEST(${BACKEND_NAME}, reduce_sum_stable_acc)
{
@ -418,8 +416,6 @@ NGRAPH_TEST(${BACKEND_NAME}, reduce_sum_stable_simple_float)
test::all_close_f(ref_results.at(0), bk_results.at(0), DEFAULT_FLOAT_TOLERANCE_BITS - 1));
}
#endif
NGRAPH_TEST(${BACKEND_NAME}, reduce_sum_inf)
{
Shape shape{7, 4};
@ -782,8 +778,6 @@ NGRAPH_TEST(${BACKEND_NAME}, reduce_sum_keep_2d_to_scalar_int8)
EXPECT_EQ(std::vector<int8_t>{45}, read_vector<int8_t>(result));
}
#if NGRAPH_INTERPRETER_ENABLE
#ifndef _WIN32
NGRAPH_TEST(${BACKEND_NAME}, reduce_sum_keep_stable_acc)
{
@ -851,8 +845,6 @@ NGRAPH_TEST(${BACKEND_NAME}, reduce_sum_keep_stable_simple_float)
test::all_close_f(ref_results.at(0), bk_results.at(0), DEFAULT_FLOAT_TOLERANCE_BITS - 1));
}
#endif
NGRAPH_TEST(${BACKEND_NAME}, reduce_sum_keep_inf)
{
Shape shape{7, 4};

View File

@ -362,8 +362,6 @@ NGRAPH_TEST(${BACKEND_NAME}, builder_reshape_to_same_shape)
test_case.run();
}
#if NGRAPH_INTERPRETER_ENABLE
NGRAPH_TEST(${BACKEND_NAME}, reshape_shufflenet_5d)
{
Shape shape_a{1, 112, 56, 56};
@ -396,5 +394,3 @@ NGRAPH_TEST(${BACKEND_NAME}, reshape_shufflenet_5d)
EXPECT_TRUE(test::all_close_f(ref_results.at(0), bk_results.at(0), MIN_FLOAT_TOLERANCE_BITS));
}
#endif // NGRAPH_INTERPRETER_ENABLE

View File

@ -183,7 +183,6 @@ TEST(eval, evaluate_dynamic_range_sum)
ASSERT_EQ(cval, seq);
}
#ifdef NGRAPH_INTERPRETER_ENABLE
TEST(eval, interpret_dynamic_range_sum)
{
auto p_start = make_shared<op::Parameter>(element::f32, PartialShape{});
@ -212,7 +211,6 @@ TEST(eval, interpret_dynamic_range_sum)
vector<float> seq{8.0f, 11.0f, 14.0f};
ASSERT_EQ(result_val, seq);
}
#endif
TEST(eval, evaluate_broadcast_v3_bidirectional)
{

View File

@ -0,0 +1,82 @@
ir_version: 7
producer_name: "backend-test"
graph {
node {
output: "scales"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 4
data_type: 1
float_data: 1.0
float_data: 1.0
float_data: 1.9
float_data: 1.0
name: "const_tensor"
}
type: TENSOR
}
}
node {
input: "X"
input: "scales"
output: "Y"
op_type: "Resize"
attribute {
name: "mode"
s: "nearest"
type: STRING
}
}
name: "test_resize_downsample_scales_nearest"
input {
name: "X"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 10
}
dim {
dim_value: 1
}
}
}
}
}
output {
name: "Y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 19
}
dim {
dim_value: 1
}
}
}
}
}
}
opset_import {
version: 10
}

View File

@ -0,0 +1,82 @@
ir_version: 7
producer_name: "backend-test"
graph {
node {
output: "scales"
op_type: "Constant"
attribute {
name: "value"
t {
dims: 4
data_type: 1
float_data: 1.0
float_data: 1.0
float_data: 1.0
float_data: 1.9
name: "const_tensor"
}
type: TENSOR
}
}
node {
input: "X"
input: "scales"
output: "Y"
op_type: "Resize"
attribute {
name: "mode"
s: "nearest"
type: STRING
}
}
name: "test_resize_downsample_scales_nearest"
input {
name: "X"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 10
}
}
}
}
}
output {
name: "Y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 1
}
dim {
dim_value: 19
}
}
}
}
}
}
opset_import {
version: 10
}

View File

@ -1398,6 +1398,37 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_model_reduce_sum_13_axes_empty_without_noop)
test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_resize10_asymertic_last_dim)
{
const auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/resize10_asymertic_last_dim.prototxt"));
auto test_case = test::TestCase<TestEngine>(function);
std::vector<float> input_data{1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f};
test_case.add_input<float>(input_data);
test_case.add_expected_output<float>(
Shape{1,1,1,19},{1.0f, 1.0f, 2.0f, 2.0f, 3.0f, 3.0f, 4.0f, 4.0f, 5.0f, 5.0f, 6.0f, 6.0f, 7.0f, 7.0f, 8.0f, 8.0f, 9.0f, 9.0f, 10.0f});
test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_resize10_asymertic_dim_in_the_middle)
{
const auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/resize10_asymertic_dim_in_the_middle.prototxt"));
auto test_case = test::TestCase<TestEngine>(function);
std::vector<float> input_data{1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f};
test_case.add_input<float>(input_data);
test_case.add_expected_output<float>(
Shape{1,1,19,1},{1.0f, 1.0f, 2.0f, 2.0f, 3.0f, 3.0f, 4.0f, 4.0f, 5.0f, 5.0f, 6.0f, 6.0f, 7.0f, 7.0f, 8.0f, 8.0f, 9.0f, 9.0f, 10.0f});
test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, onnx_resize11_empty_constant_as_input)
{
// this model contains a Constant node with an empty underlying tensor

View File

@ -3,6 +3,9 @@
//
#ifdef _WIN32
#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <windows.h>
#if defined(WINAPI_FAMILY) && !WINAPI_PARTITION_DESKTOP
#error "Only WINAPI_PARTITION_DESKTOP is supported, because of LoadLibrary[A|W]"
@ -28,9 +31,8 @@ std::string runtime::Backend::s_backend_shared_library_search_directory;
// This finds the full path of the containing shared library
static string find_my_pathname()
{
#ifdef NGRAPH_DYNAMIC_COMPONENTS_ENABLE
#ifdef _WIN32
HMODULE hModule = GetModuleHandleW(L"ngraph.dll");
HMODULE hModule = GetModuleHandleW(SHARED_LIB_PREFIX L"ngraph" SHARED_LIB_SUFFIX);
WCHAR wpath[MAX_PATH];
GetModuleFileNameW(hModule, wpath, MAX_PATH);
wstring ws(wpath);
@ -46,9 +48,6 @@ static string find_my_pathname()
#else
#error "Unsupported OS"
#endif
#else
return {};
#endif
}
runtime::Backend::~Backend() {}

View File

@ -3,6 +3,9 @@
//
#ifdef _WIN32
#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <windows.h>
#else
#include <dlfcn.h>
@ -18,7 +21,6 @@
using namespace std;
using namespace ngraph;
#ifdef NGRAPH_DYNAMIC_COMPONENTS_ENABLE
#ifdef _WIN32
#define CLOSE_LIBRARY(a) FreeLibrary(a)
#define DLSYM(a, b) GetProcAddress(a, b)
@ -32,9 +34,6 @@ string DLERROR()
return error == nullptr ? "" : error;
}
#endif
#else
#define DLERROR() ""
#endif
unordered_map<string, runtime::BackendConstructor>& runtime::BackendManager::get_registry()
{
@ -80,7 +79,6 @@ shared_ptr<runtime::Backend> runtime::BackendManager::create_backend(std::string
auto& registry = get_registry();
auto it = registry.find(type);
string error;
#ifdef NGRAPH_DYNAMIC_COMPONENTS_ENABLE
if (it == registry.end())
{
DL_HANDLE handle = open_shared_library(type);
@ -115,7 +113,6 @@ shared_ptr<runtime::Backend> runtime::BackendManager::create_backend(std::string
}
}
}
#endif
if (it == registry.end())
{
@ -133,7 +130,6 @@ shared_ptr<runtime::Backend> runtime::BackendManager::create_backend(std::string
DL_HANDLE runtime::BackendManager::open_shared_library(string type)
{
DL_HANDLE handle = nullptr;
#ifdef NGRAPH_DYNAMIC_COMPONENTS_ENABLE
string lib_prefix = SHARED_LIB_PREFIX;
string lib_suffix = SHARED_LIB_SUFFIX;
@ -168,14 +164,12 @@ DL_HANDLE runtime::BackendManager::open_shared_library(string type)
}
throw runtime_error(ss.str());
}
#endif
return handle;
}
map<string, string> runtime::BackendManager::get_registered_device_map()
{
map<string, string> rc;
#ifdef NGRAPH_DYNAMIC_COMPONENTS_ENABLE
string my_directory =
file_util::get_directory(Backend::get_backend_shared_library_search_directory());
vector<string> backend_list;
@ -192,7 +186,6 @@ map<string, string> runtime::BackendManager::get_registered_device_map()
}
};
file_util::iterate_files(my_directory, f, false, true);
#endif
return rc;
}

View File

@ -12,6 +12,9 @@
#include <vector>
#ifdef _WIN32
#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <windows.h>
#define DL_HANDLE HMODULE
#else

View File

@ -26,7 +26,6 @@ if(COMMAND ie_add_vs_version_file)
endif()
add_dependencies(ie_backend inference_engine)
target_include_directories(ie_backend PUBLIC ${IE_MAIN_SOURCE_DIR}/include)
target_link_libraries(ie_backend PUBLIC ngraph_backend inference_engine)
install(TARGETS ie_backend

Some files were not shown because too many files have changed in this diff Show More