Renamed ov_runtime => openvino, ov_ => openvino_ prefix (#10069)

* Renamed ov_runtime => openvino, ov_ => openvino_ prefix

* Coverage fix

* More fixes

* Fixed MO tests with custom FE
This commit is contained in:
Ilya Lavrenov
2022-02-03 20:03:41 +03:00
committed by GitHub
parent 86faa25724
commit f2f281e60b
137 changed files with 368 additions and 356 deletions

View File

@@ -69,7 +69,7 @@ jobs:
- script: >
env -C ~/work
./buildreleasenolto.sh
libinference_engine_preproc.so
libopenvino_gapi_preproc.so
ov_intel_cpu_plugin
ov_intel_gpu_plugin
clDNN_unit_tests64

View File

@@ -23,14 +23,14 @@ ie_coverage_extract(INPUT "openvino" OUTPUT "legacy"
ie_coverage_genhtml(INFO_FILE "legacy"
PREFIX "${OV_COVERAGE_BASE_DIRECTORY}")
ie_coverage_extract(INPUT "openvino" OUTPUT "ov_hetero_plugin"
ie_coverage_extract(INPUT "openvino" OUTPUT "hetero_plugin"
PATTERNS "${OV_COVERAGE_BASE_DIRECTORY}/src/plugins/hetero/*")
ie_coverage_genhtml(INFO_FILE "ov_hetero_plugin"
ie_coverage_genhtml(INFO_FILE "hetero_plugin"
PREFIX "${OV_COVERAGE_BASE_DIRECTORY}")
ie_coverage_extract(INPUT "openvino" OUTPUT "ov_auto_plugin"
ie_coverage_extract(INPUT "openvino" OUTPUT "auto_plugin"
PATTERNS "${OV_COVERAGE_BASE_DIRECTORY}/src/plugins/auto/*")
ie_coverage_genhtml(INFO_FILE "ov_auto_plugin"
ie_coverage_genhtml(INFO_FILE "auto_plugin"
PREFIX "${OV_COVERAGE_BASE_DIRECTORY}")
ie_coverage_extract(INPUT "openvino" OUTPUT "preprocessing"
@@ -73,9 +73,9 @@ if (ENABLE_INTEL_GPU)
endif()
if(ENABLE_INTEL_GNA)
ie_coverage_extract(INPUT "openvino" OUTPUT "ov_intel_gna_plugin"
ie_coverage_extract(INPUT "openvino" OUTPUT "intel_gna_plugin"
PATTERNS "${OV_COVERAGE_BASE_DIRECTORY}/src/plugins/intel_gna/*")
ie_coverage_genhtml(INFO_FILE "ov_intel_gna_plugin"
ie_coverage_genhtml(INFO_FILE "intel_gna_plugin"
PREFIX "${OV_COVERAGE_BASE_DIRECTORY}")
endif()

View File

@@ -3,7 +3,7 @@
#
set(FRONTEND_INSTALL_INCLUDE "runtime/include/")
set(FRONTEND_NAME_PREFIX "ov_")
set(FRONTEND_NAME_PREFIX "openvino_")
set(FRONTEND_NAME_SUFFIX "_frontend")
set(FRONTEND_NAMES "" CACHE INTERNAL "")
@@ -35,7 +35,7 @@ function(ov_generate_frontends_hpp)
endif()
# add frontends to libraries including ov_frontends.hpp
ov_target_link_frontends(ov_runtime)
ov_target_link_frontends(openvino)
set(ov_frontends_hpp "${CMAKE_BINARY_DIR}/src/frontends/common/src/ov_frontends.hpp")
set(frontends_hpp_in "${IEDevScripts_DIR}/frontends/ov_frontends.hpp.in")

View File

@@ -102,32 +102,33 @@ function(ie_add_plugin)
endif()
add_dependencies(ie_plugins ${IE_PLUGIN_NAME})
if(TARGET inference_engine_preproc)
if(TARGET openvino_gapi_preproc)
if(BUILD_SHARED_LIBS)
add_dependencies(${IE_PLUGIN_NAME} inference_engine_preproc)
add_dependencies(${IE_PLUGIN_NAME} openvino_gapi_preproc)
else()
target_link_libraries(${IE_PLUGIN_NAME} PRIVATE inference_engine_preproc)
target_link_libraries(${IE_PLUGIN_NAME} PRIVATE openvino_gapi_preproc)
endif()
endif()
# fake dependencies to build in the following order:
# IE -> IE readers -> IE inference plugins -> IE-based apps
if(BUILD_SHARED_LIBS)
if(TARGET ov_ir_frontend)
add_dependencies(${IE_PLUGIN_NAME} ov_ir_frontend)
if(TARGET openvino_ir_frontend)
add_dependencies(${IE_PLUGIN_NAME} openvino_ir_frontend)
endif()
if(TARGET openvino_onnx_frontend)
add_dependencies(${IE_PLUGIN_NAME} openvino_onnx_frontend)
endif()
if(TARGET openvino_paddle_frontend)
add_dependencies(${IE_PLUGIN_NAME} openvino_paddle_frontend)
endif()
if(TARGET openvino_tensorflow_frontend)
add_dependencies(${IE_PLUGIN_NAME} openvino_tensorflow_frontend)
endif()
# TODO: remove with legacy CNNNLayer API / IR v7
if(TARGET inference_engine_ir_v7_reader)
add_dependencies(${IE_PLUGIN_NAME} inference_engine_ir_v7_reader)
endif()
if(TARGET ov_onnx_frontend)
add_dependencies(${IE_PLUGIN_NAME} ov_onnx_frontend)
endif()
if(TARGET ov_paddle_frontend)
add_dependencies(${IE_PLUGIN_NAME} ov_paddle_frontend)
endif()
if(TARGET ov_tensorflow_frontend)
add_dependencies(${IE_PLUGIN_NAME} ov_tensorflow_frontend)
endif()
endif()
# install rules
@@ -319,7 +320,7 @@ function(ie_generate_plugins_hpp)
endforeach()
# add plugins to libraries including ie_plugins.hpp
ie_target_link_plugins(ov_runtime)
ie_target_link_plugins(openvino)
if(TARGET inference_engine_s)
ie_target_link_plugins(inference_engine_s)
endif()

View File

@@ -82,8 +82,8 @@ function(register_extra_modules)
endif()
endforeach()
if ("${NS}" STREQUAL "openvino")
file(APPEND "${devconfig_file}" "add_library(${NS}::runtime ALIAS ov_runtime)\n")
file(APPEND "${devconfig_file}" "add_library(${NS}::runtime::dev ALIAS ov_runtime_dev)\n")
file(APPEND "${devconfig_file}" "add_library(${NS}::runtime ALIAS openvino)\n")
file(APPEND "${devconfig_file}" "add_library(${NS}::runtime::dev ALIAS openvino_dev)\n")
endif()
endfunction()

View File

@@ -180,7 +180,7 @@ if(ENABLE_INTEL_GNA AND NOT ENABLE_INTEL_GNA_SHARED AND NOT libGNA_FOUND)
NO_DEFAULT_PATH)
endif()
if(NOT TARGET ov_runtime)
if(NOT TARGET openvino)
set(_ov_as_external_package ON)
include("${CMAKE_CURRENT_LIST_DIR}/OpenVINOTargets.cmake")

View File

@@ -26,11 +26,16 @@
#
# Frontends:
#
# ngraph_ov_onnx_frontend_FOUND - True if the system has ov_onnx_frontend library
# ngraph::ov_onnx_frontend - ONNX FrontEnd target (optional)
# ngraph_onnx_frontend_FOUND - True if the system has ngraph::onnx_frontend library
# ngraph::onnx_frontend - ONNX FrontEnd target (optional)
#
# ngraph_paddle_frontend_FOUND - True if the system has Paddle frontend
# ngraph::ov_paddle_frontend - nGraph Paddle frontend (optional)
# ngraph_paddle_frontend_FOUND - True if the system has Paddle frontend
# ngraph::paddle_frontend - nGraph Paddle frontend (optional)
#
# ngraph_ir_frontend_FOUND - True if the system has OpenVINO IR frontend
#
# ngraph_tensorflow_frontend_FOUND - True if the system has TensorFlow frontend
# ngraph::tensorflow_frontend - nGraph TensorFlow frontend (optional)
#
@PACKAGE_INIT@
@@ -50,43 +55,46 @@ if(TARGET openvino::runtime AND NOT TARGET ngraph::ngraph)
INTERFACE_LINK_LIBRARIES openvino::runtime)
endif()
if(TARGET openvino::frontend::onnx AND NOT TARGET ngraph::ov_onnx_frontend)
add_library(ngraph::ov_onnx_frontend INTERFACE IMPORTED)
set_target_properties(ngraph::ov_onnx_frontend PROPERTIES
if(TARGET openvino::frontend::onnx AND NOT TARGET ngraph::onnx_frontend)
add_library(ngraph::onnx_frontend INTERFACE IMPORTED)
set_target_properties(ngraph::onnx_frontend PROPERTIES
INTERFACE_LINK_LIBRARIES openvino::frontend::onnx)
endif()
if(TARGET openvino::frontend::paddle AND NOT TARGET ngraph::ov_paddle_frontend)
add_library(ngraph::ov_paddle_frontend INTERFACE IMPORTED)
set_target_properties(ngraph::ov_paddle_frontend PROPERTIES
if(TARGET openvino::frontend::paddle AND NOT TARGET ngraph::paddle_frontend)
add_library(ngraph::paddle_frontend INTERFACE IMPORTED)
set_target_properties(ngraph::paddle_frontend PROPERTIES
INTERFACE_LINK_LIBRARIES openvino::frontend::paddle)
endif()
if(TARGET openvino::frontend::tensorflow AND NOT TARGET ngraph::ov_tensorflow_frontend)
add_library(ngraph::ov_tensorflow_frontend INTERFACE IMPORTED)
set_target_properties(ngraph::ov_tensorflow_frontend PROPERTIES
if(TARGET openvino::frontend::tensorflow AND NOT TARGET ngraph::tensorflow_frontend)
add_library(ngraph::tensorflow_frontend INTERFACE IMPORTED)
set_target_properties(ngraph::tensorflow_frontend PROPERTIES
INTERFACE_LINK_LIBRARIES openvino::frontend::tensorflow)
endif()
set(ngraph_ngraph_FOUND ON)
set(NGRAPH_LIBRARIES ngraph::ngraph)
set(ngraph_ov_onnx_frontend_FOUND ${OpenVINO_Frontend_ONNX_FOUND})
set(ngraph_onnx_frontend_FOUND ${OpenVINO_Frontend_ONNX_FOUND})
set(ngraph_tensorflow_frontend_FOUND ${OpenVINO_Frontend_TensorFlow_FOUND})
set(ngraph_paddle_frontend_FOUND ${OpenVINO_Frontend_Paddle_FOUND})
set(ngraph_onnx_importer_FOUND ${OpenVINO_Frontend_ONNX_FOUND})
if(ngraph_onnx_importer_FOUND)
set(ONNX_IMPORTER_LIBRARIES ngraph::ov_onnx_frontend)
set(ONNX_IMPORTER_LIBRARIES ngraph::onnx_frontend)
# ngraph::onnx_importer target and variables are deprecated
# but need to create a dummy target for BW compatibility
if(NOT TARGET ngraph::onnx_importer)
add_library(ngraph::onnx_importer INTERFACE IMPORTED)
set_target_properties(ngraph::onnx_importer PROPERTIES
INTERFACE_LINK_LIBRARIES ngraph::ov_onnx_frontend)
INTERFACE_LINK_LIBRARIES ngraph::onnx_frontend)
endif()
endif()
set(ngraph_paddle_frontend_FOUND ${OpenVINO_Frontend_Paddle_FOUND})
set(ngraph_tensorflow_frontend_FOUND ${OpenVINO_Frontend_TensorFlow_FOUND})
set(ngraph_onnx_frontend_FOUND ${OpenVINO_Frontend_ONNX_FOUND})
set(ngraph_ir_frontend_FOUND ${OpenVINO_Frontend_IR_FOUND})
check_required_components(ngraph)

View File

@@ -24,7 +24,7 @@ if(NOT ENABLE_DOCKER)
set(all_docs_targets
ie_docs_snippets ov_template_func_tests
template_extension ov_template_extension ov_template_plugin)
template_extension openvino_template_extension openvino_template_plugin)
foreach(target_name IN LISTS all_docs_targets)
if(TARGET ${target_name})
set_target_properties(${target_name} PROPERTIES FOLDER docs)
@@ -36,7 +36,7 @@ if(NOT ENABLE_DOCKER)
# install
foreach(target ov_template_plugin template_extension ov_template_extension)
foreach(target openvino_template_plugin template_extension openvino_template_extension)
if(TARGET ${target})
install(TARGETS ${target}
LIBRARY DESTINATION ${IE_CPACK_RUNTIME_PATH}

View File

@@ -70,7 +70,7 @@ The example below demonstrates how to unregister an operator from the destructor
## Requirements for Building with CMake
A program that uses the `register_operator` functionality requires `openvino::core` and `openvino::frontend::onnx` libraries in addition to the OpenVINO Inference Runtime.
The `ov_onnx_frontend` is a component of the `OpenVINO` package , so `find_package(OpenVINO REQUIRED COMPONENTS ONNX)` can find both.
The `openvino::frontend::onnx` is a component of the `OpenVINO` package, so `find_package(OpenVINO REQUIRED COMPONENTS ONNX)` can find both.
Those libraries need to be passed to the `target_link_libraries` command in the CMakeLists.txt file.
See CMakeLists.txt below for reference:

View File

@@ -23,14 +23,15 @@ The table below shows the plugin libraries and additional dependencies for Linux
| Plugin | Library name for Linux | Dependency libraries for Linux | Library name for Windows | Dependency libraries for Windows | Library name for macOS | Dependency libraries for macOS |
|--------|-----------------------------|-------------------------------------------------------------|--------------------------|--------------------------------------------------------------------------------------------------------|------------------------------|---------------------------------------------|
| CPU | `libov_intel_cpu_plugin.so` | | `ov_intel_cpu_plugin.dll` | | `libov_intel_cpu_plugin.so` | |
| GPU | `libov_intel_gpu_plugin.so` | `libOpenCL.so` | `ov_intel_gpu_plugin.dll` | `OpenCL.dll` | Is not supported | - |
| MYRIAD | `libov_intel_vpu_plugin.so` | `libusb.so` | `ov_intel_vpu_plugin.dll`| `usb.dll` | `libov_intel_vpu_plugin.so` | `libusb.dylib` |
| HDDL | `libHDDLPlugin.so` | `libbsl.so`, `libhddlapi.so`, `libmvnc-hddl.so` | `HDDLPlugin.dll` | `bsl.dll`, `hddlapi.dll`, `json-c.dll`, `libcrypto-1_1-x64.dll`, `libssl-1_1-x64.dll`, `mvnc-hddl.dll` | Is not supported | - |
| GNA | `libov_intel_gna_plugin.so` | `libgna.so`, | `ov_intel_gna_plugin.dll` | `gna.dll` | Is not supported | - |
| HETERO | `libov_hetero_plugin.so` | Same as for selected plugins | `ov_hetero_plugin.dll` | Same as for selected plugins | `libov_hetero_plugin.so` | Same as for selected plugins |
| MULTI | `libov_auto_plugin.so` | Same as for selected plugins | `ov_auto_plugin.dll` | Same as for selected plugins | `libov_auto_plugin.so` | Same as for selected plugins |
| AUTO | `libov_auto_plugin.so` | Same as for selected plugins | `ov_auto_plugin.dll` | Same as for selected plugins | `libov_auto_plugin.so` | Same as for selected plugins |
| CPU | `libopenvino_intel_cpu_plugin.so` | | `openvino_intel_cpu_plugin.dll` | | `libopenvino_intel_cpu_plugin.so` | |
| GPU | `libopenvino_intel_gpu_plugin.so` | `libOpenCL.so` | `openvino_intel_gpu_plugin.dll` | `OpenCL.dll` | Is not supported | - |
| MYRIAD | `libopenvino_intel_myriad_plugin.so` | `libusb.so` | `openvino_intel_myriad_plugin.dll`| `usb.dll` | `libopenvino_intel_myriad_plugin.so` | `libusb.dylib` |
| HDDL | `libintel_hddl_plugin.so` | `libbsl.so`, `libhddlapi.so`, `libmvnc-hddl.so` | `intel_hddl_plugin.dll` | `bsl.dll`, `hddlapi.dll`, `json-c.dll`, `libcrypto-1_1-x64.dll`, `libssl-1_1-x64.dll`, `mvnc-hddl.dll` | Is not supported | - |
| GNA | `libopenvino_intel_gna_plugin.so` | `libgna.so`, | `openvino_intel_gna_plugin.dll` | `gna.dll` | Is not supported | - |
| HETERO | `libopenvino_hetero_plugin.so` | Same as for selected plugins | `openvino_hetero_plugin.dll` | Same as for selected plugins | `libopenvino_hetero_plugin.so` | Same as for selected plugins |
| MULTI | `libopenvino_auto_plugin.so` | Same as for selected plugins | `openvino_auto_plugin.dll` | Same as for selected plugins | `libopenvino_auto_plugin.so` | Same as for selected plugins |
| AUTO | `libopenvino_auto_plugin.so` | Same as for selected plugins | `openvino_auto_plugin.dll` | Same as for selected plugins | `libopenvino_auto_plugin.so` | Same as for selected plugins |
| BATCH | `libopenvino_auto_batch_plugin.so` | Same as for selected plugins | `openvino_auto_batch_plugin.dll` | Same as for selected plugins | `libopenvino_auto_batch_plugin.so` | Same as for selected plugins |
## Supported Configurations

View File

@@ -21,7 +21,7 @@ Once the commands above are executed, the Inference Engine Developer Package is
* `IE::ngraph` - shared nGraph library
* `IE::inference_engine` - shared Inference Engine library
* `IE::inference_engine_transformations` - shared library with Inference Engine ngraph-based Transformations
* `IE::inference_engine_preproc` - shared library with Inference Engine preprocessing plugin
* `IE::openvino_gapi_preproc` - shared library with Inference Engine preprocessing plugin
* `IE::inference_engine_plugin_api` - interface library with Inference Engine Plugin API headers
* `IE::inference_engine_lp_transformations` - shared library with low-precision transformations
* `IE::pugixml` - static Pugixml library

View File

@@ -46,7 +46,7 @@ if(OpenCV_FOUND)
endif()
if(ENABLE_OV_ONNX_FRONTEND)
target_link_libraries(${TARGET_NAME} PRIVATE ov_onnx_frontend)
target_link_libraries(${TARGET_NAME} PRIVATE openvino_onnx_frontend)
endif()
if(NOT MSVC)

View File

@@ -5,7 +5,7 @@
# [cmake:extension]
set(CMAKE_CXX_STANDARD 11)
set(TARGET_NAME "ov_template_extension")
set(TARGET_NAME "openvino_template_extension")
find_package(OpenVINO)

View File

@@ -3,7 +3,7 @@
#
# [cmake:plugin]
set(TARGET_NAME "ov_template_plugin")
set(TARGET_NAME "openvino_template_plugin")
file(GLOB_RECURSE SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
file(GLOB_RECURSE HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)

View File

@@ -299,6 +299,6 @@ InferenceEngine::Parameter Plugin::GetMetric(const std::string& name,
// ! [plugin:get_metric]
// ! [plugin:create_plugin_engine]
static const InferenceEngine::Version version = {{2, 1}, CI_BUILD_NUMBER, "ov_template_plugin"};
static const InferenceEngine::Version version = {{2, 1}, CI_BUILD_NUMBER, "openvino_template_plugin"};
IE_DEFINE_PLUGIN_CREATE_FUNCTION(Plugin, version)
// ! [plugin:create_plugin_engine]

View File

@@ -9,7 +9,7 @@ ov_add_test_target(
NAME ${TARGET_NAME}
ROOT ${CMAKE_CURRENT_SOURCE_DIR}
DEPENDENCIES
ov_template_plugin
openvino_template_plugin
LINK_LIBRARIES
openvino::funcSharedTests
INCLUDES
@@ -21,7 +21,7 @@ ov_add_test_target(
)
if(ENABLE_HETERO)
add_dependencies(${TARGET_NAME} ov_hetero_plugin)
add_dependencies(${TARGET_NAME} openvino_hetero_plugin)
endif()
find_package(OpenCV QUIET COMPONENTS core imgproc)

View File

@@ -18,7 +18,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(
smoke_IEClassBasicTestP, IEClassBasicTestP,
::testing::Values(std::make_pair("ov_template_plugin", CommonTestUtils::DEVICE_TEMPLATE)));
::testing::Values(std::make_pair("openvino_template_plugin", CommonTestUtils::DEVICE_TEMPLATE)));
INSTANTIATE_TEST_SUITE_P(
smoke_IEClassNetworkTestP, IEClassNetworkTestP,

View File

@@ -16,20 +16,20 @@ using namespace HeteroTests;
INSTANTIATE_TEST_SUITE_P(smoke_manyTargetInputs, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "openvino_template_plugin"}, {"TEMPLATE1", "openvino_template_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::withMajorNodesFunctions(
[] {return ngraph::builder::subgraph::makeConvPool2Relu2();}, {"Conv_1"}, true))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_SingleMajorNode, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "openvino_template_plugin"}, {"TEMPLATE1", "openvino_template_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::_singleMajorNodeFunctions)),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(nightly_RandomMajorNodes, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "openvino_template_plugin"}, {"TEMPLATE1", "openvino_template_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::_randomMajorNodeFunctions)),
HeteroSyntheticTest::getTestCaseName);
@@ -39,42 +39,42 @@ static std::vector<std::function<std::shared_ptr<ngraph::Function>()>> dynamicBu
INSTANTIATE_TEST_SUITE_P(smoke_NonZeroMajorNode_dynamic, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "openvino_template_plugin"}, {"TEMPLATE1", "openvino_template_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::withMajorNodesFunctions(
dynamicBuilders.front(), {"nonZero_1"}))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_NonZeroMajorNode_dynamic_batch, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "openvino_template_plugin"}, {"TEMPLATE1", "openvino_template_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::withMajorNodesFunctions(
dynamicBuilders.front(), {"nonZero_1"}, true))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_SingleMajorNode_dynamic, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "openvino_template_plugin"}, {"TEMPLATE1", "openvino_template_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::singleMajorNodeFunctions(
dynamicBuilders))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(nightly_RandomMajorNodes_dynamic, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "openvino_template_plugin"}, {"TEMPLATE1", "openvino_template_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::randomMajorNodeFunctions(
dynamicBuilders))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_SingleMajorNode_dynamic_batch, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "openvino_template_plugin"}, {"TEMPLATE1", "openvino_template_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::singleMajorNodeFunctions(
dynamicBuilders, true))),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(nightly_RandomMajorNodes_dynamic_batch, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "ov_template_plugin"}, {"TEMPLATE1", "ov_template_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "openvino_template_plugin"}, {"TEMPLATE1", "openvino_template_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::randomMajorNodeFunctions(
dynamicBuilders, true))),
HeteroSyntheticTest::getTestCaseName);

View File

@@ -95,7 +95,7 @@ ngraph_function_creation_sample.exe -m lenet.bin -i 7-ubyte
[ INFO ] Loading Inference Engine
[ INFO ] Device info:
CPU
ov_intel_cpu_plugin version ......... <version>
openvino_intel_cpu_plugin version ......... <version>
Build ........... <build>
[ INFO ] Preparing input blobs

View File

@@ -24,7 +24,7 @@ endif()
add_subdirectory(plugins)
add_subdirectory(inference)
include(cmake/ov_runtime.cmake)
include(cmake/openvino.cmake)
# preprocessing has dependency on ov_runtime for static build
add_subdirectory(common/preprocessing)

View File

@@ -37,7 +37,7 @@
#else
#if defined(_WIN32)
#define INFERENCE_ENGINE_C_API_CALLBACK __cdecl
#ifdef inference_engine_c_api_EXPORTS
#ifdef openvino_c_EXPORTS
#define INFERENCE_ENGINE_C_API(...) INFERENCE_ENGINE_C_API_EXTERN __declspec(dllexport) __VA_ARGS__ __cdecl
#else
#define INFERENCE_ENGINE_C_API(...) INFERENCE_ENGINE_C_API_EXTERN __declspec(dllimport) __VA_ARGS__ __cdecl

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME inference_engine_c_api)
set(TARGET_NAME openvino_c)
file(GLOB SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
file(GLOB HEADERS ${InferenceEngine_C_API_SOURCE_DIR}/include/*.h)
@@ -12,7 +12,7 @@ file(GLOB HEADERS ${InferenceEngine_C_API_SOURCE_DIR}/include/*.h)
add_library(${TARGET_NAME} ${HEADERS} ${SOURCES})
add_library(openvino::runtime::c ALIAS ${TARGET_NAME})
target_link_libraries(${TARGET_NAME} PRIVATE ov_runtime)
target_link_libraries(${TARGET_NAME} PRIVATE openvino)
target_include_directories(${TARGET_NAME} PUBLIC
$<BUILD_INTERFACE:${InferenceEngine_C_API_SOURCE_DIR}/include>)

View File

@@ -13,7 +13,7 @@ endif()
add_executable(${TARGET_NAME} ie_c_api_test.cpp test_model_repo.hpp)
target_link_libraries(${TARGET_NAME} PRIVATE inference_engine_c_api ${OpenCV_LIBRARIES}
target_link_libraries(${TARGET_NAME} PRIVATE openvino_c ${OpenCV_LIBRARIES}
commonTestUtils gtest_main)
target_compile_definitions(${TARGET_NAME}
@@ -23,19 +23,19 @@ target_compile_definitions(${TARGET_NAME}
MODELS_PATH=\"${MODELS_PATH}\" )
if(ENABLE_AUTO OR ENABLE_MULTI)
add_dependencies(${TARGET_NAME} ov_auto_plugin)
add_dependencies(${TARGET_NAME} openvino_auto_plugin)
endif()
if(ENABLE_AUTO_BATCH)
add_dependencies(${TARGET_NAME} ov_auto_batch_plugin)
add_dependencies(${TARGET_NAME} openvino_auto_batch_plugin)
endif()
if(ENABLE_INTEL_CPU)
add_dependencies(${TARGET_NAME} ov_intel_cpu_plugin)
add_dependencies(${TARGET_NAME} openvino_intel_cpu_plugin)
endif()
if(ENABLE_INTEL_GPU)
add_dependencies(${TARGET_NAME} ov_intel_gpu_plugin)
add_dependencies(${TARGET_NAME} openvino_intel_gpu_plugin)
endif()
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})

View File

@@ -157,7 +157,7 @@ TEST(ie_core_register_plugin, registerPlugin) {
IE_EXPECT_OK(ie_core_read_network(core, xml, bin, &network));
EXPECT_NE(nullptr, network);
const char *plugin_name = "ov_intel_cpu_plugin";
const char *plugin_name = "openvino_intel_cpu_plugin";
const char *device_name = "BLA";
IE_EXPECT_OK(ie_core_register_plugin(core, plugin_name, device_name));

View File

@@ -25,9 +25,9 @@ if(ENABLE_WHEEL)
endif()
if(ENABLE_OV_CORE_UNIT_TESTS)
add_subdirectory(tests/mock/ov_mock_py_frontend)
add_dependencies(pyopenvino ov_mock_py_frontend)
set_target_properties(ov_mock_py_frontend PROPERTIES
add_subdirectory(tests/mock/mock_py_frontend)
add_dependencies(pyopenvino openvino_mock_py_frontend)
set_target_properties(openvino_mock_py_frontend PROPERTIES
LIBRARY_OUTPUT_DIRECTORY ${LIBRARY_OUTPUT_DIRECTORY_BIN}
ARCHIVE_OUTPUT_DIRECTORY ${LIBRARY_OUTPUT_DIRECTORY_BIN}
COMPILE_PDB_OUTPUT_DIRECTORY ${LIBRARY_OUTPUT_DIRECTORY_BIN}

View File

@@ -559,7 +559,7 @@ cdef class IECore:
.. code-block:: python
ie = IECore()
ie.register_plugin(plugin="ov_intel_cpu_plugin", device_name="MY_NEW_PLUGIN")
ie.register_plugin(plugin="openvino_intel_cpu_plugin", device_name="MY_NEW_PLUGIN")
"""
self.impl.registerPlugin(plugin_name.encode(), device_name.encode())

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_FE_NAME "ov_mock_py_frontend")
set(TARGET_FE_NAME "openvino_mock_py_frontend")
file(GLOB_RECURSE LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/*.cpp)
file(GLOB_RECURSE LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/include/*.hpp)

View File

@@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "ov_mock_py_frontend/mock_py_frontend.hpp"
#include "mock_py_frontend/mock_py_frontend.hpp"
#include "openvino/frontend/manager.hpp"
#include "openvino/frontend/visibility.hpp"

View File

@@ -10,7 +10,7 @@ source_group("src" FILES ${PYBIND_FE_SRC})
pybind11_add_module(${PYBIND_FE_NAME} MODULE ${PYBIND_FE_SRC})
set(DEPENDENCIES ov_mock_py_frontend openvino::runtime)
set(DEPENDENCIES openvino_mock_py_frontend openvino::runtime)
set(DEFINITIONS)
if (ENABLE_OV_ONNX_FRONTEND)

View File

@@ -5,8 +5,8 @@
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include "ov_mock_py_frontend/frontend_wrappers.hpp"
#include "ov_mock_py_frontend/mock_py_frontend.hpp"
#include "mock_py_frontend/frontend_wrappers.hpp"
#include "mock_py_frontend/mock_py_frontend.hpp"
namespace py = pybind11;
using namespace ngraph;

View File

@@ -216,7 +216,7 @@ def test_query_model(device):
@pytest.mark.skipif(os.environ.get("TEST_DEVICE", "CPU") != "CPU", reason="Device independent test")
def test_register_plugin():
ie = Core()
ie.register_plugin("ov_intel_cpu_plugin", "BLA")
ie.register_plugin("openvino_intel_cpu_plugin", "BLA")
func = ie.read_model(model=test_net_xml, weights=test_net_bin)
exec_net = ie.compile_model(func, "BLA")
assert isinstance(exec_net, CompiledModel), \
@@ -304,9 +304,9 @@ def test_add_extension_template_extension(device):
core = Core()
if platform == "win32":
core.add_extension(library_path="ov_template_extension.dll")
core.add_extension(library_path="openvino_template_extension.dll")
else:
core.add_extension(library_path="libov_template_extension.so")
core.add_extension(library_path="libopenvino_template_extension.so")
model = core.read_model(model=ir)
assert isinstance(model, Model)

View File

@@ -1,6 +1,6 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# ov_runtime.dll directory path visibility is needed to use _pyngraph module
# openvino.dll directory path visibility is needed to use _pyngraph module
# import below causes adding this path to os.environ["PATH"]
import openvino # noqa: F401 'imported but unused'

View File

@@ -151,7 +151,7 @@ def test_blob_set_shape_after_async_infer():
net = ng.function_to_cnn(function)
net.reshape({"data": [(1, 5), 4, 20, 20]})
ie_core = IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE")
request = exec_net.requests[0]
request.async_infer({"data": np.ones([4, 4, 20, 20])})

View File

@@ -64,7 +64,7 @@ def test_is_dynamic():
function = create_relu([-1, 3, 20, 20])
net = ng.function_to_cnn(function)
ie = IECore()
ie.register_plugin("ov_template_plugin", "TEMPLATE")
ie.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie.load_network(net, "TEMPLATE")
assert exec_net.outputs["out"].is_dynamic
p_shape = ng.partial_shape_from_data(exec_net.outputs["out"])

View File

@@ -60,7 +60,7 @@ def test_is_dynamic():
net.input_info["data"].input_data.shape
assert "Cannot return dims for Data with dynamic shapes!" in str(e.value)
ie = IECore()
ie.register_plugin("ov_template_plugin", "TEMPLATE")
ie.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie.load_network(net, "TEMPLATE")
assert exec_net.input_info["data"].input_data.is_dynamic
p_shape = ng.partial_shape_from_data(exec_net.input_info["data"].input_data)

View File

@@ -89,8 +89,8 @@ def test_query_network(device):
def test_register_plugin():
ie = IECore()
if ie.get_metric("CPU", "FULL_DEVICE_NAME") == "arm_compute::NEON":
pytest.skip("Can't run on ARM plugin due-to ov_intel_cpu_plugin specific test")
ie.register_plugin("ov_intel_cpu_plugin", "BLA")
pytest.skip("Can't run on ARM plugin due-to openvino_intel_cpu_plugin specific test")
ie.register_plugin("openvino_intel_cpu_plugin", "BLA")
net = ie.read_network(model=test_net_xml, weights=test_net_bin)
exec_net = ie.load_network(net, "BLA")
assert isinstance(exec_net, ExecutableNetwork), "Cannot load the network to the registered plugin with name 'BLA'"
@@ -101,7 +101,7 @@ def test_register_plugin():
def test_register_plugins():
ie = IECore()
if ie.get_metric("CPU", "FULL_DEVICE_NAME") == "arm_compute::NEON":
pytest.skip("Can't run on ARM plugin due-to ov_intel_cpu_plugin specific test")
pytest.skip("Can't run on ARM plugin due-to openvino_intel_cpu_plugin specific test")
if platform == "linux" or platform == "linux2":
ie.register_plugins(plugins_xml)
elif platform == "darwin":

View File

@@ -284,7 +284,7 @@ def test_create_two_exec_net():
function = create_relu([ng.Dimension(0,5), ng.Dimension(4), ng.Dimension(20), ng.Dimension(20)])
net = ng.function_to_cnn(function)
ie_core = IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net1 = ie_core.load_network(net, "TEMPLATE", num_requests=2)
assert ng.function_from_cnn(net) != None
exec_net2 = ie_core.load_network(net, "TEMPLATE", num_requests=2)

View File

@@ -592,7 +592,7 @@ def test_infer_dynamic_network_with_set_shape(shape, p_shape, ref_shape):
net = ng.function_to_cnn(function)
net.reshape({"data": p_shape})
ie_core = ie.IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE")
exec_net.requests[0].input_blobs["data"].set_shape(ref_shape)
assert exec_net.requests[0].input_blobs["data"].tensor_desc.dims == ref_shape
@@ -616,7 +616,7 @@ def test_infer_dynamic_network_without_set_shape(shape, p_shape, ref_shape):
net = ng.function_to_cnn(function)
net.reshape({"data": p_shape})
ie_core = ie.IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE")
exec_net.infer({"data": np.ones(ref_shape)})
assert exec_net.requests[0].input_blobs["data"].tensor_desc.dims == ref_shape
@@ -639,7 +639,7 @@ def test_infer_dynamic_network_with_set_blob(shape, p_shape, ref_shape):
net = ng.function_to_cnn(function)
net.reshape({"data": p_shape})
ie_core = ie.IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE")
tensor_desc = exec_net.requests[0].input_blobs["data"].tensor_desc
tensor_desc.dims = ref_shape
@@ -662,7 +662,7 @@ def test_infer_dynamic_network_twice():
net = ng.function_to_cnn(function)
net.reshape({"data": p_shape})
ie_core = ie.IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE")
request = exec_net.requests[0]
request.infer({"data": np.ones(ref_shape1)})
@@ -681,7 +681,7 @@ def test_infer_dynamic_network_with_set_blob_twice():
net = ng.function_to_cnn(function)
net.reshape({"data": p_shape})
ie_core = ie.IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE")
request = exec_net.requests[0]
td = request.input_blobs['data'].tensor_desc
@@ -710,7 +710,7 @@ def test_async_infer_dynamic_network_3_requests(shapes):
net = ng.function_to_cnn(function)
net.reshape({"data": [3, 4, (20, 50), (20, 50)]})
ie_core = ie.IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE", num_requests=3)
for i,request in enumerate(exec_net.requests):
request.async_infer({"data": np.ones(shapes[i])})
@@ -725,7 +725,7 @@ def test_set_blob_with_incorrect_name():
function = create_encoder([4, 4, 20, 20])
net = ng.function_to_cnn(function)
ie_core = ie.IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE")
tensor_desc = exec_net.requests[0].input_blobs["data"].tensor_desc
tensor_desc.dims = [4, 4, 20, 20]
@@ -740,7 +740,7 @@ def test_set_blob_with_incorrect_size():
function = create_encoder([4, 4, 20, 20])
net = ng.function_to_cnn(function)
ie_core = ie.IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE")
tensor_desc = exec_net.requests[0].input_blobs["data"].tensor_desc
tensor_desc.dims = [tensor_desc.dims[0]*2, 4, 20, 20]
@@ -760,7 +760,7 @@ def test_set_blob_after_async_infer():
net = ng.function_to_cnn(function)
net.reshape({"data": [(0, 5), 4, 20, 20]})
ie_core = ie.IECore()
ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
ie_core.register_plugin("openvino_template_plugin", "TEMPLATE")
exec_net = ie_core.load_network(net, "TEMPLATE")
request = exec_net.requests[0]
tensor_desc = request.input_blobs['data'].tensor_desc

View File

@@ -1,6 +1,6 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# ov_runtime.dll directory path visibility is needed to use _pyngraph module
# openvino.dll directory path visibility is needed to use _pyngraph module
# import below causes adding this path to os.environ["PATH"]
import ngraph # noqa: F401 'imported but unused'

View File

@@ -51,7 +51,7 @@ function(set_ie_threading_interface_for TARGET_NAME)
target_type STREQUAL "MODULE_LIBRARY")
set(LINK_TYPE "PRIVATE")
elseif(target_type STREQUAL "STATIC_LIBRARY")
# Affected libraries: inference_engine_s, inference_engine_preproc_s
# Affected libraries: inference_engine_s, openvino_gapi_preproc_s
# they don't have TBB in public headers => PRIVATE
set(LINK_TYPE "PRIVATE")
elseif(target_type STREQUAL "SHARED_LIBRARY")

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME ov_runtime)
set(TARGET_NAME openvino)
add_library(${TARGET_NAME} $<TARGET_OBJECTS:ngraph_obj>
$<TARGET_OBJECTS:frontend_common_obj>
@@ -48,7 +48,7 @@ ie_mark_target_as_cc(${TARGET_NAME})
set_target_properties(${TARGET_NAME} PROPERTIES INTERPROCEDURAL_OPTIMIZATION_RELEASE ${ENABLE_LTO})
ie_register_plugins(MAIN_TARGET ${TARGET_NAME}
POSSIBLE_PLUGINS ov_auto_plugin ov_auto_batch_plugin ov_hetero_plugin ov_intel_gpu_plugin ov_intel_gna_plugin ov_intel_cpu_plugin ov_intel_myriad_plugin)
POSSIBLE_PLUGINS openvino_auto_plugin openvino_auto_batch_plugin openvino_hetero_plugin openvino_intel_gpu_plugin openvino_intel_gna_plugin openvino_intel_cpu_plugin openvino_intel_myriad_plugin)
# Export for build tree
@@ -68,11 +68,11 @@ target_include_directories(${TARGET_NAME}_dev INTERFACE $<BUILD_INTERFACE:${Open
$<BUILD_INTERFACE:${OpenVINO_SOURCE_DIR}/src/core/dev_api>
$<BUILD_INTERFACE:${OpenVINO_SOURCE_DIR}/src/inference/dev_api>
$<BUILD_INTERFACE:${OpenVINO_SOURCE_DIR}/src/common/low_precision_transformations/include>
$<TARGET_PROPERTY:inference_engine_preproc,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:openvino_gapi_preproc,INTERFACE_INCLUDE_DIRECTORIES>
)
target_compile_definitions(${TARGET_NAME}_dev INTERFACE
$<TARGET_PROPERTY:inference_engine_preproc,INTERFACE_COMPILE_DEFINITIONS>)
$<TARGET_PROPERTY:openvino_gapi_preproc,INTERFACE_COMPILE_DEFINITIONS>)
target_link_libraries(${TARGET_NAME}_dev INTERFACE ${TARGET_NAME} pugixml::static openvino::itt openvino::util)
add_library(openvino::runtime::dev ALIAS ${TARGET_NAME}_dev)

View File

@@ -12,4 +12,4 @@ add_subdirectory(transformations)
add_subdirectory(offline_transformations)
add_subdirectory(low_precision_transformations)
add_dependencies(ov_runtime_libraries inference_engine_preproc)
add_dependencies(ov_runtime_libraries openvino_gapi_preproc)

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set (TARGET_NAME "inference_engine_preproc")
set (TARGET_NAME "openvino_gapi_preproc")
file(GLOB LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
file(GLOB LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
@@ -142,7 +142,7 @@ if(ENABLE_GAPI_PREPROCESSING)
target_link_libraries(${TARGET_NAME} PRIVATE openvino::runtime)
else()
# for static linkage the dependencies are in opposite order
target_link_libraries(ov_runtime PRIVATE ${TARGET_NAME})
target_link_libraries(openvino PRIVATE ${TARGET_NAME})
endif()
# Workaround to avoid warnings caused with bug in the avx512intrin.h of GCC5
@@ -163,7 +163,7 @@ endif()
target_include_directories(${TARGET_NAME} INTERFACE
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
$<TARGET_PROPERTY:ov_runtime,INTERFACE_INCLUDE_DIRECTORIES>)
$<TARGET_PROPERTY:openvino,INTERFACE_INCLUDE_DIRECTORIES>)
# Static library used for unit tests which are always built

View File

@@ -77,7 +77,7 @@ public:
"Use 'cmake -DENABLE_GAPI_PREPROCESSING=ON ...'";
# endif // ENABLE_GAPI_PREPROCESSING
#else
ov::util::FilePath libraryName = ov::util::to_file_path(std::string("inference_engine_preproc") + std::string(IE_BUILD_POSTFIX));
ov::util::FilePath libraryName = ov::util::to_file_path(std::string("openvino_gapi_preproc") + std::string(IE_BUILD_POSTFIX));
ov::util::FilePath preprocLibraryPath = FileUtils::makePluginLibraryName(getInferenceEngineLibraryPath(), libraryName);
if (!FileUtils::fileExist(preprocLibraryPath)) {

View File

@@ -421,7 +421,7 @@ if(SUGGEST_OVERRIDE_SUPPORTED)
PROPERTIES COMPILE_OPTIONS -Wno-suggest-override)
endif()
list(APPEND UNIT_TESTS_DEPENDENCIES ov_template_extension)
list(APPEND UNIT_TESTS_DEPENDENCIES openvino_template_extension)
if (ENABLE_INTEL_CPU AND ENABLE_OV_CORE_BACKEND_UNIT_TESTS)
message(STATUS "OV_CORE_TESTS: IE:CPU enabled")
@@ -429,7 +429,7 @@ if (ENABLE_INTEL_CPU AND ENABLE_OV_CORE_BACKEND_UNIT_TESTS)
if (ENABLE_STRICT_DEPENDENCIES)
# For convinience add a runtime dependency to build along with this target.
# Warning: Parallel build with -GNinja may not be efficient.
list(APPEND UNIT_TESTS_DEPENDENCIES ov_intel_cpu_plugin)
list(APPEND UNIT_TESTS_DEPENDENCIES openvino_intel_cpu_plugin)
endif()
endif()
@@ -439,7 +439,7 @@ if (ENABLE_INTEL_GPU AND ENABLE_OV_CORE_BACKEND_UNIT_TESTS)
if (ENABLE_STRICT_DEPENDENCIES)
# For convinience add a runtime dependency to build along with this target.
# Warning: Parallel build with -GNinja may not be efficient.
list(APPEND UNIT_TESTS_DEPENDENCIES ov_intel_gpu_plugin)
list(APPEND UNIT_TESTS_DEPENDENCIES openvino_intel_gpu_plugin)
endif()
endif()
@@ -447,7 +447,7 @@ if (ENABLE_OV_CORE_BACKEND_UNIT_TESTS)
message(STATUS "OV_CORE_TESTS: INTERPRETER enabled")
set(ACTIVE_BACKEND_LIST ${ACTIVE_BACKEND_LIST} INTERPRETER)
if (ENABLE_STRICT_DEPENDENCIES)
list(APPEND UNIT_TESTS_DEPENDENCIES ov_template_plugin)
list(APPEND UNIT_TESTS_DEPENDENCIES openvino_template_plugin)
endif()
endif()
@@ -603,9 +603,9 @@ if (OV_COMPILER_IS_CLANG)
endif()
if (ENABLE_OV_ONNX_FRONTEND)
get_target_property(ONNX_FRONTEND_SRC_DIR ov_onnx_frontend SOURCE_DIR)
get_target_property(ONNX_FRONTEND_SRC_DIR openvino_onnx_frontend SOURCE_DIR)
target_include_directories(ov_core_unit_tests PRIVATE ${ONNX_FRONTEND_SRC_DIR}/src)
target_link_libraries(ov_core_unit_tests PRIVATE ov_onnx_frontend onnx_test_util)
target_link_libraries(ov_core_unit_tests PRIVATE openvino_onnx_frontend onnx_test_util)
if (LINUX)
target_link_options(ov_core_unit_tests PRIVATE -Wl,--exclude-libs,ALL)
elseif(APPLE)
@@ -614,7 +614,7 @@ if (ENABLE_OV_ONNX_FRONTEND)
endif()
if(ENABLE_OV_IR_FRONTEND)
add_dependencies(ov_core_unit_tests ov_ir_frontend)
add_dependencies(ov_core_unit_tests openvino_ir_frontend)
endif()
install(TARGETS ov_core_unit_tests

View File

@@ -34,7 +34,7 @@ public:
TestCase(const std::shared_ptr<Function>& function, const std::string& dev = "TEMPLATE") : m_function{function} {
try {
// Register template plugin
m_core.register_plugin(std::string("ov_template_plugin") + IE_BUILD_POSTFIX, "TEMPLATE");
m_core.register_plugin(std::string("openvino_template_plugin") + IE_BUILD_POSTFIX, "TEMPLATE");
} catch (...) {
}
m_request = m_core.compile_model(function, dev).create_infer_request();

View File

@@ -12,7 +12,7 @@
#include "so_extension.hpp"
inline std::string get_extension_path() {
return ov::util::make_plugin_library_name<char>({}, std::string("ov_template_extension") + IE_BUILD_POSTFIX);
return ov::util::make_plugin_library_name<char>({}, std::string("openvino_template_extension") + IE_BUILD_POSTFIX);
}
TEST(extension, load_extension) {

View File

@@ -16,12 +16,12 @@ if (ENABLE_OV_TF_FRONTEND)
endif()
set(SRC ${CMAKE_CURRENT_SOURCE_DIR}/mock_frontend.cpp)
set(MOCK1_FE_NAME ov_mock1_frontend)
set(MOCK1_FE_NAME openvino_mock1_frontend)
add_library(${MOCK1_FE_NAME} SHARED EXCLUDE_FROM_ALL ${SRC})
target_compile_definitions(${MOCK1_FE_NAME} PRIVATE "-DMOCK_VARIANT=\"1\"")
target_include_directories(${MOCK1_FE_NAME} PRIVATE ".")
target_include_directories(${MOCK1_FE_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR})
target_link_libraries(${MOCK1_FE_NAME} PRIVATE frontend_common)
add_dependencies(ov_core_unit_tests ${MOCK1_FE_NAME})

View File

@@ -35,7 +35,7 @@ using namespace ov::frontend;
static std::string find_my_pathname() {
#ifdef _WIN32
HMODULE hModule = GetModuleHandleW(SHARED_LIB_PREFIX L"ov_runtime" SHARED_LIB_SUFFIX);
HMODULE hModule = GetModuleHandleW(SHARED_LIB_PREFIX L"openvino" SHARED_LIB_SUFFIX);
WCHAR wpath[MAX_PATH];
GetModuleFileNameW(hModule, wpath, MAX_PATH);
std::wstring ws(wpath);

View File

@@ -8,11 +8,11 @@
#include "openvino/opsets/opset8.hpp"
// Defined if we are building the plugin DLL (instead of using it)
#ifdef ov_mock1_frontend_EXPORTS
#ifdef openvino_mock1_frontend_EXPORTS
# define MOCK_API OPENVINO_CORE_EXPORTS
#else
# define MOCK_API OPENVINO_CORE_IMPORTS
#endif // ov_mock1_frontend_EXPORTS
#endif // openvino_mock1_frontend_EXPORTS
using namespace ngraph;
using namespace ov::frontend;

View File

@@ -9,7 +9,7 @@ list(FILTER SRC EXCLUDE REGEX standalone_build)
add_executable(${TARGET_NAME} ${SRC})
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes ov_onnx_frontend frontend_common)
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes openvino_onnx_frontend frontend_common)
add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})
@@ -23,7 +23,7 @@ target_compile_definitions(${TARGET_NAME} PRIVATE -D TEST_ONNX_MODELS_DIRNAME=\"
set(MANIFEST ${CMAKE_CURRENT_SOURCE_DIR}/unit_test.manifest)
target_compile_definitions(${TARGET_NAME} PRIVATE -D MANIFEST=\"${MANIFEST}\")
add_dependencies(${TARGET_NAME} ov_onnx_frontend)
add_dependencies(${TARGET_NAME} openvino_onnx_frontend)
add_dependencies(${TARGET_NAME} test_model_zoo)
add_subdirectory(standalone_build)

View File

@@ -9,7 +9,7 @@ list(FILTER SRC EXCLUDE REGEX standalone_build)
add_executable(${TARGET_NAME} ${SRC})
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes ov_paddle_frontend openvino::runtime)
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes openvino_paddle_frontend openvino::runtime)
add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})
@@ -56,11 +56,11 @@ else()
endif()
add_dependencies(${TARGET_NAME} paddle_test_models)
add_dependencies(${TARGET_NAME} ov_paddle_frontend)
add_dependencies(${TARGET_NAME} openvino_paddle_frontend)
# Fuzzy tests for PaddlePaddle use IE_CPU engine
if (ENABLE_INTEL_CPU)
add_dependencies(${TARGET_NAME} ov_intel_cpu_plugin)
add_dependencies(${TARGET_NAME} openvino_intel_cpu_plugin)
endif()
add_subdirectory(standalone_build)

View File

@@ -9,7 +9,7 @@ list(FILTER SRC EXCLUDE REGEX standalone_build)
add_executable(${TARGET_NAME} ${SRC})
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes ov_tensorflow_frontend openvino::runtime)
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes openvino_tensorflow_frontend openvino::runtime)
add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})
@@ -64,9 +64,9 @@ else()
endif()
add_dependencies(${TARGET_NAME} tensorflow_test_models)
add_dependencies(${TARGET_NAME} ov_tensorflow_frontend)
add_dependencies(${TARGET_NAME} openvino_tensorflow_frontend)
get_target_property(TENSORFLOW_FRONTEND_SRC_DIR ov_tensorflow_frontend SOURCE_DIR)
get_target_property(TENSORFLOW_FRONTEND_SRC_DIR openvino_tensorflow_frontend SOURCE_DIR)
target_include_directories(${TARGET_NAME} PRIVATE ${TENSORFLOW_FRONTEND_SRC_DIR}/src/pass/)
add_subdirectory(standalone_build)

View File

@@ -7,7 +7,7 @@ add_library(${TARGET_NAME} STATIC standalone_build_test.cpp)
# This test verifies that application can link to TensorFlow frontend only
# Other dependencies on core header files will be resolved automatically
target_link_libraries(${TARGET_NAME} PUBLIC ov_tensorflow_frontend)
target_link_libraries(${TARGET_NAME} PUBLIC openvino_tensorflow_frontend)
# Enable code style check
add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})

View File

@@ -109,7 +109,7 @@ public:
private:
// Helper structure for searching plugin either by name or by file name
// File name here doesn't contain prefix/suffix (like "ov_*_frontend.so")
// File name here doesn't contain prefix/suffix (like "openvino_*_frontend.so")
struct FrontEndNames {
FrontEndNames(std::string n, std::string f) : name(std::move(n)), file_name(std::move(f)) {}
bool operator==(const FrontEndNames& other) const {

View File

@@ -24,7 +24,7 @@ class PluginInfo {
bool load_internal();
public:
std::string m_file_name; // Plugin file name, e.g. "libov_ir_frontend.so"
std::string m_file_name; // Plugin file name, e.g. "libopenvino_ir_frontend.so"
std::string m_file_path; // Plugin file full path
PluginInfo() = default;

View File

@@ -10,11 +10,11 @@
# define IR_API
# define IR_C_API
#else
# ifdef ov_ir_frontend_EXPORTS
# ifdef openvino_ir_frontend_EXPORTS
# define IR_API OPENVINO_CORE_EXPORTS
# define IR_C_API OPENVINO_EXTERN_C OPENVINO_CORE_EXPORTS
# else
# define IR_API OPENVINO_CORE_IMPORTS
# define IR_C_API OPENVINO_EXTERN_C OPENVINO_CORE_IMPORTS
# endif // ov_ir_frontend_EXPORTS
# endif // openvino_ir_frontend_EXPORTS
#endif // OPENVINO_STATIC_LIBRARY

View File

@@ -7,9 +7,9 @@
#ifdef OPENVINO_STATIC_LIBRARY
# define ONNX_IMPORTER_API
#else
# ifdef ov_onnx_frontend_EXPORTS
# ifdef openvino_onnx_frontend_EXPORTS
# define ONNX_IMPORTER_API OPENVINO_CORE_EXPORTS
# else
# define ONNX_IMPORTER_API OPENVINO_CORE_IMPORTS
# endif // ov_onnx_frontend_EXPORTS
# endif // openvino_onnx_frontend_EXPORTS
#endif // OPENVINO_STATIC_LIBRARY

View File

@@ -10,11 +10,11 @@
# define ONNX_FRONTEND_API
# define ONNX_FRONTEND_C_API
#else
# ifdef ov_onnx_frontend_EXPORTS
# ifdef openvino_onnx_frontend_EXPORTS
# define ONNX_FRONTEND_API OPENVINO_CORE_EXPORTS
# define ONNX_FRONTEND_C_API OPENVINO_EXTERN_C OPENVINO_CORE_EXPORTS
# else
# define ONNX_FRONTEND_API OPENVINO_CORE_IMPORTS
# define ONNX_FRONTEND_C_API OPENVINO_EXTERN_C OPENVINO_CORE_IMPORTS
# endif // ov_onnx_frontend_EXPORTS
# endif // openvino_onnx_frontend_EXPORTS
#endif // OPENVINO_STATIC_LIBRARY

View File

@@ -10,11 +10,11 @@
# define PADDLE_API
# define PADDLE_C_API
#else
# ifdef ov_paddle_frontend_EXPORTS
# ifdef openvino_paddle_frontend_EXPORTS
# define PADDLE_API OPENVINO_CORE_EXPORTS
# define PADDLE_C_API OPENVINO_EXTERN_C OPENVINO_CORE_EXPORTS
# else
# define PADDLE_API OPENVINO_CORE_IMPORTS
# define PADDLE_C_API OPENVINO_EXTERN_C OPENVINO_CORE_IMPORTS
# endif // ov_paddle_frontend_EXPORTS
# endif // openvino_paddle_frontend_EXPORTS
#endif // OPENVINO_STATIC_LIBRARY

View File

@@ -10,11 +10,11 @@
# define TENSORFLOW_API
# define TENSORFLOW_C_API
#else
# ifdef ov_tensorflow_frontend_EXPORTS
# ifdef openvino_tensorflow_frontend_EXPORTS
# define TENSORFLOW_API OPENVINO_CORE_EXPORTS
# define TENSORFLOW_C_API OPENVINO_EXTERN_C OPENVINO_CORE_EXPORTS
# else
# define TENSORFLOW_API OPENVINO_CORE_IMPORTS
# define TENSORFLOW_C_API OPENVINO_EXTERN_C OPENVINO_CORE_IMPORTS
# endif // ov_tensorflow_frontend_EXPORTS
# endif // openvino_tensorflow_frontend_EXPORTS
#endif // OPENVINO_STATIC_LIBRARY

View File

@@ -93,13 +93,13 @@ source_group("include" FILES ${LIBRARY_HEADERS} ${PUBLIC_HEADERS})
add_library(${TARGET_NAME}_plugin_api INTERFACE)
target_include_directories(${TARGET_NAME}_plugin_api INTERFACE
$<TARGET_PROPERTY:${TARGET_NAME}_preproc,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:openvino_gapi_preproc,INTERFACE_INCLUDE_DIRECTORIES>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/dev_api>
$<BUILD_INTERFACE:${PUBLIC_HEADERS_DIR}>
$<BUILD_INTERFACE:${PUBLIC_HEADERS_DIR}/ie>)
target_compile_definitions(${TARGET_NAME}_plugin_api INTERFACE
$<TARGET_PROPERTY:${TARGET_NAME}_preproc,INTERFACE_COMPILE_DEFINITIONS>)
$<TARGET_PROPERTY:openvino_gapi_preproc,INTERFACE_COMPILE_DEFINITIONS>)
target_link_libraries(${TARGET_NAME}_plugin_api INTERFACE pugixml::static openvino::itt openvino::util)
@@ -126,7 +126,7 @@ ie_faster_build(${TARGET_NAME}_obj
target_compile_definitions(${TARGET_NAME}_obj PRIVATE IMPLEMENT_INFERENCE_ENGINE_API
$<TARGET_PROPERTY:ngraph,INTERFACE_COMPILE_DEFINITIONS>
$<TARGET_PROPERTY:frontend_common::static,INTERFACE_COMPILE_DEFINITIONS>
$<TARGET_PROPERTY:${TARGET_NAME}_preproc,INTERFACE_COMPILE_DEFINITIONS>)
$<TARGET_PROPERTY:openvino_gapi_preproc,INTERFACE_COMPILE_DEFINITIONS>)
target_include_directories(${TARGET_NAME}_obj SYSTEM PRIVATE $<TARGET_PROPERTY:ngraph,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:pugixml::static,INTERFACE_INCLUDE_DIRECTORIES>
@@ -187,7 +187,7 @@ if(WIN32)
endif()
target_link_libraries(${TARGET_NAME}_s PRIVATE openvino::itt ${CMAKE_DL_LIBS} ngraph
frontend_common::static inference_engine_preproc_s inference_engine_transformations pugixml::static)
frontend_common::static openvino_gapi_preproc_s inference_engine_transformations pugixml::static)
target_compile_definitions(${TARGET_NAME}_s PUBLIC USE_STATIC_IE)

View File

@@ -85,7 +85,7 @@ DECLARE_METRIC_KEY(HDDL_DEVICE_TOTAL_NUM, int);
} // namespace Metrics
/**
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: Arbitrary non-empty string. If empty (""), equals no set, default: "";
* This option allows to specify the number of MYX devices used for inference a specific Executable network.
* Note: Only one network would be allocated to one device.
@@ -103,7 +103,7 @@ DECLARE_METRIC_KEY(HDDL_DEVICE_TOTAL_NUM, int);
DECLARE_VPU_CONFIG(HDDL_GRAPH_TAG);
/**
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: Arbitrary non-empty string. If empty (""), equals no set, default: "";
* This config makes the executable networks to be allocated on one certain device (instead of multiple devices).
* And all inference through this executable network, will be done on this device.
@@ -119,7 +119,7 @@ DECLARE_VPU_CONFIG(HDDL_GRAPH_TAG);
DECLARE_VPU_CONFIG(HDDL_STREAM_ID);
/**
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: Arbitrary non-empty string. If empty (""), equals no set, default: "";
* This config allows user to control device flexibly. This config gives a "tag" for a certain device while
* allocating a network to it. Afterward, user can allocating/deallocating networks to this device with this "tag".
@@ -135,7 +135,7 @@ DECLARE_VPU_CONFIG(HDDL_STREAM_ID);
DECLARE_VPU_CONFIG(HDDL_DEVICE_TAG);
/**
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: "YES/NO", default is "NO".
* This config is a sub-config of DEVICE_TAG, and only available when "DEVICE_TAG" is set. After a user load a
* network, the user got a handle for the network.
@@ -148,7 +148,7 @@ DECLARE_VPU_CONFIG(HDDL_DEVICE_TAG);
DECLARE_VPU_CONFIG(HDDL_BIND_DEVICE);
/**
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: A signed int wrapped in a string, default is "0".
* This config is a sub-config of DEVICE_TAG, and only available when "DEVICE_TAG" is set and "BIND_DEVICE" is "False".
* When there are multiple devices running a certain network (a same network running on multiple devices in Bypass
@@ -158,7 +158,7 @@ DECLARE_VPU_CONFIG(HDDL_BIND_DEVICE);
DECLARE_VPU_CONFIG(HDDL_RUNTIME_PRIORITY);
/**
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: "YES/NO", default is "NO".
* SGAD is short for "Single Graph All Device". With this scheduler, once application allocates 1 network, all devices
* (managed by SGAD scheduler) will be loaded with this graph. The number of network that can be loaded to one device
@@ -167,7 +167,7 @@ DECLARE_VPU_CONFIG(HDDL_RUNTIME_PRIORITY);
DECLARE_VPU_CONFIG(HDDL_USE_SGAD);
/**
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: A signed int wrapped in a string, default is "0".
* This config gives a "group id" for a certain device when this device has been reserved for certain client, client
* can use this device grouped by calling this group id while other client can't use this device

View File

@@ -146,7 +146,7 @@ namespace VPUConfigParams {
/**
* @deprecated Use InferenceEngine::HDDL_GRAPH_TAG instead
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: Arbitrary non-empty string. If empty (""), equals no set, default: "";
* This option allows to specify the number of MYX devices used for inference a specific Executable network.
* Note: Only one network would be allocated to one device.
@@ -166,7 +166,7 @@ DECLARE_VPU_HDDL_CONFIG_KEY(GRAPH_TAG);
/**
* @deprecated Use InferenceEngine::HDDL_STREAM_ID instead
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: Arbitrary non-empty string. If empty (""), equals no set, default: "";
* This config makes the executable networks to be allocated on one certain device (instead of multiple devices).
* And all inference through this executable network, will be done on this device.
@@ -184,7 +184,7 @@ DECLARE_VPU_HDDL_CONFIG_KEY(STREAM_ID);
/**
* @deprecated Use InferenceEngine::HDDL_DEVICE_TAG instead
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: Arbitrary non-empty string. If empty (""), equals no set, default: "";
* This config allows user to control device flexibly. This config gives a "tag" for a certain device while
* allocating a network to it. Afterward, user can allocating/deallocating networks to this device with this "tag".
@@ -202,7 +202,7 @@ DECLARE_VPU_HDDL_CONFIG_KEY(DEVICE_TAG);
/**
* @deprecated Use InferenceEngine::HDDL_BIND_DEVICE instead
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: "YES/NO", default is "NO".
* This config is a sub-config of DEVICE_TAG, and only available when "DEVICE_TAG" is set. After a user load a
* network, the user got a handle for the network.
@@ -217,7 +217,7 @@ DECLARE_VPU_HDDL_CONFIG_KEY(BIND_DEVICE);
/**
* @deprecated Use InferenceEngine::HDDL_RUNTIME_PRIORITY instead
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: A signed int wrapped in a string, default is "0".
* This config is a sub-config of DEVICE_TAG, and only available when "DEVICE_TAG" is set and "BIND_DEVICE" is "False".
* When there are multiple devices running a certain network (a same network running on multiple devices in Bypass
@@ -229,7 +229,7 @@ DECLARE_VPU_HDDL_CONFIG_KEY(RUNTIME_PRIORITY);
/**
* @deprecated Use InferenceEngine::HDDL_USE_SGAD instead
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: "YES/NO", default is "NO".
* SGAD is short for "Single Graph All Device". With this scheduler, once application allocates 1 network, all devices
* (managed by SGAD scheduler) will be loaded with this graph. The number of network that can be loaded to one device
@@ -240,7 +240,7 @@ DECLARE_VPU_HDDL_CONFIG_KEY(USE_SGAD);
/**
* @deprecated Use InferenceEngine::HDDL_GROUP_DEVICE instead
* @brief [Only for ov_intel_hddl_plugin]
* @brief [Only for OpenVINO Intel HDDL device]
* Type: A signed int wrapped in a string, default is "0".
* This config gives a "group id" for a certain device when this device has been reserved for certain client, client
* can use this device grouped by calling this group id while other client can't use this device

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set (TARGET_NAME "ov_auto_plugin")
set (TARGET_NAME "openvino_auto_plugin")
file(GLOB SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp ${CMAKE_CURRENT_SOURCE_DIR}/utils/*.cpp)

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME "ov_auto_batch_plugin")
set(TARGET_NAME "openvino_auto_batch_plugin")
file(GLOB SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set (TARGET_NAME "ov_hetero_plugin")
set (TARGET_NAME "openvino_hetero_plugin")
file(GLOB SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
file(GLOB_RECURSE HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME "ov_intel_cpu_plugin")
set(TARGET_NAME "openvino_intel_cpu_plugin")
if(CMAKE_COMPILER_IS_GNUCC)
ie_add_compiler_flags(-Wno-all)
@@ -70,7 +70,7 @@ if(BUILD_SHARED_LIBS)
add_library(${TARGET_NAME}_obj OBJECT ${SOURCES} ${HEADERS})
link_system_libraries(${TARGET_NAME}_obj PUBLIC mkldnn)
target_include_directories(${TARGET_NAME}_obj PRIVATE $<TARGET_PROPERTY:inference_engine_preproc_s,INTERFACE_INCLUDE_DIRECTORIES>
target_include_directories(${TARGET_NAME}_obj PRIVATE $<TARGET_PROPERTY:openvino_gapi_preproc_s,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:openvino::runtime::dev,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:openvino::itt,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:ov_shape_inference,INTERFACE_INCLUDE_DIRECTORIES>

View File

@@ -878,5 +878,5 @@ InferenceEngine::IExecutableNetworkInternal::Ptr Engine::ImportNetwork(std::istr
return execNetwork;
}
static const Version version = {{2, 1}, CI_BUILD_NUMBER, "ov_intel_cpu_plugin"};
static const Version version = {{2, 1}, CI_BUILD_NUMBER, "openvino_intel_cpu_plugin"};
IE_DEFINE_PLUGIN_CREATE_FUNCTION(Engine, version)

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME "ov_intel_gna_plugin")
set(TARGET_NAME "openvino_intel_gna_plugin")
if(CMAKE_COMPILER_IS_GNUCC)
ie_add_compiler_flags(-Wno-all)
@@ -57,7 +57,7 @@ target_compile_definitions(${TARGET_NAME}_test_static
INTEGER_LOW_P
USE_STATIC_IE)
target_link_libraries(${TARGET_NAME}_test_static PUBLIC inference_engine_s inference_engine_preproc_s inference_engine_transformations libGNA::API)
target_link_libraries(${TARGET_NAME}_test_static PUBLIC inference_engine_s openvino_gapi_preproc_s inference_engine_transformations libGNA::API)
target_include_directories(${TARGET_NAME}_test_static
PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}

View File

@@ -14,7 +14,7 @@ static const Version gnaPluginDescription = {
CI_BUILD_NUMBER
"_with_GNA_LIB_VER==2"
,
"ov_intel_gna_plugin"
"openvino_intel_gna_plugin"
};
IE_DEFINE_PLUGIN_CREATE_FUNCTION(GNAPluginInternal, gnaPluginDescription)

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set (TARGET_NAME "ov_intel_gpu_plugin")
set (TARGET_NAME "openvino_intel_gpu_plugin")
if(CMAKE_COMPILER_IS_GNUCXX)
ie_add_compiler_flags(-Wno-all -Wno-missing-declarations)
@@ -36,7 +36,7 @@ ie_add_plugin(NAME ${TARGET_NAME}
target_compile_options(${TARGET_NAME} PRIVATE
$<$<CONFIG:Release>:$<IF:$<CXX_COMPILER_ID:MSVC>,/Os,-Os>>)
target_link_libraries(${TARGET_NAME} PRIVATE ov_intel_gpu_graph
target_link_libraries(${TARGET_NAME} PRIVATE openvino_intel_gpu_graph
pugixml::static)
target_include_directories(${TARGET_NAME} PRIVATE

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME "ov_intel_gpu_graph")
set(TARGET_NAME "openvino_intel_gpu_graph")
file(GLOB_RECURSE LIBRARY_SRC
"${INCLUDE_DIR}/*.h"
@@ -34,8 +34,8 @@ target_compile_options(${TARGET_NAME} PRIVATE
$<$<CONFIG:Release>:$<IF:$<CXX_COMPILER_ID:MSVC>,/Os,-Os>>)
target_link_libraries(${TARGET_NAME} PUBLIC OpenCL)
target_link_libraries(${TARGET_NAME} PRIVATE ov_intel_gpu_kernels
ov_intel_gpu_runtime
target_link_libraries(${TARGET_NAME} PRIVATE openvino_intel_gpu_kernels
openvino_intel_gpu_runtime
openvino::itt
openvino::runtime::dev
openvino::runtime)

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME "ov_intel_gpu_kernels")
set(TARGET_NAME "openvino_intel_gpu_kernels")
find_package(PythonInterp 3 QUIET)
if(NOT PYTHONINTERP_FOUND)

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME "ov_intel_gpu_runtime")
set(TARGET_NAME "openvino_intel_gpu_runtime")
file(GLOB LIBRARY_SOURCES_MAIN
"${CMAKE_CURRENT_SOURCE_DIR}/*.h"

View File

@@ -37,7 +37,7 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS_RELEASE "-Wno-error=maybe-uninitialized -Wno-maybe-uninitialized")
endif()
target_link_libraries(${TARGET_NAME} PRIVATE ov_intel_gpu_graph
target_link_libraries(${TARGET_NAME} PRIVATE openvino_intel_gpu_graph
inference_engine
OpenCL
gtest
@@ -46,8 +46,8 @@ target_link_libraries(${TARGET_NAME} PRIVATE ov_intel_gpu_graph
target_include_directories(${TARGET_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/test_utils/
$<TARGET_PROPERTY:ov_intel_gpu_kernels,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:ov_intel_gpu_runtime,INTERFACE_INCLUDE_DIRECTORIES>)
$<TARGET_PROPERTY:openvino_intel_gpu_kernels,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:openvino_intel_gpu_runtime,INTERFACE_INCLUDE_DIRECTORIES>)
if(WIN32)
target_link_libraries(${TARGET_NAME} PRIVATE setupapi)
elseif((NOT ANDROID) AND (UNIX))

View File

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME "ov_intel_myriad_plugin")
set(TARGET_NAME "openvino_intel_myriad_plugin")
file(GLOB_RECURSE SOURCES *.cpp *.hpp *.h)

View File

@@ -9,5 +9,5 @@
using namespace InferenceEngine;
using namespace vpu::MyriadPlugin;
static const Version version = {{2, 1}, CI_BUILD_NUMBER, "ov_intel_myriad_plugin"};
static const Version version = {{2, 1}, CI_BUILD_NUMBER, "openvino_intel_myriad_plugin"};
IE_DEFINE_PLUGIN_CREATE_FUNCTION(Engine, version, std::make_shared<Mvnc>())

View File

@@ -24,14 +24,14 @@ set(LINK_LIBRARIES
set(DEPENDENCIES
mock_engine
template_extension
ov_template_extension
openvino_template_extension
lptNgraphFunctions
sharedTestClasses
test_model_zoo
)
if(ENABLE_OV_IR_FRONTEND)
list(APPEND DEPENDENCIES ov_ir_frontend)
list(APPEND DEPENDENCIES openvino_ir_frontend)
endif()
if(ENABLE_IR_V7_READER)
@@ -39,15 +39,15 @@ if(ENABLE_IR_V7_READER)
endif()
if(ENABLE_HETERO)
list(APPEND DEPENDENCIES ov_hetero_plugin)
list(APPEND DEPENDENCIES openvino_hetero_plugin)
endif()
if(ENABLE_AUTO OR ENABLE_MULTI)
list(APPEND DEPENDENCIES ov_auto_plugin)
list(APPEND DEPENDENCIES openvino_auto_plugin)
endif()
if(ENABLE_AUTO_BATCH)
list(APPEND DEPENDENCIES ov_auto_batch_plugin)
list(APPEND DEPENDENCIES openvino_auto_batch_plugin)
endif()
if (NOT ENABLE_OV_ONNX_FRONTEND)
@@ -76,13 +76,13 @@ if(ENABLE_OV_ONNX_FRONTEND)
target_compile_definitions(${TARGET_NAME} PRIVATE
ENABLE_OV_ONNX_FRONTEND
ONNX_TEST_MODELS="${TEST_MODEL_ZOO}/onnx_reader/models/")
add_dependencies(${TARGET_NAME} ov_onnx_frontend)
add_dependencies(${TARGET_NAME} openvino_onnx_frontend)
endif()
if(ENABLE_OV_PADDLE_FRONTEND)
target_compile_definitions(${TARGET_NAME} PRIVATE
PADDLE_TEST_MODELS="${CMAKE_CURRENT_SOURCE_DIR}/paddle_reader/models/")
add_dependencies(${TARGET_NAME} ov_paddle_frontend)
add_dependencies(${TARGET_NAME} openvino_paddle_frontend)
endif()
ie_faster_build(${TARGET_NAME}
@@ -196,7 +196,7 @@ function(ie_headers_compilation_with_custom_flags)
target_compile_definitions(${target_name} PRIVATE $<TARGET_PROPERTY:inference_engine,INTERFACE_COMPILE_DEFINITIONS>)
if(IE_TEST_PLUGIN_API)
# ngraph, pugixml::static, openvino::itt, inference_engine_preproc headers
# ngraph, pugixml::static, openvino::itt, openvino_gapi_preproc headers
target_include_directories(${target_name} SYSTEM PRIVATE
$<TARGET_PROPERTY:pugixml::static,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:openvino::itt,INTERFACE_INCLUDE_DIRECTORIES>)

View File

@@ -28,7 +28,7 @@ static std::string get_extension_path() {
}
static std::string get_ov_extension_path() {
return FileUtils::makePluginLibraryName<char>({}, std::string("ov_template_extension") + IE_BUILD_POSTFIX);
return FileUtils::makePluginLibraryName<char>({}, std::string("openvino_template_extension") + IE_BUILD_POSTFIX);
}
class CustomOpsSerializationTest : public ::testing::Test {

View File

@@ -174,7 +174,7 @@ public:
namespace {
std::string getOVExtensionPath() {
return FileUtils::makePluginLibraryName<char>({}, std::string("ov_template_extension") + IE_BUILD_POSTFIX);
return FileUtils::makePluginLibraryName<char>({}, std::string("openvino_template_extension") + IE_BUILD_POSTFIX);
}
} // namespace

View File

@@ -18,8 +18,8 @@ addIeTargetTest(
ADD_CPPLINT
)
if(TARGET ov_ir_frontend)
add_dependencies(${TARGET_NAME} ov_ir_frontend)
if(TARGET openvino_ir_frontend)
add_dependencies(${TARGET_NAME} openvino_ir_frontend)
endif()
ie_faster_build(${TARGET_NAME} UNITY)

View File

@@ -36,7 +36,7 @@ The target is able to take the following command-line arguments:
* `-h` prints target command-line options with description.
* `--device` specifies target device.
* `--input_folders` specifies folders with IRs to run. The separator is `,`.
* `--plugin_lib_name` is name of plugin library. The example is ov_intel_cpu_plugin. Use only with unregistered in IE Core devices.
* `--plugin_lib_name` is name of plugin library. The example is `openvino_intel_cpu_plugin`. Use only with unregistered in IE Core devices.
* `--disable_test_config` allows to ignore all skipped tests with the exception of `DISABLED_` prefix using.
* `--skip_config_path` allows to specify paths to files contain regular expressions list to skip tests. [Examples](./op_conformance_runner/skip_configs)
* `--config_path` allows to specify path to file contains plugin config. [Example](./op_conformance_runner/config/config_example.txt)

View File

@@ -13,18 +13,17 @@ namespace conformance {
inline const std::string getPluginLibNameByDevice(const std::string& deviceName) {
const std::map<std::string, std::string> devices{
{ "AUTO", "ov_multi_plugin" },
{ "HDDL", "ov_intel_hddl_plugin" },
{ "VPUX", "ov_intel_vpux_plugin" },
{ "AUTO", "ov_auto_plugin" },
{ "CPU", "ov_intel_cpu_plugin" },
{ "GNA", "ov_intel_gna_plugin" },
{ "GPU", "ov_intel_gpu_plugin" },
{ "HETERO", "ov_hetero_plugin" },
{ "BATCH", "ov_auto_batch_plugin" },
{ "MULTI", "ov_multi_plugin" },
{ "MYRIAD", "ov_intel_myriad_plugin" },
{ "TEMPLATE", "ov_template_plugin" },
{ "AUTO", "openvino_auto_plugin" },
{ "HDDL", "openvino_intel_hddl_plugin" },
{ "VPUX", "openvino_intel_vpux_plugin" },
{ "CPU", "openvino_intel_cpu_plugin" },
{ "GNA", "openvino_intel_gna_plugin" },
{ "GPU", "openvino_intel_gpu_plugin" },
{ "HETERO", "openvino_hetero_plugin" },
{ "BATCH", "openvino_auto_batch_plugin" },
{ "MULTI", "openvino_auto_plugin" },
{ "MYRIAD", "openvino_intel_myriad_plugin" },
{ "TEMPLATE", "openvino_template_plugin" },
};
if (devices.find(deviceName) == devices.end()) {
throw std::runtime_error("Incorrect device name");

View File

@@ -15,17 +15,17 @@ namespace conformance {
inline const std::string get_plugin_lib_name_by_device(const std::string& deviceName) {
const std::map<std::string, std::string> devices{
{ "AUTO", "ov_auto_plugin" },
{ "HDDL", "HDDLPlugin" },
{ "VPUX", "ov_intel_vpux_plugin" },
{ "AUTO", "ov_auto_plugin" },
{ "CPU", "ov_intel_cpu_plugin" },
{ "GNA", "ov_intel_gna_plugin" },
{ "GPU", "ov_intel_gpu_plugin" },
{ "HETERO", "ov_hetero_plugin" },
{ "MULTI", "ov_multi_plugin" },
{ "MYRIAD", "ov_intel_vpu_plugin" },
{ "TEMPLATE", "ov_template_plugin" },
{ "AUTO", "openvino_auto_plugin" },
{ "HDDL", "intel_hddl_plugin" },
{ "VPUX", "openvino_intel_vpux_plugin" },
{ "AUTO", "openvino_auto_plugin" },
{ "CPU", "openvino_intel_cpu_plugin" },
{ "GNA", "openvino_intel_gna_plugin" },
{ "GPU", "openvino_intel_gpu_plugin" },
{ "HETERO", "openvino_hetero_plugin" },
{ "MULTI", "openvino_auto_plugin" },
{ "MYRIAD", "openvino_intel_myriad_plugin" },
{ "TEMPLATE", "openvino_template_plugin" },
};
if (devices.find(deviceName) == devices.end()) {
throw std::runtime_error("Incorrect device name");

View File

@@ -22,7 +22,7 @@ static const char target_device_message[] = "Required. Specify the target device
"The application looks for a suitable plugin for the specified device.";
static const char input_folders_message[] = "Required. Paths to the input folders with IRs. Delimiter is `,` symbol.";
static const char target_plugin_message[] =
"Optional. Name of plugin library. The example is ov_intel_cpu_plugin. Use only with unregistered in IE Core devices";
"Optional. Name of plugin library. The example is `openvino_intel_cpu_plugin`. Use only with unregistered in IE Core devices";
static const char output_folder_message[] = "Optional. Paths to the output folder to save report. Default value is \".\"";
static const char report_unique_name_message[] = "Optional. Allow to save report with unique name (report_pid_timestamp.xml). "
"Mutually exclusive with --extend_report. Default value is false";

View File

@@ -4,12 +4,12 @@
set(TARGET_NAME cpuFuncTests)
add_library(cpuSpecificRtInfo STATIC $<TARGET_PROPERTY:ov_intel_cpu_plugin,SOURCE_DIR>/src/utils/rt_info/memory_formats_attribute.hpp
$<TARGET_PROPERTY:ov_intel_cpu_plugin,SOURCE_DIR>/src/utils/rt_info/memory_formats_attribute.cpp)
add_library(cpuSpecificRtInfo STATIC $<TARGET_PROPERTY:openvino_intel_cpu_plugin,SOURCE_DIR>/src/utils/rt_info/memory_formats_attribute.hpp
$<TARGET_PROPERTY:openvino_intel_cpu_plugin,SOURCE_DIR>/src/utils/rt_info/memory_formats_attribute.cpp)
target_link_libraries(cpuSpecificRtInfo PRIVATE openvino::runtime)
set(INCLUDES ${CMAKE_CURRENT_SOURCE_DIR} $<TARGET_PROPERTY:ov_intel_cpu_plugin,SOURCE_DIR>/src)
set(DEPENDENCIES ov_intel_cpu_plugin)
set(INCLUDES ${CMAKE_CURRENT_SOURCE_DIR} $<TARGET_PROPERTY:openvino_intel_cpu_plugin,SOURCE_DIR>/src)
set(DEPENDENCIES openvino_intel_cpu_plugin)
set(LINK_LIBRARIES funcSharedTests cpuSpecificRtInfo)
if (ENABLE_OV_ONNX_FRONTEND)
list(APPEND INCLUDES "${OpenVINO_SOURCE_DIR}/docs/onnx_custom_op")

View File

@@ -15,7 +15,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(
smoke_OVClassCommon, OVClassBasicTestP,
::testing::Values(std::make_pair("ov_intel_cpu_plugin", "CPU")));
::testing::Values(std::make_pair("openvino_intel_cpu_plugin", "CPU")));
INSTANTIATE_TEST_SUITE_P(
smoke_OVClassNetworkTestP, OVClassNetworkTestP,

View File

@@ -15,7 +15,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(
smoke_IEClassCommon, IEClassBasicTestP,
::testing::Values(std::make_pair("ov_intel_cpu_plugin", "CPU")));
::testing::Values(std::make_pair("openvino_intel_cpu_plugin", "CPU")));
INSTANTIATE_TEST_SUITE_P(
smoke_IEClassNetworkTestP, IEClassNetworkTestP,

View File

@@ -16,13 +16,13 @@ using namespace HeteroTests;
INSTANTIATE_TEST_SUITE_P(smoke_SingleMajorNode, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"CPU0", "ov_intel_cpu_plugin"}, {"CPU1", "ov_intel_cpu_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"CPU0", "openvino_intel_cpu_plugin"}, {"CPU1", "openvino_intel_cpu_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::_singleMajorNodeFunctions)),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(nightly_RandomMajorNodes, HeteroSyntheticTest,
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"CPU0", "ov_intel_cpu_plugin"}, {"CPU1", "ov_intel_cpu_plugin"}}),
::testing::Values(std::vector<PluginParameter>{{"CPU0", "openvino_intel_cpu_plugin"}, {"CPU1", "openvino_intel_cpu_plugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::_randomMajorNodeFunctions)),
HeteroSyntheticTest::getTestCaseName);

View File

@@ -8,7 +8,7 @@ addIeTargetTest(
NAME ${TARGET_NAME}
ROOT ${CMAKE_CURRENT_SOURCE_DIR}
DEPENDENCIES
ov_intel_gna_plugin
openvino_intel_gna_plugin
LINK_LIBRARIES
funcSharedTests
ADD_CPPLINT

View File

@@ -17,7 +17,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(nightly_OVClassBasicTestP,
OVClassBasicTestP,
::testing::Values(std::make_pair("ov_intel_gna_plugin", "GNA")));
::testing::Values(std::make_pair("openvino_intel_gna_plugin", "GNA")));
// TODO
INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_OVClassNetworkTestP, OVClassNetworkTestP, ::testing::Values("GNA"));

View File

@@ -15,7 +15,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(
nightly_IEClassBasicTestP, IEClassBasicTestP,
::testing::Values(std::make_pair("ov_intel_gna_plugin", "GNA")));
::testing::Values(std::make_pair("openvino_intel_gna_plugin", "GNA")));
// TODO
INSTANTIATE_TEST_SUITE_P(

View File

@@ -12,7 +12,7 @@ addIeTargetTest(
INCLUDES
${CMAKE_CURRENT_SOURCE_DIR}
DEPENDENCIES
ov_intel_gpu_plugin
openvino_intel_gpu_plugin
LINK_LIBRARIES
funcSharedTests
OpenCL

View File

@@ -23,7 +23,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(nightly_OVClassCommon,
OVClassBasicTestP,
::testing::Values(std::make_pair("ov_intel_gpu_plugin", "GPU")));
::testing::Values(std::make_pair("openvino_intel_gpu_plugin", "GPU")));
INSTANTIATE_TEST_SUITE_P(nightly_OVClassNetworkTestP, OVClassNetworkTestP, ::testing::Values("GPU"));

View File

@@ -21,7 +21,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(
nightly_IEClassCommon, IEClassBasicTestP,
::testing::Values(std::make_pair("ov_intel_gpu_plugin", "GPU"))
::testing::Values(std::make_pair("openvino_intel_gpu_plugin", "GPU"))
);
INSTANTIATE_TEST_SUITE_P(

View File

@@ -17,7 +17,7 @@ addIeTargetTest(
${XLINK_INCLUDE}
${XLINK_PLATFORM_INCLUDE}
DEPENDENCIES
ov_intel_myriad_plugin
openvino_intel_myriad_plugin
LINK_LIBRARIES
vpu_graph_transformer
vpu_common_lib

Some files were not shown because too many files have changed in this diff Show More