[GPU] Move plugin to new source dir (#8890)

Author: Vladimir Paramuzov, 2021-11-30 17:26:09 +03:00 (committed by GitHub)
commit 9b97619687 (parent 9b6b184e5e)
99 changed files with 42 additions and 44 deletions

View File

@@ -98,7 +98,7 @@ jobs:
-DENABLE_CPPLINT=OFF
-DENABLE_TESTS=OFF
-DENABLE_MKL_DNN=ON
-DENABLE_CLDNN=OFF
-DENABLE_INTEL_GPU=OFF
-DENABLE_PROFILING_ITT=OFF
-DENABLE_SAMPLES=OFF
-DNGRAPH_ONNX_FRONTEND_ENABLE=ON

View File

@@ -132,7 +132,7 @@ jobs:
- script: |
set PATH=$(WORK_DIR)\ninja-win;%PATH%
call "$(MSVS_VARS_PATH)" && $(CMAKE_CMD) -G "Ninja Multi-Config" -DENABLE_ONEDNN_FOR_GPU=OFF -DENABLE_GNA=$(CMAKE_BUILD_SHARED_LIBS) -DENABLE_CLDNN=$(CMAKE_BUILD_SHARED_LIBS) -DENABLE_GAPI_PREPROCESSING=$(CMAKE_BUILD_SHARED_LIBS) -DBUILD_SHARED_LIBS=$(CMAKE_BUILD_SHARED_LIBS) -DENABLE_REQUIREMENTS_INSTALL=OFF -DENABLE_FASTER_BUILD=ON -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) -DENABLE_TESTS=ON -DENABLE_STRICT_DEPENDENCIES=OFF -DENABLE_PYTHON=ON -DPYTHON_EXECUTABLE="C:\hostedtoolcache\windows\Python\3.7.6\x64\python.exe" -DPYTHON_INCLUDE_DIR="C:\hostedtoolcache\windows\Python\3.7.6\x64\include" -DPYTHON_LIBRARY="C:\hostedtoolcache\windows\Python\3.7.6\x64\libs\python37.lib" -DIE_EXTRA_MODULES=$(OPENVINO_CONTRIB_REPO_DIR)\modules -DCMAKE_C_COMPILER:PATH="$(MSVC_COMPILER_PATH)" -DCMAKE_CXX_COMPILER:PATH="$(MSVC_COMPILER_PATH)" $(REPO_DIR)
call "$(MSVS_VARS_PATH)" && $(CMAKE_CMD) -G "Ninja Multi-Config" -DENABLE_ONEDNN_FOR_GPU=OFF -DENABLE_GNA=$(CMAKE_BUILD_SHARED_LIBS) -DENABLE_INTEL_GPU=$(CMAKE_BUILD_SHARED_LIBS) -DENABLE_GAPI_PREPROCESSING=$(CMAKE_BUILD_SHARED_LIBS) -DBUILD_SHARED_LIBS=$(CMAKE_BUILD_SHARED_LIBS) -DENABLE_REQUIREMENTS_INSTALL=OFF -DENABLE_FASTER_BUILD=ON -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) -DENABLE_TESTS=ON -DENABLE_STRICT_DEPENDENCIES=OFF -DENABLE_PYTHON=ON -DPYTHON_EXECUTABLE="C:\hostedtoolcache\windows\Python\3.7.6\x64\python.exe" -DPYTHON_INCLUDE_DIR="C:\hostedtoolcache\windows\Python\3.7.6\x64\include" -DPYTHON_LIBRARY="C:\hostedtoolcache\windows\Python\3.7.6\x64\libs\python37.lib" -DIE_EXTRA_MODULES=$(OPENVINO_CONTRIB_REPO_DIR)\modules -DCMAKE_C_COMPILER:PATH="$(MSVC_COMPILER_PATH)" -DCMAKE_CXX_COMPILER:PATH="$(MSVC_COMPILER_PATH)" $(REPO_DIR)
workingDirectory: $(BUILD_DIR)
displayName: 'CMake'

View File

@@ -61,7 +61,7 @@ RUN cmake .. \
-DENABLE_CPPLINT=OFF \
-DENABLE_TESTS=OFF \
-DENABLE_MKL_DNN=ON \
-DENABLE_CLDNN=OFF \
-DENABLE_INTEL_GPU=OFF \
-DENABLE_PROFILING_ITT=OFF \
-DENABLE_SAMPLES=OFF \
-DENABLE_PYTHON=ON \

View File

@@ -30,11 +30,11 @@ azure-pipelines.yml @openvinotoolkit/openvino-admins
/inference-engine/thirdparty/mkl-dnn/ @openvinotoolkit/openvino-ie-cpu-maintainers @openvinotoolkit/openvino-ie-cpu-developers
# IE GPU:
/inference-engine/src/cldnn_engine/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
/src/inference/include/ie/gpu/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
/src/inference/include/ie/cldnn/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
/src/inference/include/openvino/runtime/gpu/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
/inference-engine/thirdparty/clDNN/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
/src/plugins/intel_gpu/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers
# IE VPU:
/inference-engine/src/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers

View File

@@ -65,10 +65,10 @@ if(ENABLE_MKL_DNN)
PREFIX "${OV_COVERAGE_BASE_DIRECTORY}")
endif()
if(ENABLE_CLDNN)
ie_coverage_extract(INPUT "openvino" OUTPUT "cldnn_engine"
PATTERNS "${OV_COVERAGE_BASE_DIRECTORY}/inference-engine/src/cldnn_engine/*")
ie_coverage_genhtml(INFO_FILE "cldnn_engine"
if (ENABLE_INTEL_GPU)
ie_coverage_extract(INPUT "openvino" OUTPUT "intel_gpu_plugin"
PATTERNS "${OV_COVERAGE_BASE_DIRECTORY}/src/plugins/intel_gpu/*")
ie_coverage_genhtml(INFO_FILE "intel_gpu_plugin"
PREFIX "${OV_COVERAGE_BASE_DIRECTORY}")
endif()

View File

@@ -13,6 +13,7 @@ ie_option (ENABLE_TESTS "unit, behavior and functional tests" OFF)
ie_option (ENABLE_STRICT_DEPENDENCIES "Skip configuring \"convinient\" dependencies for efficient parallel builds" ON)
ie_dependent_option (ENABLE_CLDNN "clDnn based plugin for inference engine" ON "X86_64;NOT APPLE;NOT MINGW;NOT WINDOWS_STORE;NOT WINDOWS_PHONE" OFF)
ie_dependent_option (ENABLE_INTEL_GPU "GPU plugin for inference engine on Intel GPU" ON "ENABLE_CLDNN" OFF)
if (NOT ENABLE_CLDNN OR ANDROID OR
(CMAKE_COMPILER_IS_GNUCXX AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.0))
@@ -55,8 +56,6 @@ ie_dependent_option (ENABLE_DOCS "Build docs using Doxygen" OFF "PYTHONINTERP_FO
ie_dependent_option (ENABLE_GNA "GNA support for inference engine" ON "NOT APPLE;NOT ANDROID;X86_64" OFF)
ie_dependent_option (ENABLE_CLDNN_TESTS "Enable clDNN unit tests" OFF "ENABLE_CLDNN" OFF)
# "MKL-DNN library based on OMP or TBB or Sequential implementation: TBB|OMP|SEQ"
if(X86 OR ARM OR (MSVC AND (ARM OR AARCH64)) )
set(THREADING_DEFAULT "SEQ")
@@ -197,8 +196,8 @@ if (ENABLE_MYRIAD_NO_BOOT AND ENABLE_MYRIAD )
add_definitions(-DENABLE_MYRIAD_NO_BOOT=1)
endif()
if (ENABLE_CLDNN)
add_definitions(-DENABLE_CLDNN=1)
if (ENABLE_INTEL_GPU)
add_definitions(-DENABLE_INTEL_GPU=1)
endif()
if (ENABLE_MKL_DNN)
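
For context on the `ie_dependent_option (ENABLE_INTEL_GPU ...)` declaration earlier in this file's diff: it appears to follow the semantics of CMake's standard `cmake_dependent_option`, so the new `ENABLE_INTEL_GPU` switch defaults to ON only while `ENABLE_CLDNN` holds and is forced to OFF otherwise. A minimal sketch of that behaviour using plain CMake instead of the OpenVINO helper macros (the demo project itself is hypothetical):

cmake_minimum_required(VERSION 3.13)
project(dependent_option_demo NONE)
include(CMakeDependentOption)

option(ENABLE_CLDNN "clDnn based plugin for inference engine" ON)

# Defaults to ON while ENABLE_CLDNN is true; forced to OFF (and hidden) otherwise.
cmake_dependent_option(ENABLE_INTEL_GPU "GPU plugin for inference engine on Intel GPU"
                       ON "ENABLE_CLDNN" OFF)

if(ENABLE_INTEL_GPU)
    add_definitions(-DENABLE_INTEL_GPU=1)   # mirrors the define added later in this file
endif()

message(STATUS "ENABLE_INTEL_GPU = ${ENABLE_INTEL_GPU}")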

View File

@@ -3,7 +3,7 @@ GPU Plugin {#openvino_docs_IE_DG_supported_plugins_GPU}
The GPU plugin uses the Intel® Compute Library for Deep Neural Networks (clDNN) to infer deep neural networks.
clDNN is an open source performance library for Deep Learning (DL) applications intended for acceleration of Deep Learning Inference on Intel® Processor Graphics including Intel® HD Graphics, Intel® Iris® Graphics, Intel® Iris® Xe Graphics, and Intel® Iris® Xe MAX graphics.
For an in-depth description of clDNN, see [Inference Engine source files](https://github.com/openvinotoolkit/openvino/tree/master/inference-engine/src/cldnn_engine) and [Accelerate Deep Learning Inference with Intel® Processor Graphics](https://software.intel.com/en-us/articles/accelerating-deep-learning-inference-with-intel-processor-graphics).
For an in-depth description of clDNN, see [Inference Engine source files](https://github.com/openvinotoolkit/openvino/tree/master/src/plugins/intel_gpu/) and [Accelerate Deep Learning Inference with Intel® Processor Graphics](https://software.intel.com/en-us/articles/accelerating-deep-learning-inference-with-intel-processor-graphics).
## Device Naming Convention
* Devices are enumerated as "GPU.X" where `X={0, 1, 2,...}`. Only Intel® GPU devices are considered.

View File

@@ -13,10 +13,6 @@ if(ENABLE_MKL_DNN)
add_subdirectory(mkldnn_plugin)
endif()
if(ENABLE_CLDNN)
add_subdirectory(cldnn_engine)
endif()
if(ENABLE_VPU)
add_subdirectory(vpu)
endif()

View File

@@ -8,7 +8,7 @@ if (ENABLE_MKL_DNN)
add_subdirectory(cpu)
endif()
if (ENABLE_CLDNN)
if (ENABLE_INTEL_GPU)
add_subdirectory(gpu)
endif()

View File

@@ -5,7 +5,7 @@
set(VPU_DEPENDENCIES
vpu_copy_firmware)
if (ENABLE_CLDNN)
if (ENABLE_INTEL_GPU)
list(APPEND VPU_DEPENDENCIES clDNNPlugin)
if(ENABLE_HETERO)
list(APPEND VPU_DEPENDENCIES ov_hetero_plugin)

View File

@@ -17,8 +17,8 @@ if(ENABLE_LTO)
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_RELEASE ON)
endif()
if (ENABLE_CLDNN)
if (ENABLE_TESTS AND ENABLE_CLDNN_TESTS)
if (ENABLE_INTEL_GPU)
if (ENABLE_TESTS)
set(CLDNN__INCLUDE_TESTS ON CACHE BOOL "" FORCE)
else()
set(CLDNN__INCLUDE_TESTS OFF CACHE BOOL "" FORCE)
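
A note on the `CACHE BOOL "" FORCE` pattern above: FORCE overwrites whatever value is already stored in CMakeCache.txt, so the clDNN test switch is re-derived from the top-level `ENABLE_TESTS` option on every reconfigure instead of remembering a stale cached value. A minimal sketch of the pattern in an isolated, hypothetical project:

cmake_minimum_required(VERSION 3.13)
project(force_cache_demo NONE)

option(ENABLE_TESTS "unit, behavior and functional tests" OFF)

# Without FORCE a second configure run would keep the previously cached value;
# with FORCE the sub-project switch always tracks the current ENABLE_TESTS setting.
if(ENABLE_TESTS)
    set(CLDNN__INCLUDE_TESTS ON CACHE BOOL "" FORCE)
else()
    set(CLDNN__INCLUDE_TESTS OFF CACHE BOOL "" FORCE)
endif()

message(STATUS "CLDNN__INCLUDE_TESTS = ${CLDNN__INCLUDE_TESTS}")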

View File

@@ -30,7 +30,7 @@ if(ENABLE_MKL_DNN)
add_dependencies(${TARGET_NAME} MKLDNNPlugin)
endif()
if(ENABLE_CLDNN)
if(ENABLE_INTEL_GPU)
add_dependencies(${TARGET_NAME} clDNNPlugin)
endif()

View File

@@ -44,7 +44,7 @@ set the mentioned flags to `ON`. Note the `CMAKE_INSTALL_PREFIX`, which defaults
cd openvino/build
cmake .. \
-DENABLE_CLDNN=OFF \
-DENABLE_INTEL_GPU=OFF \
-DENABLE_OPENCV=OFF \
-DENABLE_VPU=OFF \
-DENABLE_PYTHON=ON \
@@ -107,7 +107,7 @@ cmake .. ^
-G"Visual Studio 16 2019" ^
-DCMAKE_BUILD_TYPE=Release ^
-DCMAKE_INSTALL_PREFIX="%OPENVINO_BASEDIR%/openvino_dist" ^
-DENABLE_CLDNN=OFF ^
-DENABLE_INTEL_GPU=OFF ^
-DENABLE_OPENCV=OFF ^
-DENABLE_VPU=OFF ^
-DNGRAPH_ONNX_FRONTEND_ENABLE=ON ^

View File

@@ -455,7 +455,7 @@ if (ENABLE_MKL_DNN AND NGRAPH_UNIT_TEST_BACKENDS_ENABLE)
endif()
endif()
if (ENABLE_CLDNN AND NGRAPH_UNIT_TEST_BACKENDS_ENABLE)
if (ENABLE_INTEL_GPU AND NGRAPH_UNIT_TEST_BACKENDS_ENABLE)
message(STATUS "NGRAPH_TESTS: IE:GPU enabled")
set(ACTIVE_BACKEND_LIST ${ACTIVE_BACKEND_LIST} "IE:GPU")
if (ENABLE_STRICT_DEPENDENCIES)

View File

@@ -5,3 +5,7 @@
if(ENABLE_HETERO)
add_subdirectory(hetero)
endif()
if(ENABLE_INTEL_GPU)
add_subdirectory(intel_gpu)
endif()

View File

@@ -16,15 +16,14 @@ if(ENABLE_GPU_DEBUG_CAPS)
add_definitions(-DGPU_DEBUG_CONFIG=1)
endif()
file(GLOB_RECURSE MAIN_SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
file(GLOB LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.h)
file(GLOB_RECURSE PLUGIN_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/src/plugin/*.cpp ${CMAKE_CURRENT_SOURCE_DIR}/include/intel_gpu/plugin/*.h)
addVersionDefines(cldnn_engine.cpp CI_BUILD_NUMBER CLDNN_VERSION)
addVersionDefines(src/plugin/cldnn_engine.cpp CI_BUILD_NUMBER CLDNN_VERSION)
ie_add_plugin(NAME ${TARGET_NAME}
DEVICE_NAME "GPU"
SOURCES ${MAIN_SRC} ${LIBRARY_HEADERS}
VERSION_DEFINES_FOR cldnn_engine.cpp)
SOURCES ${PLUGIN_SOURCES}
VERSION_DEFINES_FOR src/plugin/cldnn_engine.cpp)
target_compile_options(${TARGET_NAME} PRIVATE
$<$<CONFIG:Release>:$<IF:$<CXX_COMPILER_ID:MSVC>,/Os,-Os>>)
@@ -35,7 +34,7 @@ target_link_libraries(${TARGET_NAME} PRIVATE clDNN_lib pugixml::static
ngraph)
target_include_directories(${TARGET_NAME} PRIVATE
${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/include/intel_gpu/plugin/
$<TARGET_PROPERTY:inference_engine_transformations,INTERFACE_INCLUDE_DIRECTORIES>
${clDNN_SOURCE_DIR}/api)
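
The include-directories block above also relies on a generator expression to borrow another target's interface include paths without linking against it. A minimal sketch of that pattern with hypothetical targets `foo` and `bar` (not part of this commit):

cmake_minimum_required(VERSION 3.13)
project(genexpr_includes_demo CXX)

# Hypothetical header-only target that publishes its include directory.
add_library(foo INTERFACE)
target_include_directories(foo INTERFACE ${CMAKE_CURRENT_SOURCE_DIR}/foo/include)

# Hypothetical consumer; bar.cpp is assumed to exist next to this CMakeLists.txt.
add_library(bar STATIC bar.cpp)

# Reuse foo's public include directories at compile time without creating
# a link dependency on foo, as done above for inference_engine_transformations.
target_include_directories(bar PRIVATE
    $<TARGET_PROPERTY:foo,INTERFACE_INCLUDE_DIRECTORIES>)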

View File

@@ -14,7 +14,7 @@ add_subdirectory(ittapi)
add_subdirectory(itt_collector EXCLUDE_FROM_ALL)
add_subdirectory(zlib EXCLUDE_FROM_ALL)
add_subdirectory(cnpy EXCLUDE_FROM_ALL)
if(ENABLE_CLDNN)
if(ENABLE_INTEL_GPU)
add_subdirectory(ocl)
endif()