From bd29f64570ebbdf5e7e3624c2b3c7c6985c7bdbb Mon Sep 17 00:00:00 2001 From: Ilya Lavrenov Date: Thu, 23 Sep 2021 10:41:33 +0300 Subject: [PATCH] Build IR FE with plugins (#7593) * Build IR FE with plugins * Add paddlepaddle --- cmake/developer_package/plugins/plugins.cmake | 4 ++-- docs/IE_DG/Deep_Learning_Inference_Engine_DevGuide.md | 10 ++++++---- docs/IE_DG/inference_engine_intro.md | 10 ++++++---- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/cmake/developer_package/plugins/plugins.cmake b/cmake/developer_package/plugins/plugins.cmake index 3f83954cfa7..c99ade034b1 100644 --- a/cmake/developer_package/plugins/plugins.cmake +++ b/cmake/developer_package/plugins/plugins.cmake @@ -89,8 +89,8 @@ function(ie_add_plugin) # fake dependencies to build in the following order: # IE -> IE readers -> IE inference plugins -> IE-based apps - if(TARGET inference_engine_ir_reader) - add_dependencies(${IE_PLUGIN_NAME} inference_engine_ir_reader) + if(TARGET ir_ngraph_frontend) + add_dependencies(${IE_PLUGIN_NAME} ir_ngraph_frontend) endif() if(TARGET inference_engine_ir_v7_reader) add_dependencies(${IE_PLUGIN_NAME} inference_engine_ir_v7_reader) diff --git a/docs/IE_DG/Deep_Learning_Inference_Engine_DevGuide.md b/docs/IE_DG/Deep_Learning_Inference_Engine_DevGuide.md index e2960c5dd87..2b30083f133 100644 --- a/docs/IE_DG/Deep_Learning_Inference_Engine_DevGuide.md +++ b/docs/IE_DG/Deep_Learning_Inference_Engine_DevGuide.md @@ -38,12 +38,14 @@ This library contains the classes to: ### Plugin Libraries to Read a Network Object -Starting from 2020.4 release, Inference Engine introduced a concept of `CNNNetwork` reader plugins. Such plugins can be automatically dynamically loaded by Inference Engine in runtime depending on file format: +Starting from 2022.1 release, OpenVINO Runtime introduced a concept of frontend plugins. 
Such plugins can be automatically loaded by OpenVINO Runtime at runtime, depending on the file format: * Linux* OS: - - `libinference_engine_ir_reader.so` to read a network from IR - - `onnx_ngraph_frontend.so` to read a network from ONNX model format + - `libir_ngraph_frontend.so` to read a network from IR + - `libpaddlepaddle_ngraph_frontend.so` to read a network from PaddlePaddle model format + - `libonnx_ngraph_frontend.so` to read a network from ONNX model format * Windows* OS: - - `inference_engine_ir_reader.dll` to read a network from IR + - `ir_ngraph_frontend.dll` to read a network from IR + - `paddlepaddle_ngraph_frontend.dll` to read a network from PaddlePaddle model format - `onnx_ngraph_frontend.dll` to read a network from ONNX model format ### Device-Specific Plugin Libraries diff --git a/docs/IE_DG/inference_engine_intro.md b/docs/IE_DG/inference_engine_intro.md index 3ad44b99144..6739c7693a8 100644 --- a/docs/IE_DG/inference_engine_intro.md +++ b/docs/IE_DG/inference_engine_intro.md @@ -43,12 +43,14 @@ This library contains the classes to: ### Plugin Libraries to read a network object ### -Starting from 2020.4 release, Inference Engine introduced a concept of `CNNNetwork` reader plugins. Such plugins can be automatically dynamically loaded by Inference Engine in runtime depending on file format: +Starting from 2022.1 release, OpenVINO Runtime introduced a concept of frontend plugins. 
Such plugins can be automatically loaded by OpenVINO Runtime at runtime, depending on the file format: * Unix* OS: - - `libinference_engine_ir_reader.so` to read a network from IR - - `onnx_ngraph_frontend.so` to read a network from ONNX model format + - `libir_ngraph_frontend.so` to read a network from IR + - `libpaddlepaddle_ngraph_frontend.so` to read a network from PaddlePaddle model format + - `libonnx_ngraph_frontend.so` to read a network from ONNX model format * Windows* OS: - - `inference_engine_ir_reader.dll` to read a network from IR + - `ir_ngraph_frontend.dll` to read a network from IR + - `paddlepaddle_ngraph_frontend.dll` to read a network from PaddlePaddle model format - `onnx_ngraph_frontend.dll` to read a network from ONNX model format ### Device-specific Plugin Libraries ###