diff --git a/cmake/developer_package/ncc_naming_style/openvino.style b/cmake/developer_package/ncc_naming_style/openvino.style
index c5fa45b2199..4f9df1dfebd 100644
--- a/cmake/developer_package/ncc_naming_style/openvino.style
+++ b/cmake/developer_package/ncc_naming_style/openvino.style
@@ -1,8 +1,6 @@
# custom OpenVINO values
CppMethod: '^(operator\W+|[a-z_\d]+|signaling_NaN|quiet_NaN)$'
-# TODO: remove stopwatch|unsupported_op
ClassName: '^([A-Z][\w]+|b?float16|numeric_limits|ngraph_error|stopwatch|unsupported_op)$'
-# TODO: remove oi_pair
StructName: '^([A-Z][\w]+|element_type_traits|hash|oi_pair)$'
FunctionName: '^(operator\W+|[a-z_\d]+)$'
Namespace: '^([a-z\d_]+|InferenceEngine)$'
@@ -20,11 +18,11 @@ VariableReference: '^\w+$'
EnumName: '^[A-Z][\w]+$'
# excepts element_type
-# TODO: Fix interpolate
EnumConstantName: '^([A-Z\d_]+|undefined|dynamic|boolean|bf16|f16|f32|f64|i4|i8|i16|i32|i64|u1|u4|u8|u16|u32|u64|asymmetric|align_corners|round_prefer_floor|round_prefer_ceil|floor|ceil|simple|nearest|linear|linear_onnx|cubic|area|scales|sizes|half_pixel|tf_half_pixel_for_nn|pytorch_half_pixel|asymetric)$'
# TODO: align
UsingDeclaration: '^.*$'
TypedefName: '^.*$'
+CxxDynamicCastExpression: '^.*$'
# not needed values
ClassTemplatePartialSpecialization: 'XXXX'
@@ -65,7 +63,6 @@ StatementExpression: 'XXXX'
GenericSelectionExpression: 'XXXX'
GnuNullExpression: 'XXXX'
CxxStaticCastExpression: '^.*$'
-CxxDynamicCastExpression: 'XXXX'
CxxReinterpretCastExpression: '^.*$'
CxxConstCastExpression: 'XXXX'
CxxFunctionalCastExpression: '^.*$'
diff --git a/docs/CMakeLists.txt b/docs/CMakeLists.txt
index 95b8a3f2359..93d1feed405 100644
--- a/docs/CMakeLists.txt
+++ b/docs/CMakeLists.txt
@@ -36,7 +36,7 @@ if(NOT ENABLE_DOCKER)
# install
- install(TARGETS templatePlugin template_extension
+ install(TARGETS templatePlugin template_extension template_ov_extension
LIBRARY DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT tests EXCLUDE_FROM_ALL)
endif()
diff --git a/docs/IE_DG/Extensibility_DG/AddingNGraphOps.md b/docs/IE_DG/Extensibility_DG/AddingNGraphOps.md
index ed4d6559532..8b3d50f6660 100644
--- a/docs/IE_DG/Extensibility_DG/AddingNGraphOps.md
+++ b/docs/IE_DG/Extensibility_DG/AddingNGraphOps.md
@@ -20,7 +20,7 @@ To add your custom nGraph operation, create a new class that extends `ngraph::Op
Based on that, declaration of an operation class can look as follows:
-@snippet template_extension/op.hpp op:header
+@snippet template_extension/old/op.hpp op:header
### Class Fields
@@ -35,37 +35,37 @@ nGraph operation contains two constructors:
* Default constructor, which enables you to create an operation without attributes
* Constructor that creates and validates an operation with specified inputs and attributes
-@snippet template_extension/op.cpp op:ctor
+@snippet template_extension/old/op.cpp op:ctor
### `validate_and_infer_types()`
`ngraph::Node::validate_and_infer_types` method validates operation attributes and calculates output shapes using attributes of the operation.
-@snippet template_extension/op.cpp op:validate
+@snippet template_extension/old/op.cpp op:validate
### `clone_with_new_inputs()`
`ngraph::Node::clone_with_new_inputs` method creates a copy of the nGraph operation with new inputs.
-@snippet template_extension/op.cpp op:copy
+@snippet template_extension/old/op.cpp op:copy
### `visit_attributes()`
`ngraph::Node::visit_attributes` method enables you to visit all operation attributes.
-@snippet template_extension/op.cpp op:visit_attributes
+@snippet template_extension/old/op.cpp op:visit_attributes
### `evaluate()` and `has_evaluate()`
`ngraph::Node::evaluate` method enables you to apply constant folding to an operation.
-@snippet template_extension/op.cpp op:evaluate
+@snippet template_extension/old/op.cpp op:evaluate
## Register Custom Operations in Extension Class
To add custom operations to the [Extension](Extension.md) class, create an operation set with custom operations and implement the `InferenceEngine::IExtension::getOpSets` method:
-@snippet template_extension/extension.cpp extension:getOpSets
+@snippet template_extension/old/extension.cpp extension:getOpSets
This method returns a map of opsets that exist in the extension library.
diff --git a/docs/IE_DG/Extensibility_DG/Building.md b/docs/IE_DG/Extensibility_DG/Building.md
index be93c5a06d3..b1435914ccc 100644
--- a/docs/IE_DG/Extensibility_DG/Building.md
+++ b/docs/IE_DG/Extensibility_DG/Building.md
@@ -4,14 +4,14 @@ Inference Engine build infrastructure provides the Inference Engine Package for
To build an extension library, use the following CMake script:
-@snippet template_extension/CMakeLists.txt cmake:extension
+@snippet template_extension/old/CMakeLists.txt cmake:extension
This CMake script finds the Inference Engine and nGraph using the `find_package` CMake command.
To build an extension library, run the commands below:
```sh
-$ cd template_extension
+$ cd template_extension/old
$ mkdir build
$ cd build
$ cmake -DOpenVINO_DIR=[OpenVINO_DIR] ../
diff --git a/docs/IE_DG/Extensibility_DG/CPU_Kernel.md b/docs/IE_DG/Extensibility_DG/CPU_Kernel.md
index 923bcc36bc2..09f1838ca88 100644
--- a/docs/IE_DG/Extensibility_DG/CPU_Kernel.md
+++ b/docs/IE_DG/Extensibility_DG/CPU_Kernel.md
@@ -7,7 +7,7 @@ The primary means of the performance of the CPU codepath in the Inference Engine
All custom kernels for the CPU plugin should be inherited from the InferenceEngine::ILayerExecImpl interface.
Based on that, declaration of a kernel implementation class can look as follows:
-@snippet template_extension/cpu_kernel.hpp cpu_implementation:header
+@snippet template_extension/old/cpu_kernel.hpp cpu_implementation:header
### Class Fields
@@ -22,25 +22,25 @@ The provided implementation has several fields:
An implementation constructor checks parameters of an nGraph operation, stores required attributes, and stores an error message in the case of an error.
-@snippet template_extension/cpu_kernel.cpp cpu_implementation:ctor
+@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:ctor
### `getSupportedConfigurations`
InferenceEngine::ILayerExecImpl::getSupportedConfigurations method returns all supported configuration formats (input/output tensor layouts) for your implementation. To specify formats of data, use InferenceEngine::TensorDesc. Refer to the [Memory Primitives](../Memory_primitives.md) section for instructions.
-@snippet template_extension/cpu_kernel.cpp cpu_implementation:getSupportedConfigurations
+@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:getSupportedConfigurations
### `init`
InferenceEngine::ILayerExecImpl::init method gets a runtime-selected configuration from a vector that is populated from the `getSupportedConfigurations` method and checks the parameters:
-@snippet template_extension/cpu_kernel.cpp cpu_implementation:init
+@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:init
### `execute`
InferenceEngine::ILayerExecImpl::execute method accepts and processes the actual tenors as input/output blobs:
-@snippet template_extension/cpu_kernel.cpp cpu_implementation:execute
+@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:execute
## Register Implementation in `Extension` Class
@@ -52,13 +52,13 @@ To register custom kernel implementation in the [Extension](Extension.md) class,
InferenceEngine::IExtension::getImplTypes returns a vector of implementation types for an operation.
-@snippet template_extension/extension.cpp extension:getImplTypes
+@snippet template_extension/old/extension.cpp extension:getImplTypes
### getImplementation
InferenceEngine::IExtension::getImplementation returns the kernel implementation with a specified type for an operation.
-@snippet template_extension/extension.cpp extension:getImplementation
+@snippet template_extension/old/extension.cpp extension:getImplementation
## Load Extension with Executable Kernels to Plugin
diff --git a/docs/IE_DG/Extensibility_DG/Custom_ONNX_Ops.md b/docs/IE_DG/Extensibility_DG/Custom_ONNX_Ops.md
index eb7183f0dc2..dd554320241 100644
--- a/docs/IE_DG/Extensibility_DG/Custom_ONNX_Ops.md
+++ b/docs/IE_DG/Extensibility_DG/Custom_ONNX_Ops.md
@@ -39,12 +39,12 @@ If you do not need an operator anymore, unregister it by calling `unregister_ope
The same principles apply when registering a custom ONNX operator based on custom nGraph operations.
This example shows how to register a custom ONNX operator based on `Operation` presented in [this tutorial](AddingNGraphOps.md), which is used in [TemplateExtension](Extension.md).
-@snippet template_extension/extension.cpp extension:ctor
+@snippet template_extension/old/extension.cpp extension:ctor
Here, the `register_operator` function is called in the constructor of Extension. The constructor makes sure that the function is called before InferenceEngine::Core::ReadNetwork, because InferenceEngine::Core::AddExtension must be called before a model with a custom operator is read.
The example below demonstrates how to unregister an operator from the destructor of Extension:
-@snippet template_extension/extension.cpp extension:dtor
+@snippet template_extension/old/extension.cpp extension:dtor
> **REQUIRED**: It is mandatory to unregister a custom ONNX operator if it is defined in a dynamic shared library.
diff --git a/docs/IE_DG/Extensibility_DG/Extension.md b/docs/IE_DG/Extensibility_DG/Extension.md
index e941cb9c13c..a8394ecb93d 100644
--- a/docs/IE_DG/Extensibility_DG/Extension.md
+++ b/docs/IE_DG/Extensibility_DG/Extension.md
@@ -8,11 +8,11 @@ used as an example in this document and `FFT` used as a more complex example fro
Based on that, the declaration of an extension class can look as follows:
-@snippet template_extension/extension.hpp extension:header
+@snippet template_extension/old/extension.hpp extension:header
The extension library should contain and export the InferenceEngine::CreateExtension method, which creates an `Extension` class:
-@snippet template_extension/extension.cpp extension:CreateExtension
+@snippet template_extension/old/extension.cpp extension:CreateExtension
Also, an `Extension` object should implement the following methods:
@@ -20,7 +20,7 @@ Also, an `Extension` object should implement the following methods:
* InferenceEngine::IExtension::GetVersion returns information about the version of the library.
-@snippet template_extension/extension.cpp extension:GetVersion
+@snippet template_extension/old/extension.cpp extension:GetVersion
Implement the InferenceEngine::IExtension::getOpSets method if the extension contains custom layers.
Read [Custom nGraph Operation](AddingNGraphOps.md) for more information.
diff --git a/docs/template_extension/CMakeLists.txt b/docs/template_extension/CMakeLists.txt
index 90a9e886b35..7296d14328f 100644
--- a/docs/template_extension/CMakeLists.txt
+++ b/docs/template_extension/CMakeLists.txt
@@ -2,36 +2,5 @@
# SPDX-License-Identifier: Apache-2.0
#
-# [cmake:extension]
-set(CMAKE_CXX_STANDARD 11)
-
-set(TARGET_NAME "template_extension")
-
-find_package(OpenVINO REQUIRED COMPONENTS Runtime OPTIONAL_COMPONENTS ONNX)
-find_package(OpenCV QUIET COMPONENTS core)
-
-set(SRC cpu_kernel.cpp extension.cpp op.cpp)
-
-if(OpenCV_FOUND)
- set(SRC ${SRC} fft_kernel.cpp fft_op.cpp)
-endif()
-
-add_library(${TARGET_NAME} MODULE ${SRC})
-
-if(OpenCV_FOUND)
- target_compile_definitions(${TARGET_NAME} PRIVATE OPENCV_IMPORT_ENABLED)
- target_link_libraries(${TARGET_NAME} PRIVATE opencv_core)
-endif()
-
-target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_INFERENCE_EXTENSION_API)
-target_link_libraries(${TARGET_NAME} PRIVATE openvino::core openvino::runtime)
-
-if(OpenVINO_Frontend_ONNX_FOUND)
- target_link_libraries(${TARGET_NAME} PRIVATE openvino::frontend::onnx)
- target_compile_definitions(${TARGET_NAME} PRIVATE OPENVINO_ONNX_FRONTEND_ENABLED)
-endif()
-# [cmake:extension]
-
-# Enable code style check
-file(GLOB_RECURSE template_extension_src "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp")
-add_clang_format_target(${TARGET_NAME}_clang FOR_SOURCES ${template_extension_src})
+add_subdirectory(old)
+add_subdirectory(new)
diff --git a/docs/template_extension/new/CMakeLists.txt b/docs/template_extension/new/CMakeLists.txt
new file mode 100644
index 00000000000..7229eba8def
--- /dev/null
+++ b/docs/template_extension/new/CMakeLists.txt
@@ -0,0 +1,22 @@
+# Copyright (C) 2018-2021 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+#
+
+# [cmake:extension]
+set(CMAKE_CXX_STANDARD 11)
+
+set(TARGET_NAME "template_ov_extension")
+
+find_package(OpenVINO)
+
+set(SRC identity.cpp ov_extension.cpp)
+
+add_library(${TARGET_NAME} MODULE ${SRC})
+
+target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_OPENVINO_EXTENSION_API)
+target_link_libraries(${TARGET_NAME} PRIVATE openvino::core)
+# [cmake:extension]
+
+# Enable code style check
+file(GLOB_RECURSE template_extension_src "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp")
+add_clang_format_target(${TARGET_NAME}_clang FOR_SOURCES ${template_extension_src})
diff --git a/docs/template_extension/new/identity.cpp b/docs/template_extension/new/identity.cpp
new file mode 100644
index 00000000000..01488fbdc42
--- /dev/null
+++ b/docs/template_extension/new/identity.cpp
@@ -0,0 +1,48 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "identity.hpp"
+
+using namespace TemplateExtension;
+
+//! [op:ctor]
+Identity::Identity(const ov::Output<ov::Node>& arg) : Op({arg}) {
+ constructor_validate_and_infer_types();
+}
+//! [op:ctor]
+
+//! [op:validate]
+void Identity::validate_and_infer_types() {
+    // Operation doesn't change shapes and element type
+ set_output_type(0, get_input_element_type(0), get_input_partial_shape(0));
+}
+//! [op:validate]
+
+//! [op:copy]
+std::shared_ptr<ov::Node> Identity::clone_with_new_inputs(const ov::OutputVector& new_args) const {
+    OPENVINO_ASSERT(new_args.size() == 1, "Incorrect number of new arguments");
+
+    return std::make_shared<Identity>(new_args.at(0));
+}
+//! [op:copy]
+
+//! [op:visit_attributes]
+bool Identity::visit_attributes(ov::AttributeVisitor& visitor) {
+ return true;
+}
+//! [op:visit_attributes]
+
+//! [op:evaluate]
+bool Identity::evaluate(ov::runtime::TensorVector& outputs, const ov::runtime::TensorVector& inputs) const {
+ auto in = inputs[0];
+ auto out = outputs[0];
+ out.set_shape(in.get_shape());
+    memcpy(out.data(), in.data(), in.get_byte_size());
+ return true;
+}
+
+bool Identity::has_evaluate() const {
+ return true;
+}
+//! [op:evaluate]
diff --git a/docs/template_extension/new/identity.hpp b/docs/template_extension/new/identity.hpp
new file mode 100644
index 00000000000..db08b0514ba
--- /dev/null
+++ b/docs/template_extension/new/identity.hpp
@@ -0,0 +1,27 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include <openvino/op/op.hpp>
+
+//! [op:header]
+namespace TemplateExtension {
+
+class Identity : public ov::op::Op {
+public:
+ OPENVINO_OP("Identity");
+
+ Identity() = default;
+    Identity(const ov::Output<ov::Node>& arg);
+ void validate_and_infer_types() override;
+    std::shared_ptr<ov::Node> clone_with_new_inputs(const ov::OutputVector& new_args) const override;
+ bool visit_attributes(ov::AttributeVisitor& visitor) override;
+
+ bool evaluate(ov::runtime::TensorVector& outputs, const ov::runtime::TensorVector& inputs) const override;
+ bool has_evaluate() const override;
+};
+//! [op:header]
+
+} // namespace TemplateExtension
diff --git a/docs/template_extension/new/ov_extension.cpp b/docs/template_extension/new/ov_extension.cpp
new file mode 100644
index 00000000000..c240328771b
--- /dev/null
+++ b/docs/template_extension/new/ov_extension.cpp
@@ -0,0 +1,11 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <openvino/core/extension.hpp>
+#include <openvino/core/op_extension.hpp>
+
+#include "identity.hpp"
+
+OPENVINO_CREATE_EXTENSIONS(
+    std::vector<ov::Extension::Ptr>({std::make_shared<ov::OpExtension<TemplateExtension::Identity>>()}));
diff --git a/docs/template_extension/old/CMakeLists.txt b/docs/template_extension/old/CMakeLists.txt
new file mode 100644
index 00000000000..90a9e886b35
--- /dev/null
+++ b/docs/template_extension/old/CMakeLists.txt
@@ -0,0 +1,37 @@
+# Copyright (C) 2018-2021 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+#
+
+# [cmake:extension]
+set(CMAKE_CXX_STANDARD 11)
+
+set(TARGET_NAME "template_extension")
+
+find_package(OpenVINO REQUIRED COMPONENTS Runtime OPTIONAL_COMPONENTS ONNX)
+find_package(OpenCV QUIET COMPONENTS core)
+
+set(SRC cpu_kernel.cpp extension.cpp op.cpp)
+
+if(OpenCV_FOUND)
+ set(SRC ${SRC} fft_kernel.cpp fft_op.cpp)
+endif()
+
+add_library(${TARGET_NAME} MODULE ${SRC})
+
+if(OpenCV_FOUND)
+ target_compile_definitions(${TARGET_NAME} PRIVATE OPENCV_IMPORT_ENABLED)
+ target_link_libraries(${TARGET_NAME} PRIVATE opencv_core)
+endif()
+
+target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_INFERENCE_EXTENSION_API)
+target_link_libraries(${TARGET_NAME} PRIVATE openvino::core openvino::runtime)
+
+if(OpenVINO_Frontend_ONNX_FOUND)
+ target_link_libraries(${TARGET_NAME} PRIVATE openvino::frontend::onnx)
+ target_compile_definitions(${TARGET_NAME} PRIVATE OPENVINO_ONNX_FRONTEND_ENABLED)
+endif()
+# [cmake:extension]
+
+# Enable code style check
+file(GLOB_RECURSE template_extension_src "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp")
+add_clang_format_target(${TARGET_NAME}_clang FOR_SOURCES ${template_extension_src})
diff --git a/docs/template_extension/cpu_kernel.cpp b/docs/template_extension/old/cpu_kernel.cpp
similarity index 100%
rename from docs/template_extension/cpu_kernel.cpp
rename to docs/template_extension/old/cpu_kernel.cpp
diff --git a/docs/template_extension/cpu_kernel.hpp b/docs/template_extension/old/cpu_kernel.hpp
similarity index 100%
rename from docs/template_extension/cpu_kernel.hpp
rename to docs/template_extension/old/cpu_kernel.hpp
diff --git a/docs/template_extension/extension.cpp b/docs/template_extension/old/extension.cpp
similarity index 100%
rename from docs/template_extension/extension.cpp
rename to docs/template_extension/old/extension.cpp
diff --git a/docs/template_extension/extension.hpp b/docs/template_extension/old/extension.hpp
similarity index 100%
rename from docs/template_extension/extension.hpp
rename to docs/template_extension/old/extension.hpp
diff --git a/docs/template_extension/fft_kernel.cpp b/docs/template_extension/old/fft_kernel.cpp
similarity index 100%
rename from docs/template_extension/fft_kernel.cpp
rename to docs/template_extension/old/fft_kernel.cpp
diff --git a/docs/template_extension/fft_kernel.hpp b/docs/template_extension/old/fft_kernel.hpp
similarity index 100%
rename from docs/template_extension/fft_kernel.hpp
rename to docs/template_extension/old/fft_kernel.hpp
diff --git a/docs/template_extension/fft_op.cpp b/docs/template_extension/old/fft_op.cpp
similarity index 100%
rename from docs/template_extension/fft_op.cpp
rename to docs/template_extension/old/fft_op.cpp
diff --git a/docs/template_extension/fft_op.hpp b/docs/template_extension/old/fft_op.hpp
similarity index 100%
rename from docs/template_extension/fft_op.hpp
rename to docs/template_extension/old/fft_op.hpp
diff --git a/docs/template_extension/op.cpp b/docs/template_extension/old/op.cpp
similarity index 100%
rename from docs/template_extension/op.cpp
rename to docs/template_extension/old/op.cpp
diff --git a/docs/template_extension/op.hpp b/docs/template_extension/old/op.hpp
similarity index 100%
rename from docs/template_extension/op.hpp
rename to docs/template_extension/old/op.hpp
diff --git a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp
index e1442b139c0..d77671f9497 100644
--- a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp
+++ b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp
@@ -6,6 +6,7 @@
#include "ie_plugin_config.hpp"
#include "ngraph/partial_shape.hpp"
+#include "openvino/op/util/framework_node.hpp"
const std::string EXPORTED_NETWORK_NAME = "undefined";
std::map precision_map = {{"FP32", InferenceEngine::Precision::FP32},
@@ -197,7 +198,11 @@ public:
}
std::map getOpSets() override {
- return {{"framework_node_ext", ngraph::OpSet()}};
+        std::map<std::string, ngraph::OpSet> opsets;
+        ngraph::OpSet opset;
+        opset.insert<ov::op::util::FrameworkNode>();
+        opsets["util"] = opset;
+        return opsets;
}
void Unload() noexcept override {}
diff --git a/inference-engine/src/inference_engine/CMakeLists.txt b/inference-engine/src/inference_engine/CMakeLists.txt
index f927c906c91..4dc8fe70d70 100644
--- a/inference-engine/src/inference_engine/CMakeLists.txt
+++ b/inference-engine/src/inference_engine/CMakeLists.txt
@@ -19,6 +19,10 @@ file (GLOB LIBRARY_SRC
${CMAKE_CURRENT_SOURCE_DIR}/src/cpp_interfaces/interface/*.cpp
)
+# Add include path to so_extension.hpp
+set_property(SOURCE ${CMAKE_CURRENT_SOURCE_DIR}/src/ie_core.cpp
+ APPEND PROPERTY INCLUDE_DIRECTORIES "${OpenVINO_SOURCE_DIR}/ngraph/core/src/")
+
# TODO: WA for OneHot pass usage in reshape
set(LEGACY_SRC_ROOT "${IE_MAIN_SOURCE_DIR}/src/legacy_api/src")
set(LEGACY_LIBRARY_SHARED_SRCS
diff --git a/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp b/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp
index 140d757bc8c..ca4e85de4e3 100644
--- a/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp
+++ b/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp
@@ -16,7 +16,10 @@
#include
#include "ie_plugin_config.hpp"
+#include "openvino/core/extension.hpp"
+#include "openvino/core/op_extension.hpp"
#include "openvino/core/version.hpp"
+#include "openvino/op/op.hpp"
#include "openvino/runtime/common.hpp"
#include "openvino/runtime/executable_network.hpp"
#include "openvino/runtime/remote_context.hpp"
@@ -147,9 +150,79 @@ public:
/**
* @brief Registers extension
+ * @deprecated This method is deprecated. Please use other add_extension methods
* @param extension Pointer to already loaded extension
*/
+ OPENVINO_DEPRECATED("Please use add_extension(ov::Extension) or add_extension(path_to_library) instead.")
void add_extension(const std::shared_ptr& extension);
+ /**
+ * @brief Registers extension
+ * @param library_path path to library with ov::Extension
+ */
+ void add_extension(const std::string& library_path);
+#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
+ /**
+ * @brief Registers extension
+ * @param library_path path to library with ov::Extension
+ */
+ void add_extension(const std::wstring& library_path);
+#endif
+ /**
+ * @brief Registers extension
+ * @param extension Pointer to base extension
+ */
+    void add_extension(const std::shared_ptr<ov::Extension>& extension);
+ /**
+ * @brief Registers extensions
+ * @param extensions Vector of loaded base extensions
+ */
+    void add_extension(const std::vector<std::shared_ptr<ov::Extension>>& extensions);
+
+ /**
+ * @brief Registers extension
+ * @param extension Extension class which is inherited from ov::Extension class
+ */
+    template <class T, typename std::enable_if<std::is_base_of<ov::Extension, T>::value, bool>::type = true>
+    void add_extension(const T& extension) {
+        std::shared_ptr<ov::Extension> ext = std::make_shared<T>(extension);
+        add_extension(ext);
+    }
+
+ /**
+ * @brief Registers extensions
+ * @param extension Extension class which is inherited from ov::Extension class
+ * @param args list of extensions
+ */
+    template <class T,
+              class... Targs,
+              typename std::enable_if<std::is_base_of<ov::Extension, T>::value, bool>::type = true>
+    void add_extension(const T& extension, Targs... args) {
+        std::shared_ptr<ov::Extension> ext = std::make_shared<T>(extension);
+        add_extension(ext);
+        add_extension(args...);
+    }
+
+ /**
+ * @brief Registers custom operation
+ */
+    template <class T, typename std::enable_if<std::is_base_of<ov::op::Op, T>::value, bool>::type = true>
+    void add_extension() {
+        std::shared_ptr<ov::Extension> ext = std::make_shared<ov::OpExtension<T>>();
+        add_extension(ext);
+    }
+
+ /**
+ * @brief Registers custom operations
+ */
+    template <class T,
+              class... Targs,
+              typename std::enable_if<std::is_base_of<ov::op::Op, T>::value && sizeof...(Targs), bool>::type = true>
+    void add_extension() {
+        std::shared_ptr<ov::Extension> ext = std::make_shared<ov::OpExtension<T>>();
+        add_extension(ext);
+        if (sizeof...(Targs) > 0)
+            add_extension<Targs...>();
+    }
/**
* @brief Creates an executable network from a previously exported network
diff --git a/inference-engine/src/inference_engine/src/ie_core.cpp b/inference-engine/src/inference_engine/src/ie_core.cpp
index 7ab62534904..5f55efdd789 100644
--- a/inference-engine/src/inference_engine/src/ie_core.cpp
+++ b/inference-engine/src/inference_engine/src/ie_core.cpp
@@ -38,6 +38,7 @@
#include "openvino/runtime/executable_network.hpp"
#include "openvino/util/file_util.hpp"
#include "openvino/util/shared_object.hpp"
+#include "so_extension.hpp"
#include "xml_parse_utils.h"
#ifdef OPENVINO_STATIC_LIBRARY
@@ -195,6 +196,7 @@ class CoreImpl : public ie::ICore, public std::enable_shared_from_this opsetNames;
// TODO: make extensions to be optional with conditional compilation
mutable std::vector extensions;
+    std::vector<ov::Extension::Ptr> ov_extensions;
std::map pluginRegistry;
mutable std::mutex pluginsMutex; // to lock parallel access to pluginRegistry and plugins
@@ -483,12 +485,12 @@ public:
ie::CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath) const override {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from file");
- return InferenceEngine::details::ReadNetwork(modelPath, binPath, extensions, newAPI);
+ return InferenceEngine::details::ReadNetwork(modelPath, binPath, extensions, ov_extensions, newAPI);
}
ie::CNNNetwork ReadNetwork(const std::string& model, const ie::Blob::CPtr& weights) const override {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from memory");
- return InferenceEngine::details::ReadNetwork(model, weights, extensions, newAPI);
+ return InferenceEngine::details::ReadNetwork(model, weights, extensions, ov_extensions, newAPI);
}
bool isNewAPI() const override {
@@ -986,6 +988,13 @@ public:
AddExtensionUnsafe(extension);
}
+    void AddOVExtensions(const std::vector<ov::Extension::Ptr>& extensions) {
+ std::lock_guard lock(pluginsMutex);
+ for (const auto& ext : extensions) {
+ ov_extensions.emplace_back(ext);
+ }
+ }
+
/**
* @brief Provides a list of extensions
* @return A list of registered extensions
@@ -994,6 +1003,10 @@ public:
return extensions;
}
+    const std::vector<ov::Extension::Ptr>& GetOVExtensions() const {
+ return ov_extensions;
+ }
+
std::map GetVersions(const std::string& deviceName) const {
std::map versions;
std::vector deviceNames;
@@ -1480,6 +1493,22 @@ void Core::add_extension(const ie::IExtensionPtr& extension) {
OV_CORE_CALL_STATEMENT(_impl->AddExtension(extension););
}
+void Core::add_extension(const std::string& library_path) {
+ add_extension(ov::detail::load_extensions(library_path));
+}
+#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
+void Core::add_extension(const std::wstring& library_path) {
+ add_extension(ov::detail::load_extensions(library_path));
+}
+#endif
+
+void Core::add_extension(const std::shared_ptr<ov::Extension>& extension) {
+    add_extension(std::vector<std::shared_ptr<ov::Extension>>{extension});
+}
+void Core::add_extension(const std::vector<std::shared_ptr<ov::Extension>>& extensions) {
+    OV_CORE_CALL_STATEMENT({ _impl->AddOVExtensions(extensions); });
+}
+
ExecutableNetwork Core::import_model(std::istream& modelStream,
const std::string& deviceName,
const ConfigMap& config) {
diff --git a/inference-engine/src/inference_engine/src/ie_network_reader.cpp b/inference-engine/src/inference_engine/src/ie_network_reader.cpp
index cb4b6f8b360..cb55c706c89 100644
--- a/inference-engine/src/inference_engine/src/ie_network_reader.cpp
+++ b/inference-engine/src/inference_engine/src/ie_network_reader.cpp
@@ -35,6 +35,42 @@
#include "transformations/rt_info/old_api_map_attribute.hpp"
#include "transformations/utils/utils.hpp"
+namespace ov {
+
+/*
+ * @brief Wrapper for old IE extensions to new API
+ */
+class ExtensionWrapper : public ov::BaseOpExtension {
+public:
+ ExtensionWrapper(const InferenceEngine::IExtensionPtr& ext, const std::string& opset, const std::string& name)
+ : m_ext(ext),
+ m_opset_name(opset),
+ m_type(name),
+ m_ext_type(m_type.c_str(), 0, m_opset_name.c_str()) {}
+
+ const ov::DiscreteTypeInfo& get_type_info() const override {
+ return m_ext_type;
+ }
+
+ ngraph::OutputVector create(const ngraph::OutputVector& inputs, ngraph::AttributeVisitor& visitor) const override {
+        std::shared_ptr<ngraph::Node> node(m_ext->getOpSets().at(m_opset_name).create_insensitive(m_ext_type.name));
+
+ node->set_arguments(inputs);
+ if (node->visit_attributes(visitor)) {
+ node->constructor_validate_and_infer_types();
+ }
+ return node->outputs();
+ }
+
+private:
+ InferenceEngine::IExtensionPtr m_ext;
+ std::string m_opset_name;
+ std::string m_type;
+ ov::DiscreteTypeInfo m_ext_type;
+};
+
+} // namespace ov
+
namespace InferenceEngine {
#ifdef ENABLE_IR_V7_READER
@@ -391,14 +427,13 @@ ngraph::frontend::FrontEndManager& get_frontend_manager() {
return manager;
}
-ov::Extensions get_extensions_map(const std::vector<InferenceEngine::IExtensionPtr>& exts) {
-    ov::Extensions extensions;
+std::vector<ov::Extension::Ptr> wrap_old_extensions(const std::vector<InferenceEngine::IExtensionPtr>& exts) {
+    std::vector<ov::Extension::Ptr> extensions;
for (const auto& ext : exts) {
for (const auto& item : ext->getOpSets()) {
- if (extensions.count(item.first)) {
- IE_THROW() << "Extension with " << item.first << " name already exists";
+ for (const auto& type_info : item.second.get_types_info()) {
+ extensions.emplace_back(std::make_shared(ext, item.first, type_info.name));
}
- extensions[item.first] = item.second;
}
}
return extensions;
@@ -409,6 +444,7 @@ ov::Extensions get_extensions_map(const std::vector& exts,
+                               const std::vector<ov::Extension::Ptr>& ov_exts,
bool newAPI) {
#ifdef ENABLE_IR_V7_READER
// IR v7 obsolete code
@@ -439,9 +475,6 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath,
ngraph::frontend::InputModel::Ptr inputModel;
ov::VariantVector params{ov::make_variant(model_path)};
- if (!exts.empty()) {
- params.emplace_back(ov::make_variant(get_extensions_map(exts)));
- }
if (!binPath.empty()) {
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
@@ -453,8 +486,12 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath,
}
FE = manager.load_by_model(params);
- if (FE)
+ if (FE) {
+ FE->add_extension(ov_exts);
+ if (!exts.empty())
+ FE->add_extension(wrap_old_extensions(exts));
inputModel = FE->load(params);
+ }
if (inputModel) {
auto ngFunc = FE->convert(inputModel);
@@ -470,6 +507,7 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath,
CNNNetwork details::ReadNetwork(const std::string& model,
const Blob::CPtr& weights,
const std::vector& exts,
+                               const std::vector<ov::Extension::Ptr>& ov_exts,
bool newAPI) {
std::istringstream modelStringStream(model);
std::istream& modelStream = modelStringStream;
@@ -501,17 +539,18 @@ CNNNetwork details::ReadNetwork(const std::string& model,
ov::VariantVector params{ov::make_variant(&modelStream)};
if (weights) {
         char* data = weights->cbuffer().as<char*>();
-        ov::Weights weights_buffer =
+        std::shared_ptr<ngraph::runtime::AlignedBuffer> weights_buffer =
             std::make_shared<ngraph::runtime::SharedBuffer<Blob::CPtr>>(data, weights->byteSize(), weights);
params.emplace_back(ov::make_variant(weights_buffer));
}
- if (!exts.empty()) {
- params.emplace_back(ov::make_variant(get_extensions_map(exts)));
- }
FE = manager.load_by_model(params);
- if (FE)
+ if (FE) {
+ FE->add_extension(ov_exts);
+ if (!exts.empty())
+ FE->add_extension(wrap_old_extensions(exts));
inputModel = FE->load(params);
+ }
if (inputModel) {
auto ngFunc = FE->convert(inputModel);
return convert_to_cnnnetwork(ngFunc, exts, newAPI);
diff --git a/inference-engine/src/inference_engine/src/ie_network_reader.hpp b/inference-engine/src/inference_engine/src/ie_network_reader.hpp
index ab370123bf4..bee8c832216 100644
--- a/inference-engine/src/inference_engine/src/ie_network_reader.hpp
+++ b/inference-engine/src/inference_engine/src/ie_network_reader.hpp
@@ -9,6 +9,7 @@
#include "cpp/ie_cnn_network.h"
#include "ie_blob.h"
#include "ie_iextension.h"
+#include "openvino/core/extension.hpp"
namespace InferenceEngine {
namespace details {
@@ -19,27 +20,28 @@ namespace details {
* @param binPath path to bin file, if path is empty, will try to read bin file with the same name as xml and
* if bin file with the same name was not found, will load IR without weights.
* @param exts vector with extensions
+ * @param ov_exts vector with OpenVINO extensions
* @param newAPI Whether this function is called from OpenVINO 2.0 API
* @return CNNNetwork
*/
CNNNetwork ReadNetwork(const std::string& modelPath,
const std::string& binPath,
const std::vector& exts,
+                       const std::vector<ov::Extension::Ptr>& ov_exts,
bool newAPI);
/**
* @brief Reads IR xml and bin (with the same name) files
* @param model string with IR
* @param weights shared pointer to constant blob with weights
* @param exts vector with extensions
- * @note Reading ONNX models doesn't support loading weights from data blobs.
- If you are using an ONNX model with external data files, please use the
- ReadNetwork function overload which takes a filesystem path to the model.
+ * @param ov_exts vector with OpenVINO extensions
* @param newAPI Whether this function is called from OpenVINO 2.0 API
* @return CNNNetwork
*/
CNNNetwork ReadNetwork(const std::string& model,
const Blob::CPtr& weights,
const std::vector& exts,
+                       const std::vector<ov::Extension::Ptr>& ov_exts,
bool newAPI);
} // namespace details
diff --git a/inference-engine/tests/functional/inference_engine/CMakeLists.txt b/inference-engine/tests/functional/inference_engine/CMakeLists.txt
index 81c06c3d373..a107a133ca8 100644
--- a/inference-engine/tests/functional/inference_engine/CMakeLists.txt
+++ b/inference-engine/tests/functional/inference_engine/CMakeLists.txt
@@ -26,6 +26,7 @@ set(LINK_LIBRARIES
set(DEPENDENCIES
mock_engine
template_extension
+ template_ov_extension
lptNgraphFunctions
sharedTestClasses
test_model_zoo
diff --git a/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp b/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp
index 6342c3d741a..c5f779aebb7 100644
--- a/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp
+++ b/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp
@@ -124,7 +124,13 @@ public:
}
std::map getOpSets() override {
- return {{"framework_node_ext", ngraph::OpSet()}};
+        static std::map<std::string, ngraph::OpSet> opsets;
+        if (opsets.empty()) {
+            ngraph::OpSet opset;
+            opset.insert<ov::op::util::FrameworkNode>();
+            opsets["util"] = opset;
+        }
+        return opsets;
}
void Unload() noexcept override {}
diff --git a/inference-engine/tests/functional/inference_engine/ov_extension_test.cpp b/inference-engine/tests/functional/inference_engine/ov_extension_test.cpp
new file mode 100644
index 00000000000..a360c063b13
--- /dev/null
+++ b/inference-engine/tests/functional/inference_engine/ov_extension_test.cpp
@@ -0,0 +1,334 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include
+
+#include