diff --git a/cmake/developer_package/ncc_naming_style/openvino.style b/cmake/developer_package/ncc_naming_style/openvino.style index c5fa45b2199..4f9df1dfebd 100644 --- a/cmake/developer_package/ncc_naming_style/openvino.style +++ b/cmake/developer_package/ncc_naming_style/openvino.style @@ -1,8 +1,6 @@ # custom OpenVINO values CppMethod: '^(operator\W+|[a-z_\d]+|signaling_NaN|quiet_NaN)$' -# TODO: remove stopwatch|unsupported_op ClassName: '^([A-Z][\w]+|b?float16|numeric_limits|ngraph_error|stopwatch|unsupported_op)$' -# TODO: remove oi_pair StructName: '^([A-Z][\w]+|element_type_traits|hash|oi_pair)$' FunctionName: '^(operator\W+|[a-z_\d]+)$' Namespace: '^([a-z\d_]+|InferenceEngine)$' @@ -20,11 +18,11 @@ VariableReference: '^\w+$' EnumName: '^[A-Z][\w]+$' # excepts element_type -# TODO: Fix interpolate EnumConstantName: '^([A-Z\d_]+|undefined|dynamic|boolean|bf16|f16|f32|f64|i4|i8|i16|i32|i64|u1|u4|u8|u16|u32|u64|asymmetric|align_corners|round_prefer_floor|round_prefer_ceil|floor|ceil|simple|nearest|linear|linear_onnx|cubic|area|scales|sizes|half_pixel|tf_half_pixel_for_nn|pytorch_half_pixel|asymetric)$' # TODO: align UsingDeclaration: '^.*$' TypedefName: '^.*$' +CxxDynamicCastExpression: '^.*$' # not needed values ClassTemplatePartialSpecialization: 'XXXX' @@ -65,7 +63,6 @@ StatementExpression: 'XXXX' GenericSelectionExpression: 'XXXX' GnuNullExpression: 'XXXX' CxxStaticCastExpression: '^.*$' -CxxDynamicCastExpression: 'XXXX' CxxReinterpretCastExpression: '^.*$' CxxConstCastExpression: 'XXXX' CxxFunctionalCastExpression: '^.*$' diff --git a/docs/CMakeLists.txt b/docs/CMakeLists.txt index 95b8a3f2359..93d1feed405 100644 --- a/docs/CMakeLists.txt +++ b/docs/CMakeLists.txt @@ -36,7 +36,7 @@ if(NOT ENABLE_DOCKER) # install - install(TARGETS templatePlugin template_extension + install(TARGETS templatePlugin template_extension template_ov_extension LIBRARY DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT tests EXCLUDE_FROM_ALL) endif() diff --git 
a/docs/IE_DG/Extensibility_DG/AddingNGraphOps.md b/docs/IE_DG/Extensibility_DG/AddingNGraphOps.md index ed4d6559532..8b3d50f6660 100644 --- a/docs/IE_DG/Extensibility_DG/AddingNGraphOps.md +++ b/docs/IE_DG/Extensibility_DG/AddingNGraphOps.md @@ -20,7 +20,7 @@ To add your custom nGraph operation, create a new class that extends `ngraph::Op Based on that, declaration of an operation class can look as follows: -@snippet template_extension/op.hpp op:header +@snippet template_extension/old/op.hpp op:header ### Class Fields @@ -35,37 +35,37 @@ nGraph operation contains two constructors: * Default constructor, which enables you to create an operation without attributes * Constructor that creates and validates an operation with specified inputs and attributes -@snippet template_extension/op.cpp op:ctor +@snippet template_extension/old/op.cpp op:ctor ### `validate_and_infer_types()` `ngraph::Node::validate_and_infer_types` method validates operation attributes and calculates output shapes using attributes of the operation. -@snippet template_extension/op.cpp op:validate +@snippet template_extension/old/op.cpp op:validate ### `clone_with_new_inputs()` `ngraph::Node::clone_with_new_inputs` method creates a copy of the nGraph operation with new inputs. -@snippet template_extension/op.cpp op:copy +@snippet template_extension/old/op.cpp op:copy ### `visit_attributes()` `ngraph::Node::visit_attributes` method enables you to visit all operation attributes. -@snippet template_extension/op.cpp op:visit_attributes +@snippet template_extension/old/op.cpp op:visit_attributes ### `evaluate()` and `has_evaluate()` `ngraph::Node::evaluate` method enables you to apply constant folding to an operation. 
-@snippet template_extension/op.cpp op:evaluate +@snippet template_extension/old/op.cpp op:evaluate ## Register Custom Operations in Extension Class To add custom operations to the [Extension](Extension.md) class, create an operation set with custom operations and implement the `InferenceEngine::IExtension::getOpSets` method: -@snippet template_extension/extension.cpp extension:getOpSets +@snippet template_extension/old/extension.cpp extension:getOpSets This method returns a map of opsets that exist in the extension library. diff --git a/docs/IE_DG/Extensibility_DG/Building.md b/docs/IE_DG/Extensibility_DG/Building.md index be93c5a06d3..b1435914ccc 100644 --- a/docs/IE_DG/Extensibility_DG/Building.md +++ b/docs/IE_DG/Extensibility_DG/Building.md @@ -4,14 +4,14 @@ Inference Engine build infrastructure provides the Inference Engine Package for To build an extension library, use the following CMake script: -@snippet template_extension/CMakeLists.txt cmake:extension +@snippet template_extension/old/CMakeLists.txt cmake:extension This CMake script finds the Inference Engine and nGraph using the `find_package` CMake command. To build an extension library, run the commands below: ```sh -$ cd template_extension +$ cd template_extension/old $ mkdir build $ cd build $ cmake -DOpenVINO_DIR=[OpenVINO_DIR] ../ diff --git a/docs/IE_DG/Extensibility_DG/CPU_Kernel.md b/docs/IE_DG/Extensibility_DG/CPU_Kernel.md index 923bcc36bc2..09f1838ca88 100644 --- a/docs/IE_DG/Extensibility_DG/CPU_Kernel.md +++ b/docs/IE_DG/Extensibility_DG/CPU_Kernel.md @@ -7,7 +7,7 @@ The primary means of the performance of the CPU codepath in the Inference Engine All custom kernels for the CPU plugin should be inherited from the InferenceEngine::ILayerExecImpl interface. 
Based on that, declaration of a kernel implementation class can look as follows: -@snippet template_extension/cpu_kernel.hpp cpu_implementation:header +@snippet template_extension/old/cpu_kernel.hpp cpu_implementation:header ### Class Fields @@ -22,25 +22,25 @@ The provided implementation has several fields: An implementation constructor checks parameters of an nGraph operation, stores required attributes, and stores an error message in the case of an error. -@snippet template_extension/cpu_kernel.cpp cpu_implementation:ctor +@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:ctor ### `getSupportedConfigurations` InferenceEngine::ILayerExecImpl::getSupportedConfigurations method returns all supported configuration formats (input/output tensor layouts) for your implementation. To specify formats of data, use InferenceEngine::TensorDesc. Refer to the [Memory Primitives](../Memory_primitives.md) section for instructions. -@snippet template_extension/cpu_kernel.cpp cpu_implementation:getSupportedConfigurations +@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:getSupportedConfigurations ### `init` InferenceEngine::ILayerExecImpl::init method gets a runtime-selected configuration from a vector that is populated from the `getSupportedConfigurations` method and checks the parameters: -@snippet template_extension/cpu_kernel.cpp cpu_implementation:init +@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:init ### `execute` InferenceEngine::ILayerExecImpl::execute method accepts and processes the actual tenors as input/output blobs: -@snippet template_extension/cpu_kernel.cpp cpu_implementation:execute +@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:execute ## Register Implementation in `Extension` Class @@ -52,13 +52,13 @@ To register custom kernel implementation in the [Extension](Extension.md) class, InferenceEngine::IExtension::getImplTypes returns a vector of implementation types for an operation. 
-@snippet template_extension/extension.cpp extension:getImplTypes +@snippet template_extension/old/extension.cpp extension:getImplTypes ### getImplementation InferenceEngine::IExtension::getImplementation returns the kernel implementation with a specified type for an operation. -@snippet template_extension/extension.cpp extension:getImplementation +@snippet template_extension/old/extension.cpp extension:getImplementation ## Load Extension with Executable Kernels to Plugin diff --git a/docs/IE_DG/Extensibility_DG/Custom_ONNX_Ops.md b/docs/IE_DG/Extensibility_DG/Custom_ONNX_Ops.md index eb7183f0dc2..dd554320241 100644 --- a/docs/IE_DG/Extensibility_DG/Custom_ONNX_Ops.md +++ b/docs/IE_DG/Extensibility_DG/Custom_ONNX_Ops.md @@ -39,12 +39,12 @@ If you do not need an operator anymore, unregister it by calling `unregister_ope The same principles apply when registering a custom ONNX operator based on custom nGraph operations. This example shows how to register a custom ONNX operator based on `Operation` presented in [this tutorial](AddingNGraphOps.md), which is used in [TemplateExtension](Extension.md). -@snippet template_extension/extension.cpp extension:ctor +@snippet template_extension/old/extension.cpp extension:ctor Here, the `register_operator` function is called in the constructor of Extension. The constructor makes sure that the function is called before InferenceEngine::Core::ReadNetwork, because InferenceEngine::Core::AddExtension must be called before a model with a custom operator is read. The example below demonstrates how to unregister an operator from the destructor of Extension: -@snippet template_extension/extension.cpp extension:dtor +@snippet template_extension/old/extension.cpp extension:dtor > **REQUIRED**: It is mandatory to unregister a custom ONNX operator if it is defined in a dynamic shared library. 
diff --git a/docs/IE_DG/Extensibility_DG/Extension.md b/docs/IE_DG/Extensibility_DG/Extension.md index e941cb9c13c..a8394ecb93d 100644 --- a/docs/IE_DG/Extensibility_DG/Extension.md +++ b/docs/IE_DG/Extensibility_DG/Extension.md @@ -8,11 +8,11 @@ used as an example in this document and `FFT` used as a more complex example fro Based on that, the declaration of an extension class can look as follows: -@snippet template_extension/extension.hpp extension:header +@snippet template_extension/old/extension.hpp extension:header The extension library should contain and export the InferenceEngine::CreateExtension method, which creates an `Extension` class: -@snippet template_extension/extension.cpp extension:CreateExtension +@snippet template_extension/old/extension.cpp extension:CreateExtension Also, an `Extension` object should implement the following methods: @@ -20,7 +20,7 @@ Also, an `Extension` object should implement the following methods: * InferenceEngine::IExtension::GetVersion returns information about the version of the library. -@snippet template_extension/extension.cpp extension:GetVersion +@snippet template_extension/old/extension.cpp extension:GetVersion Implement the InferenceEngine::IExtension::getOpSets method if the extension contains custom layers. Read [Custom nGraph Operation](AddingNGraphOps.md) for more information. 
diff --git a/docs/template_extension/CMakeLists.txt b/docs/template_extension/CMakeLists.txt index 90a9e886b35..7296d14328f 100644 --- a/docs/template_extension/CMakeLists.txt +++ b/docs/template_extension/CMakeLists.txt @@ -2,36 +2,5 @@ # SPDX-License-Identifier: Apache-2.0 # -# [cmake:extension] -set(CMAKE_CXX_STANDARD 11) - -set(TARGET_NAME "template_extension") - -find_package(OpenVINO REQUIRED COMPONENTS Runtime OPTIONAL_COMPONENTS ONNX) -find_package(OpenCV QUIET COMPONENTS core) - -set(SRC cpu_kernel.cpp extension.cpp op.cpp) - -if(OpenCV_FOUND) - set(SRC ${SRC} fft_kernel.cpp fft_op.cpp) -endif() - -add_library(${TARGET_NAME} MODULE ${SRC}) - -if(OpenCV_FOUND) - target_compile_definitions(${TARGET_NAME} PRIVATE OPENCV_IMPORT_ENABLED) - target_link_libraries(${TARGET_NAME} PRIVATE opencv_core) -endif() - -target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_INFERENCE_EXTENSION_API) -target_link_libraries(${TARGET_NAME} PRIVATE openvino::core openvino::runtime) - -if(OpenVINO_Frontend_ONNX_FOUND) - target_link_libraries(${TARGET_NAME} PRIVATE openvino::frontend::onnx) - target_compile_definitions(${TARGET_NAME} PRIVATE OPENVINO_ONNX_FRONTEND_ENABLED) -endif() -# [cmake:extension] - -# Enable code style check -file(GLOB_RECURSE template_extension_src "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp") -add_clang_format_target(${TARGET_NAME}_clang FOR_SOURCES ${template_extension_src}) +add_subdirectory(old) +add_subdirectory(new) diff --git a/docs/template_extension/new/CMakeLists.txt b/docs/template_extension/new/CMakeLists.txt new file mode 100644 index 00000000000..7229eba8def --- /dev/null +++ b/docs/template_extension/new/CMakeLists.txt @@ -0,0 +1,22 @@ +# Copyright (C) 2018-2021 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +# [cmake:extension] +set(CMAKE_CXX_STANDARD 11) + +set(TARGET_NAME "template_ov_extension") + +find_package(OpenVINO) + +set(SRC identity.cpp ov_extension.cpp) + +add_library(${TARGET_NAME} 
MODULE ${SRC}) + +target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_OPENVINO_EXTENSION_API) +target_link_libraries(${TARGET_NAME} PRIVATE openvino::core) +# [cmake:extension] + +# Enable code style check +file(GLOB_RECURSE template_extension_src "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp") +add_clang_format_target(${TARGET_NAME}_clang FOR_SOURCES ${template_extension_src}) diff --git a/docs/template_extension/new/identity.cpp b/docs/template_extension/new/identity.cpp new file mode 100644 index 00000000000..01488fbdc42 --- /dev/null +++ b/docs/template_extension/new/identity.cpp @@ -0,0 +1,48 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "identity.hpp" + +using namespace TemplateExtension; + +//! [op:ctor] +Identity::Identity(const ov::Output<ov::Node>& arg) : Op({arg}) { + constructor_validate_and_infer_types(); +} +//! [op:ctor] + +//! [op:validate] +void Identity::validate_and_infer_types() { + // Operation doesn't change shapes and element type + set_output_type(0, get_input_element_type(0), get_input_partial_shape(0)); +} +//! [op:validate] + +//! [op:copy] +std::shared_ptr<ov::Node> Identity::clone_with_new_inputs(const ov::OutputVector& new_args) const { + OPENVINO_ASSERT(new_args.size() == 1, "Incorrect number of new arguments"); + + return std::make_shared<Identity>(new_args.at(0)); +} +//! [op:copy] + +//! [op:visit_attributes] +bool Identity::visit_attributes(ov::AttributeVisitor& visitor) { + return true; +} +//! [op:visit_attributes] + +//! [op:evaluate] +bool Identity::evaluate(ov::runtime::TensorVector& outputs, const ov::runtime::TensorVector& inputs) const { + auto in = inputs[0]; + auto out = outputs[0]; + out.set_shape(in.get_shape()); + memcpy(out.data(), in.data(), in.get_byte_size()); + return true; +} + +bool Identity::has_evaluate() const { + return true; +} +//!
[op:evaluate] diff --git a/docs/template_extension/new/identity.hpp b/docs/template_extension/new/identity.hpp new file mode 100644 index 00000000000..db08b0514ba --- /dev/null +++ b/docs/template_extension/new/identity.hpp @@ -0,0 +1,27 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include <openvino/op/op.hpp> + +//! [op:header] +namespace TemplateExtension { + +class Identity : public ov::op::Op { +public: + OPENVINO_OP("Identity"); + + Identity() = default; + Identity(const ov::Output<ov::Node>& arg); + void validate_and_infer_types() override; + std::shared_ptr<ov::Node> clone_with_new_inputs(const ov::OutputVector& new_args) const override; + bool visit_attributes(ov::AttributeVisitor& visitor) override; + + bool evaluate(ov::runtime::TensorVector& outputs, const ov::runtime::TensorVector& inputs) const override; + bool has_evaluate() const override; +}; +//! [op:header] + +} // namespace TemplateExtension diff --git a/docs/template_extension/new/ov_extension.cpp b/docs/template_extension/new/ov_extension.cpp new file mode 100644 index 00000000000..c240328771b --- /dev/null +++ b/docs/template_extension/new/ov_extension.cpp @@ -0,0 +1,11 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include <openvino/core/extension.hpp> +#include <openvino/core/op_extension.hpp> + +#include "identity.hpp" + +OPENVINO_CREATE_EXTENSIONS( + std::vector<ov::Extension::Ptr>({std::make_shared<ov::OpExtension<TemplateExtension::Identity>>()})); diff --git a/docs/template_extension/old/CMakeLists.txt b/docs/template_extension/old/CMakeLists.txt new file mode 100644 index 00000000000..90a9e886b35 --- /dev/null +++ b/docs/template_extension/old/CMakeLists.txt @@ -0,0 +1,37 @@ +# Copyright (C) 2018-2021 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +# [cmake:extension] +set(CMAKE_CXX_STANDARD 11) + +set(TARGET_NAME "template_extension") + +find_package(OpenVINO REQUIRED COMPONENTS Runtime OPTIONAL_COMPONENTS ONNX) +find_package(OpenCV QUIET COMPONENTS core) + +set(SRC cpu_kernel.cpp extension.cpp op.cpp) +
+if(OpenCV_FOUND) + set(SRC ${SRC} fft_kernel.cpp fft_op.cpp) +endif() + +add_library(${TARGET_NAME} MODULE ${SRC}) + +if(OpenCV_FOUND) + target_compile_definitions(${TARGET_NAME} PRIVATE OPENCV_IMPORT_ENABLED) + target_link_libraries(${TARGET_NAME} PRIVATE opencv_core) +endif() + +target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_INFERENCE_EXTENSION_API) +target_link_libraries(${TARGET_NAME} PRIVATE openvino::core openvino::runtime) + +if(OpenVINO_Frontend_ONNX_FOUND) + target_link_libraries(${TARGET_NAME} PRIVATE openvino::frontend::onnx) + target_compile_definitions(${TARGET_NAME} PRIVATE OPENVINO_ONNX_FRONTEND_ENABLED) +endif() +# [cmake:extension] + +# Enable code style check +file(GLOB_RECURSE template_extension_src "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp") +add_clang_format_target(${TARGET_NAME}_clang FOR_SOURCES ${template_extension_src}) diff --git a/docs/template_extension/cpu_kernel.cpp b/docs/template_extension/old/cpu_kernel.cpp similarity index 100% rename from docs/template_extension/cpu_kernel.cpp rename to docs/template_extension/old/cpu_kernel.cpp diff --git a/docs/template_extension/cpu_kernel.hpp b/docs/template_extension/old/cpu_kernel.hpp similarity index 100% rename from docs/template_extension/cpu_kernel.hpp rename to docs/template_extension/old/cpu_kernel.hpp diff --git a/docs/template_extension/extension.cpp b/docs/template_extension/old/extension.cpp similarity index 100% rename from docs/template_extension/extension.cpp rename to docs/template_extension/old/extension.cpp diff --git a/docs/template_extension/extension.hpp b/docs/template_extension/old/extension.hpp similarity index 100% rename from docs/template_extension/extension.hpp rename to docs/template_extension/old/extension.hpp diff --git a/docs/template_extension/fft_kernel.cpp b/docs/template_extension/old/fft_kernel.cpp similarity index 100% rename from docs/template_extension/fft_kernel.cpp rename to 
docs/template_extension/old/fft_kernel.cpp diff --git a/docs/template_extension/fft_kernel.hpp b/docs/template_extension/old/fft_kernel.hpp similarity index 100% rename from docs/template_extension/fft_kernel.hpp rename to docs/template_extension/old/fft_kernel.hpp diff --git a/docs/template_extension/fft_op.cpp b/docs/template_extension/old/fft_op.cpp similarity index 100% rename from docs/template_extension/fft_op.cpp rename to docs/template_extension/old/fft_op.cpp diff --git a/docs/template_extension/fft_op.hpp b/docs/template_extension/old/fft_op.hpp similarity index 100% rename from docs/template_extension/fft_op.hpp rename to docs/template_extension/old/fft_op.hpp diff --git a/docs/template_extension/op.cpp b/docs/template_extension/old/op.cpp similarity index 100% rename from docs/template_extension/op.cpp rename to docs/template_extension/old/op.cpp diff --git a/docs/template_extension/op.hpp b/docs/template_extension/old/op.hpp similarity index 100% rename from docs/template_extension/op.hpp rename to docs/template_extension/old/op.hpp diff --git a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp index e1442b139c0..d77671f9497 100644 --- a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp +++ b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp @@ -6,6 +6,7 @@ #include "ie_plugin_config.hpp" #include "ngraph/partial_shape.hpp" +#include "openvino/op/util/framework_node.hpp" const std::string EXPORTED_NETWORK_NAME = "undefined"; std::map precision_map = {{"FP32", InferenceEngine::Precision::FP32}, @@ -197,7 +198,11 @@ public: } std::map getOpSets() override { - return {{"framework_node_ext", ngraph::OpSet()}}; + std::map opsets; + ngraph::OpSet opset; + opset.insert(); + opsets["util"] = opset; + return opsets; } void Unload() noexcept override {} diff --git 
a/inference-engine/src/inference_engine/CMakeLists.txt b/inference-engine/src/inference_engine/CMakeLists.txt index f927c906c91..4dc8fe70d70 100644 --- a/inference-engine/src/inference_engine/CMakeLists.txt +++ b/inference-engine/src/inference_engine/CMakeLists.txt @@ -19,6 +19,10 @@ file (GLOB LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/cpp_interfaces/interface/*.cpp ) +# Add include path to so_extension.hpp +set_property(SOURCE ${CMAKE_CURRENT_SOURCE_DIR}/src/ie_core.cpp + APPEND PROPERTY INCLUDE_DIRECTORIES "${OpenVINO_SOURCE_DIR}/ngraph/core/src/") + # TODO: WA for OneHot pass usage in reshape set(LEGACY_SRC_ROOT "${IE_MAIN_SOURCE_DIR}/src/legacy_api/src") set(LEGACY_LIBRARY_SHARED_SRCS diff --git a/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp b/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp index 140d757bc8c..ca4e85de4e3 100644 --- a/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp +++ b/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp @@ -16,7 +16,10 @@ #include #include "ie_plugin_config.hpp" +#include "openvino/core/extension.hpp" +#include "openvino/core/op_extension.hpp" #include "openvino/core/version.hpp" +#include "openvino/op/op.hpp" #include "openvino/runtime/common.hpp" #include "openvino/runtime/executable_network.hpp" #include "openvino/runtime/remote_context.hpp" @@ -147,9 +150,79 @@ public: /** * @brief Registers extension + * @deprecated This method is deprecated. 
Please use other add_extension methods * @param extension Pointer to already loaded extension */ + OPENVINO_DEPRECATED("Please use add_extension(ov::Extension) or add_extension(path_to_library) instead.") void add_extension(const std::shared_ptr& extension); + /** + * @brief Registers extension + * @param library_path path to library with ov::Extension + */ + void add_extension(const std::string& library_path); +#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT + /** + * @brief Registers extension + * @param library_path path to library with ov::Extension + */ + void add_extension(const std::wstring& library_path); +#endif + /** + * @brief Registers extension + * @param extension Pointer to base extension + */ + void add_extension(const std::shared_ptr& extension); + /** + * @brief Registers extensions + * @param extensions Vector of loaded base extensions + */ + void add_extension(const std::vector>& extensions); + + /** + * @brief Registers extension + * @param extension Extension class which is inherited from ov::Extension class + */ + template ::value, bool>::type = true> + void add_extension(const T& extension) { + std::shared_ptr ext = std::make_shared(extension); + add_extension(ext); + } + + /** + * @brief Registers extensions + * @param extension Extension class which is inherited from ov::Extension class + * @param args list of extensions + */ + template ::value, bool>::type = true> + void add_extension(const T& extension, Targs... 
args) { + std::shared_ptr ext = std::make_shared(extension); + add_extension(ext); + add_extension(args...); + } + + /** + * @brief Registers custom operation + */ + template ::value, bool>::type = true> + void add_extension() { + std::shared_ptr ext = std::make_shared>(); + add_extension(ext); + } + + /** + * @brief Registers custom operations + */ + template ::value && sizeof...(Targs), bool>::type = true> + void add_extension() { + std::shared_ptr ext = std::make_shared>(); + add_extension(ext); + if (sizeof...(Targs) > 0) + add_extension(); + } /** * @brief Creates an executable network from a previously exported network diff --git a/inference-engine/src/inference_engine/src/ie_core.cpp b/inference-engine/src/inference_engine/src/ie_core.cpp index 7ab62534904..5f55efdd789 100644 --- a/inference-engine/src/inference_engine/src/ie_core.cpp +++ b/inference-engine/src/inference_engine/src/ie_core.cpp @@ -38,6 +38,7 @@ #include "openvino/runtime/executable_network.hpp" #include "openvino/util/file_util.hpp" #include "openvino/util/shared_object.hpp" +#include "so_extension.hpp" #include "xml_parse_utils.h" #ifdef OPENVINO_STATIC_LIBRARY @@ -195,6 +196,7 @@ class CoreImpl : public ie::ICore, public std::enable_shared_from_this opsetNames; // TODO: make extensions to be optional with conditional compilation mutable std::vector extensions; + std::vector ov_extensions; std::map pluginRegistry; mutable std::mutex pluginsMutex; // to lock parallel access to pluginRegistry and plugins @@ -483,12 +485,12 @@ public: ie::CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath) const override { OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from file"); - return InferenceEngine::details::ReadNetwork(modelPath, binPath, extensions, newAPI); + return InferenceEngine::details::ReadNetwork(modelPath, binPath, extensions, ov_extensions, newAPI); } ie::CNNNetwork ReadNetwork(const std::string& model, const ie::Blob::CPtr& 
weights) const override { OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from memory"); - return InferenceEngine::details::ReadNetwork(model, weights, extensions, newAPI); + return InferenceEngine::details::ReadNetwork(model, weights, extensions, ov_extensions, newAPI); } bool isNewAPI() const override { @@ -986,6 +988,13 @@ public: AddExtensionUnsafe(extension); } + void AddOVExtensions(const std::vector& extensions) { + std::lock_guard lock(pluginsMutex); + for (const auto& ext : extensions) { + ov_extensions.emplace_back(ext); + } + } + /** * @brief Provides a list of extensions * @return A list of registered extensions @@ -994,6 +1003,10 @@ public: return extensions; } + const std::vector& GetOVExtensions() const { + return ov_extensions; + } + std::map GetVersions(const std::string& deviceName) const { std::map versions; std::vector deviceNames; @@ -1480,6 +1493,22 @@ void Core::add_extension(const ie::IExtensionPtr& extension) { OV_CORE_CALL_STATEMENT(_impl->AddExtension(extension);); } +void Core::add_extension(const std::string& library_path) { + add_extension(ov::detail::load_extensions(library_path)); +} +#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT +void Core::add_extension(const std::wstring& library_path) { + add_extension(ov::detail::load_extensions(library_path)); +} +#endif + +void Core::add_extension(const std::shared_ptr& extension) { + add_extension(std::vector>{extension}); +} +void Core::add_extension(const std::vector>& extensions) { + OV_CORE_CALL_STATEMENT({ _impl->AddOVExtensions(extensions); }); +} + ExecutableNetwork Core::import_model(std::istream& modelStream, const std::string& deviceName, const ConfigMap& config) { diff --git a/inference-engine/src/inference_engine/src/ie_network_reader.cpp b/inference-engine/src/inference_engine/src/ie_network_reader.cpp index cb4b6f8b360..cb55c706c89 100644 --- a/inference-engine/src/inference_engine/src/ie_network_reader.cpp +++ 
b/inference-engine/src/inference_engine/src/ie_network_reader.cpp @@ -35,6 +35,42 @@ #include "transformations/rt_info/old_api_map_attribute.hpp" #include "transformations/utils/utils.hpp" +namespace ov { + +/* + * @brief Wrapper for old IE extensions to new API + */ +class ExtensionWrapper : public ov::BaseOpExtension { +public: + ExtensionWrapper(const InferenceEngine::IExtensionPtr& ext, const std::string& opset, const std::string& name) + : m_ext(ext), + m_opset_name(opset), + m_type(name), + m_ext_type(m_type.c_str(), 0, m_opset_name.c_str()) {} + + const ov::DiscreteTypeInfo& get_type_info() const override { + return m_ext_type; + } + + ngraph::OutputVector create(const ngraph::OutputVector& inputs, ngraph::AttributeVisitor& visitor) const override { + std::shared_ptr node(m_ext->getOpSets().at(m_opset_name).create_insensitive(m_ext_type.name)); + + node->set_arguments(inputs); + if (node->visit_attributes(visitor)) { + node->constructor_validate_and_infer_types(); + } + return node->outputs(); + } + +private: + InferenceEngine::IExtensionPtr m_ext; + std::string m_opset_name; + std::string m_type; + ov::DiscreteTypeInfo m_ext_type; +}; + +} // namespace ov + namespace InferenceEngine { #ifdef ENABLE_IR_V7_READER @@ -391,14 +427,13 @@ ngraph::frontend::FrontEndManager& get_frontend_manager() { return manager; } -ov::Extensions get_extensions_map(const std::vector& exts) { - ov::Extensions extensions; +std::vector wrap_old_extensions(const std::vector& exts) { + std::vector extensions; for (const auto& ext : exts) { for (const auto& item : ext->getOpSets()) { - if (extensions.count(item.first)) { - IE_THROW() << "Extension with " << item.first << " name already exists"; + for (const auto& type_info : item.second.get_types_info()) { + extensions.emplace_back(std::make_shared(ext, item.first, type_info.name)); } - extensions[item.first] = item.second; } } return extensions; @@ -409,6 +444,7 @@ ov::Extensions get_extensions_map(const std::vector& exts, + const 
std::vector& ov_exts, bool newAPI) { #ifdef ENABLE_IR_V7_READER // IR v7 obsolete code @@ -439,9 +475,6 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath, ngraph::frontend::InputModel::Ptr inputModel; ov::VariantVector params{ov::make_variant(model_path)}; - if (!exts.empty()) { - params.emplace_back(ov::make_variant(get_extensions_map(exts))); - } if (!binPath.empty()) { #if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) @@ -453,8 +486,12 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath, } FE = manager.load_by_model(params); - if (FE) + if (FE) { + FE->add_extension(ov_exts); + if (!exts.empty()) + FE->add_extension(wrap_old_extensions(exts)); inputModel = FE->load(params); + } if (inputModel) { auto ngFunc = FE->convert(inputModel); @@ -470,6 +507,7 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath, CNNNetwork details::ReadNetwork(const std::string& model, const Blob::CPtr& weights, const std::vector& exts, + const std::vector& ov_exts, bool newAPI) { std::istringstream modelStringStream(model); std::istream& modelStream = modelStringStream; @@ -501,17 +539,18 @@ CNNNetwork details::ReadNetwork(const std::string& model, ov::VariantVector params{ov::make_variant(&modelStream)}; if (weights) { char* data = weights->cbuffer().as(); - ov::Weights weights_buffer = + std::shared_ptr weights_buffer = std::make_shared>(data, weights->byteSize(), weights); params.emplace_back(ov::make_variant(weights_buffer)); } - if (!exts.empty()) { - params.emplace_back(ov::make_variant(get_extensions_map(exts))); - } FE = manager.load_by_model(params); - if (FE) + if (FE) { + FE->add_extension(ov_exts); + if (!exts.empty()) + FE->add_extension(wrap_old_extensions(exts)); inputModel = FE->load(params); + } if (inputModel) { auto ngFunc = FE->convert(inputModel); return convert_to_cnnnetwork(ngFunc, exts, newAPI); diff --git a/inference-engine/src/inference_engine/src/ie_network_reader.hpp 
b/inference-engine/src/inference_engine/src/ie_network_reader.hpp index ab370123bf4..bee8c832216 100644 --- a/inference-engine/src/inference_engine/src/ie_network_reader.hpp +++ b/inference-engine/src/inference_engine/src/ie_network_reader.hpp @@ -9,6 +9,7 @@ #include "cpp/ie_cnn_network.h" #include "ie_blob.h" #include "ie_iextension.h" +#include "openvino/core/extension.hpp" namespace InferenceEngine { namespace details { @@ -19,27 +20,28 @@ namespace details { * @param binPath path to bin file, if path is empty, will try to read bin file with the same name as xml and * if bin file with the same name was not found, will load IR without weights. * @param exts vector with extensions + * @param ov_exts vector with OpenVINO extensions * @param newAPI Whether this function is called from OpenVINO 2.0 API * @return CNNNetwork */ CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath, const std::vector& exts, + const std::vector& ov_exts, bool newAPI); /** * @brief Reads IR xml and bin (with the same name) files * @param model string with IR * @param weights shared pointer to constant blob with weights * @param exts vector with extensions - * @note Reading ONNX models doesn't support loading weights from data blobs. - If you are using an ONNX model with external data files, please use the - ReadNetwork function overload which takes a filesystem path to the model. 
+ * @param ov_exts vector with OpenVINO extensions * @param newAPI Whether this function is called from OpenVINO 2.0 API * @return CNNNetwork */ CNNNetwork ReadNetwork(const std::string& model, const Blob::CPtr& weights, const std::vector& exts, + const std::vector& ov_exts, bool newAPI); } // namespace details diff --git a/inference-engine/tests/functional/inference_engine/CMakeLists.txt b/inference-engine/tests/functional/inference_engine/CMakeLists.txt index 81c06c3d373..a107a133ca8 100644 --- a/inference-engine/tests/functional/inference_engine/CMakeLists.txt +++ b/inference-engine/tests/functional/inference_engine/CMakeLists.txt @@ -26,6 +26,7 @@ set(LINK_LIBRARIES set(DEPENDENCIES mock_engine template_extension + template_ov_extension lptNgraphFunctions sharedTestClasses test_model_zoo diff --git a/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp b/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp index 6342c3d741a..c5f779aebb7 100644 --- a/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp +++ b/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp @@ -124,7 +124,13 @@ public: } std::map getOpSets() override { - return {{"framework_node_ext", ngraph::OpSet()}}; + static std::map opsets; + if (opsets.empty()) { + ngraph::OpSet opset; + opset.insert(); + opsets["util"] = opset; + } + return opsets; } void Unload() noexcept override {} diff --git a/inference-engine/tests/functional/inference_engine/ov_extension_test.cpp b/inference-engine/tests/functional/inference_engine/ov_extension_test.cpp new file mode 100644 index 00000000000..a360c063b13 --- /dev/null +++ b/inference-engine/tests/functional/inference_engine/ov_extension_test.cpp @@ -0,0 +1,334 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include +#include +#include +#include +#include + +#include 
"common_test_utils/test_common.hpp" +#include "file_utils.h" +#include "ie_iextension.h" +#include "ngraph/op/op.hpp" +#include "openvino/core/op_extension.hpp" +#include "openvino/runtime/core.hpp" + +using namespace testing; +using namespace InferenceEngine; +using namespace CommonTestUtils; + +class OVExtensionTests : public TestsCommon { +public: + ov::runtime::Core core; + + void test() { + std::string model = R"V0G0N( + + + + + + + 1 + 3 + 22 + 22 + + + + + + + 1 + 3 + 22 + 22 + + + + + 1 + 3 + 22 + 22 + + + + + + + 1 + 3 + 22 + 22 + + + + + + + + + +)V0G0N"; + ov::runtime::Tensor weights; + ov::PartialShape refBeforeReshape{1, 3, 22, 22}; + ov::PartialShape refAfterReshape{8, 9, 33, 66}; + + auto network = core.read_model(model, weights); + std::map newShapes; + newShapes["in_data"] = refAfterReshape; + + EXPECT_EQ(refBeforeReshape, network->output().get_partial_shape()); + EXPECT_NO_THROW(network->reshape(newShapes)); + EXPECT_EQ(refAfterReshape, network->output().get_partial_shape()); + } + + void test_two_op() { + std::string model = R"V0G0N( + + + + + + + 1 + 3 + 22 + 22 + + + + + + + 1 + 3 + 22 + 22 + + + + + 1 + 3 + 22 + 22 + + + + + + + 1 + 3 + 22 + 22 + + + + + 1 + 3 + 22 + 22 + + + + + + + 1 + 3 + 22 + 22 + + + + + + + + + + +)V0G0N"; + ov::runtime::Tensor weights; + ov::PartialShape refBeforeReshape{1, 3, 22, 22}; + ov::PartialShape refAfterReshape{8, 9, 33, 66}; + + auto network = core.read_model(model, weights); + std::map newShapes; + newShapes["in_data"] = refAfterReshape; + + EXPECT_EQ(refBeforeReshape, network->output().get_partial_shape()); + EXPECT_NO_THROW(network->reshape(newShapes)); + EXPECT_EQ(refAfterReshape, network->output().get_partial_shape()); + } +}; + +namespace { + +std::string getOVExtensionPath() { + return FileUtils::makePluginLibraryName({}, std::string("template_ov_extension") + IE_BUILD_POSTFIX); +} + +} // namespace + +class CustomOldIdentity : public ngraph::op::Op { +public: + static constexpr ngraph::NodeTypeInfo 
type_info{"Identity", 0}; + const ngraph::NodeTypeInfo& get_type_info() const override { + return type_info; + } + + CustomOldIdentity() = default; + CustomOldIdentity(const ngraph::Output& arg) : Op({arg}) { + constructor_validate_and_infer_types(); + } + + void validate_and_infer_types() override { + set_output_type(0, get_input_element_type(0), get_input_partial_shape(0)); + } + + std::shared_ptr clone_with_new_inputs(const ngraph::OutputVector& new_args) const override { + if (new_args.size() != 1) { + throw ngraph::ngraph_error("Incorrect number of new arguments"); + } + + return std::make_shared(new_args.at(0)); + } + + bool visit_attributes(ngraph::AttributeVisitor& visitor) override { + return true; + } +}; + +constexpr ngraph::NodeTypeInfo CustomOldIdentity::type_info; + +class TestTileOldExtension : public InferenceEngine::IExtension { +public: + void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {} + + void Unload() noexcept override {} + + std::map getOpSets() override { + static std::map opsets; + if (opsets.empty()) { + ngraph::OpSet opset; + opset.insert(); + opsets["extension"] = opset; + } + return opsets; + } +}; + +class CustomNewIdentity : public ov::op::Op { +public: + OPENVINO_OP("Identity") + + CustomNewIdentity() = default; + CustomNewIdentity(const ov::Output& arg) : Op({arg}) { + constructor_validate_and_infer_types(); + } + + void validate_and_infer_types() override { + set_output_type(0, get_input_element_type(0), get_input_partial_shape(0)); + } + + std::shared_ptr clone_with_new_inputs(const ov::OutputVector& new_args) const override { + OPENVINO_ASSERT(new_args.size() != 1, "Incorrect number of new arguments"); + + return std::make_shared(new_args.at(0)); + } + + bool visit_attributes(ov::AttributeVisitor& visitor) override { + return true; + } +}; + +class CustomReLU : public ov::op::Op { +public: + OPENVINO_OP("CustomReLU") + + CustomReLU() = default; + CustomReLU(const ov::Output& arg) : Op({arg}) 
{ + constructor_validate_and_infer_types(); + } + + void validate_and_infer_types() override { + set_output_type(0, get_input_element_type(0), get_input_partial_shape(0)); + } + + std::shared_ptr clone_with_new_inputs(const ov::OutputVector& new_args) const override { + OPENVINO_ASSERT(new_args.size() != 1, "Incorrect number of new arguments"); + + return std::make_shared(new_args.at(0)); + } + + bool visit_attributes(ov::AttributeVisitor& visitor) override { + return true; + } +}; + +TEST_F(OVExtensionTests, ReshapeIRWithOldExtension) { + OPENVINO_SUPPRESS_DEPRECATED_START + core.add_extension(std::make_shared()); + OPENVINO_SUPPRESS_DEPRECATED_END + test(); +} + +TEST_F(OVExtensionTests, ReshapeIRWithNewExtensionsLib) { + core.add_extension(getOVExtensionPath()); + test(); +} + +TEST_F(OVExtensionTests, ReshapeIRWithNewExtensionPtr) { + core.add_extension(std::make_shared>()); + test(); +} + +TEST_F(OVExtensionTests, ReshapeIRWithNewExtension) { + core.add_extension(ov::OpExtension()); + test(); +} + +TEST_F(OVExtensionTests, ReshapeIRWithNewOp) { + core.add_extension(); + test(); +} + +TEST_F(OVExtensionTests, IncorrectReshapeIRWithNewExtensionPtr) { + core.add_extension(std::make_shared>()); + EXPECT_ANY_THROW(test_two_op()); +} + +TEST_F(OVExtensionTests, IncorrectReshapeIRWithNewExtension) { + core.add_extension(ov::OpExtension()); + EXPECT_ANY_THROW(test_two_op()); +} + +TEST_F(OVExtensionTests, IncorrectReshapeIRWithNewOp) { + core.add_extension(); + EXPECT_ANY_THROW(test_two_op()); +} + +TEST_F(OVExtensionTests, ReshapeIRWithSeveralNewExtensionPtrs) { + core.add_extension( + {std::make_shared>(), std::make_shared>()}); + test_two_op(); +} + +TEST_F(OVExtensionTests, ReshapeIRWithSeveralNewExtensions) { + core.add_extension(ov::OpExtension(), ov::OpExtension()); + test_two_op(); +} + +TEST_F(OVExtensionTests, ReshapeIRWithSeveralNewOps) { + core.add_extension(); + test_two_op(); +} diff --git 
a/inference-engine/tests/functional/inference_engine/ov_shared_object_test.cpp b/inference-engine/tests/functional/inference_engine/ov_shared_object_test.cpp index 2f30c30d5cb..8ae3850c0ee 100644 --- a/inference-engine/tests/functional/inference_engine/ov_shared_object_test.cpp +++ b/inference-engine/tests/functional/inference_engine/ov_shared_object_test.cpp @@ -11,17 +11,17 @@ using namespace ::testing; using namespace std; -class SharedObjectOVTests: public ::testing::Test { +class SharedObjectOVTests : public ::testing::Test { protected: std::string get_mock_engine_name() { return FileUtils::makePluginLibraryName(InferenceEngine::getIELibraryPath(), - std::string("mock_engine") + IE_BUILD_POSTFIX); + std::string("mock_engine") + IE_BUILD_POSTFIX); } void loadDll(const string &libraryName) { shared_object = ov::util::load_shared_object(libraryName.c_str()); } - std::shared_ptr shared_object; + std::shared_ptr shared_object; using CreateF = void(std::shared_ptr&); diff --git a/ngraph/core/include/openvino/core/extension.hpp b/ngraph/core/include/openvino/core/extension.hpp new file mode 100644 index 00000000000..9e997336ac9 --- /dev/null +++ b/ngraph/core/include/openvino/core/extension.hpp @@ -0,0 +1,56 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include +#include + +#include "openvino/core/core_visibility.hpp" +#include "openvino/core/type.hpp" + +// Use extern "C" in order to avoid issues with mangling +#if defined(_WIN32) && defined(IMPLEMENT_OPENVINO_EXTENSION_API) +# define OPENVINO_EXTENSION_C_API OPENVINO_EXTERN_C OPENVINO_CORE_EXPORTS +# define OPENVINO_EXTENSION_API OPENVINO_CORE_EXPORTS +#else +# define OPENVINO_EXTENSION_C_API OPENVINO_EXTERN_C OPENVINO_API +# define OPENVINO_EXTENSION_API OPENVINO_API +#endif + +namespace ov { + +class Extension; + +/** + * @brief The class provides the base interface for OpenVINO extensions + */ +class OPENVINO_API Extension : public 
std::enable_shared_from_this { +public: + using Ptr = std::shared_ptr; + + virtual ~Extension(); +}; + +/** + * @brief The entry point for library with OpenVINO extensions + * + * @param vector of extensions + */ +OPENVINO_EXTENSION_C_API +void create_extensions(std::vector&); + +} // namespace ov + +/** + * @brief Macro generates the entry point for the library + * + * @param vector of extensions + */ +#define OPENVINO_CREATE_EXTENSIONS(extensions) \ + OPENVINO_EXTENSION_C_API \ + void ::ov::create_extensions(std::vector<::ov::Extension::Ptr>& ext) { \ + ext = extensions; \ + } diff --git a/ngraph/core/include/openvino/core/op_extension.hpp b/ngraph/core/include/openvino/core/op_extension.hpp new file mode 100644 index 00000000000..735d2dba358 --- /dev/null +++ b/ngraph/core/include/openvino/core/op_extension.hpp @@ -0,0 +1,72 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "openvino/core/attribute_visitor.hpp" +#include "openvino/core/extension.hpp" +#include "openvino/core/node.hpp" +#include "openvino/core/node_vector.hpp" + +namespace ov { + +/** + * @brief The base interface for OpenVINO operation extensions + */ +class OPENVINO_EXTENSION_API BaseOpExtension : public Extension { +public: + using Ptr = std::shared_ptr; + /** + * @brief Returns the type info of operation + * + * @return ov::DiscreteTypeInfo + */ + virtual const ov::DiscreteTypeInfo& get_type_info() const = 0; + /** + * @brief Method creates an OpenVINO operation + * + * @param inputs vector of input ports + * @param visitor attribute visitor which allows to read necessaty arguments + * + * @return vector of output ports + */ + virtual ov::OutputVector create(const ov::OutputVector& inputs, ov::AttributeVisitor& visitor) const = 0; + + /** + * @brief Destructor + */ + ~BaseOpExtension() override; +}; + +/** + * @brief The default implementation of OpenVINO operation extensions + */ +template +class OpExtension : public 
BaseOpExtension { +public: + /** + * @brief Default constructor + */ + OpExtension() { + const auto& ext_type = get_type_info(); + OPENVINO_ASSERT(ext_type.name != nullptr && ext_type.version_id != nullptr, + "Extension type should have information about operation set and operation type."); + } + + const ov::DiscreteTypeInfo& get_type_info() const override { + return T::get_type_info_static(); + } + + ov::OutputVector create(const ov::OutputVector& inputs, ov::AttributeVisitor& visitor) const override { + std::shared_ptr node = std::make_shared(); + + node->set_arguments(inputs); + if (node->visit_attributes(visitor)) { + node->constructor_validate_and_infer_types(); + } + return node->outputs(); + } +}; + +} // namespace ov diff --git a/ngraph/core/src/extension.cpp b/ngraph/core/src/extension.cpp new file mode 100644 index 00000000000..8c8941e4910 --- /dev/null +++ b/ngraph/core/src/extension.cpp @@ -0,0 +1,17 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/core/extension.hpp" + +#include + +#include "openvino/core/except.hpp" +#include "openvino/core/op_extension.hpp" +#include "openvino/util/file_util.hpp" +#include "openvino/util/shared_object.hpp" + +using namespace ov; + +ov::Extension::~Extension() = default; +ov::BaseOpExtension::~BaseOpExtension() = default; diff --git a/ngraph/core/src/so_extension.cpp b/ngraph/core/src/so_extension.cpp new file mode 100644 index 00000000000..0b9a95a4c66 --- /dev/null +++ b/ngraph/core/src/so_extension.cpp @@ -0,0 +1,13 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "so_extension.hpp" + +const ov::Extension::Ptr& ov::detail::SOExtension::extension() const { + return m_ext; +} + +const std::shared_ptr ov::detail::SOExtension::shared_object() const { + return m_so; +} diff --git a/ngraph/core/src/so_extension.hpp b/ngraph/core/src/so_extension.hpp new file mode 100644 index 00000000000..7a7a892b229 
--- /dev/null +++ b/ngraph/core/src/so_extension.hpp @@ -0,0 +1,49 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#pragma once + +#include "openvino/core/extension.hpp" +#include "openvino/core/visibility.hpp" +#include "openvino/util/file_util.hpp" +#include "openvino/util/shared_object.hpp" + +namespace ov { +namespace detail { + +class OPENVINO_API SOExtension : public Extension { +public: + SOExtension(const std::shared_ptr& so, const Extension::Ptr& ext) : m_so(so), m_ext(ext) {} + + const Extension::Ptr& extension() const; + + const std::shared_ptr shared_object() const; + +private: + std::shared_ptr m_so; + Extension::Ptr m_ext; +}; + +inline std::vector load_extensions(const std::string& path) { + auto so = ov::util::load_shared_object(path.c_str()); + using CreateFunction = void(std::vector&); + std::vector extensions; + reinterpret_cast(ov::util::get_symbol(so, "create_extensions"))(extensions); + + std::vector so_extensions; + so_extensions.reserve(extensions.size()); + + for (auto&& ex : extensions) { + so_extensions.emplace_back(std::make_shared(so, ex)); + } + return so_extensions; +} + +#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT +inline std::vector load_extensions(const std::wstring& path) { + return load_extensions(ov::util::wstring_to_string(path).c_str()); +} +#endif + +} // namespace detail +} // namespace ov diff --git a/ngraph/frontend/frontend_manager/CMakeLists.txt b/ngraph/frontend/frontend_manager/CMakeLists.txt index 0dd4224da6f..8ac00ebef87 100644 --- a/ngraph/frontend/frontend_manager/CMakeLists.txt +++ b/ngraph/frontend/frontend_manager/CMakeLists.txt @@ -12,6 +12,10 @@ file(GLOB_RECURSE LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/*.cpp) file(GLOB_RECURSE LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/src/*.hpp) file(GLOB_RECURSE LIBRARY_PUBLIC_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/include/*.hpp) +# Add include path to so_extension.hpp +set_property(SOURCE 
${CMAKE_CURRENT_SOURCE_DIR}/src/frontend_manager.cpp + APPEND PROPERTY INCLUDE_DIRECTORIES "${OpenVINO_SOURCE_DIR}/ngraph/core/src/") + set(FRONTEND_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/include) source_group("src" FILES ${LIBRARY_SRC}) diff --git a/ngraph/frontend/frontend_manager/include/frontend_manager/frontend.hpp b/ngraph/frontend/frontend_manager/include/frontend_manager/frontend.hpp index bd92665030e..a58bf9de640 100644 --- a/ngraph/frontend/frontend_manager/include/frontend_manager/frontend.hpp +++ b/ngraph/frontend/frontend_manager/include/frontend_manager/frontend.hpp @@ -12,6 +12,8 @@ #include "input_model.hpp" #include "ngraph/function.hpp" #include "ngraph/variant.hpp" +#include "openvino/core/extension.hpp" +#include "openvino/core/op_extension.hpp" namespace ngraph { namespace frontend { @@ -88,6 +90,40 @@ public: /// \return Current frontend name. Empty string if not implemented virtual std::string get_name() const; + /// \brief Register base extension in the FrontEnd + /// \param extension base extension + virtual void add_extension(const std::shared_ptr& extension); + /// \brief Register base extensions in the FrontEnd + /// \param extensions vector of extensions + void add_extension(const std::vector>& extensions); + /// \brief Registers extension + /// \param library_path path to library with ov::Extension + void add_extension(const std::string& library_path); + +#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT + /// \brief Registers extension + /// \param library_path path to library with ov::Extension + void add_extension(const std::wstring& library_path); +#endif + + /// @brief Registers extension + /// @param extension Extension class which is inherited from ov::BaseExtension class + template ::value, bool>::type = true> + void add_extension(const T& extension) { + std::shared_ptr ext = std::make_shared(extension); + add_extension(ext); + } + /// @brief Registers extensions + /// @param extension Extension class which is inherited from 
ov::Extension class + template ::value, bool>::type = true> + void add_extension(const T& extension, Targs... args) { + std::shared_ptr ext = std::make_shared(extension); + add_extension(ext); + add_extension(args...); + } + protected: virtual bool supported_impl(const std::vector>& variants) const; virtual InputModel::Ptr load_impl(const std::vector>& variants) const; diff --git a/ngraph/frontend/frontend_manager/include/frontend_manager/parameters.hpp b/ngraph/frontend/frontend_manager/include/frontend_manager/parameters.hpp index b68572e99d1..d5f1ec28e6e 100644 --- a/ngraph/frontend/frontend_manager/include/frontend_manager/parameters.hpp +++ b/ngraph/frontend/frontend_manager/include/frontend_manager/parameters.hpp @@ -56,8 +56,6 @@ public: VariantWrapper(const value_type& value) : VariantImpl(value) {} }; -using WeightsVariant = VariantWrapper; - using Extensions = std::map; template <> @@ -69,6 +67,4 @@ public: VariantWrapper(const value_type& value) : VariantImpl(value) {} }; -using ExtensionsVariant = VariantWrapper; - -} // namespace ov \ No newline at end of file +} // namespace ov diff --git a/ngraph/frontend/frontend_manager/src/frontend_manager.cpp b/ngraph/frontend/frontend_manager/src/frontend_manager.cpp index a5fbc864b69..68e15dc3e1f 100644 --- a/ngraph/frontend/frontend_manager/src/frontend_manager.cpp +++ b/ngraph/frontend/frontend_manager/src/frontend_manager.cpp @@ -4,12 +4,12 @@ #include "frontend_manager/frontend_manager.hpp" -#include -#include -#include - #include "frontend_manager/frontend_exceptions.hpp" +#include "frontend_manager/place.hpp" +#include "ngraph/except.hpp" +#include "openvino/util/env_util.hpp" #include "plugin_loader.hpp" +#include "so_extension.hpp" #include "utils.hpp" using namespace ngraph; @@ -149,6 +149,25 @@ void FrontEnd::normalize(std::shared_ptr function) const { FRONT_END_NOT_IMPLEMENTED(normalize); } +void FrontEnd::add_extension(const std::shared_ptr& extension) { + // Each frontend can support own set of 
extensions, so this method should be implemented on the frontend side +} + +void FrontEnd::add_extension(const std::vector>& extensions) { + for (const auto& ext : extensions) + add_extension(ext); +} + +void FrontEnd::add_extension(const std::string& library_path) { + add_extension(ov::detail::load_extensions(library_path)); +} + +#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT +void FrontEnd::add_extension(const std::wstring& library_path) { + add_extension(ov::detail::load_extensions(library_path)); +} +#endif + std::string FrontEnd::get_name() const { return std::string(); } diff --git a/ngraph/frontend/ir/CMakeLists.txt b/ngraph/frontend/ir/CMakeLists.txt index 09d3da692db..922cc0bc2cd 100644 --- a/ngraph/frontend/ir/CMakeLists.txt +++ b/ngraph/frontend/ir/CMakeLists.txt @@ -10,6 +10,10 @@ file(GLOB_RECURSE LIBRARY_PUBLIC_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/include/*.h set(${TARGET_NAME}_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/include) +# Add include path to so_extension.hpp +set_property(SOURCE ${CMAKE_CURRENT_SOURCE_DIR}/src/frontend.cpp + APPEND PROPERTY INCLUDE_DIRECTORIES "${OpenVINO_SOURCE_DIR}/ngraph/core/src/") + # Create named folders for the sources within the .vcproj # Empty name lists them directly under the .vcproj diff --git a/ngraph/frontend/ir/include/ir_frontend/frontend.hpp b/ngraph/frontend/ir/include/ir_frontend/frontend.hpp index 11da7a7a8dc..7b1faa012ad 100644 --- a/ngraph/frontend/ir/include/ir_frontend/frontend.hpp +++ b/ngraph/frontend/ir/include/ir_frontend/frontend.hpp @@ -27,6 +27,10 @@ public: /// \return IR frontend name. 
std::string get_name() const override; + /// \brief Register extension in the FrontEnd + /// \param extension base extension + void add_extension(const ov::Extension::Ptr& extension) override; + protected: /// \brief Check if FrontEndIR can recognize model from given parts /// \param params Can be path to the model file or std::istream @@ -37,6 +41,10 @@ protected: /// \param params Can be path to the model file or std::istream /// \return InputModel::Ptr InputModel::Ptr load_impl(const std::vector>& params) const override; + +private: + std::vector> shared_objects; + std::vector extensions; }; } // namespace frontend diff --git a/ngraph/frontend/ir/include/ir_frontend/model.hpp b/ngraph/frontend/ir/include/ir_frontend/model.hpp index 38c7fe95f31..2116a0fe50e 100644 --- a/ngraph/frontend/ir/include/ir_frontend/model.hpp +++ b/ngraph/frontend/ir/include/ir_frontend/model.hpp @@ -9,6 +9,7 @@ #include #include #include +#include namespace ngraph { namespace frontend { @@ -18,7 +19,9 @@ class IR_API InputModelIR : public InputModel { std::shared_ptr _impl; public: - InputModelIR(std::istream& stream, const ov::Weights& weights, const ov::Extensions& extensions); + InputModelIR(std::istream& stream, + const std::shared_ptr& weights, + const std::unordered_map& extensions); std::shared_ptr convert(); }; diff --git a/ngraph/frontend/ir/src/frontend.cpp b/ngraph/frontend/ir/src/frontend.cpp index c82372d0146..91ffea9996a 100644 --- a/ngraph/frontend/ir/src/frontend.cpp +++ b/ngraph/frontend/ir/src/frontend.cpp @@ -2,16 +2,20 @@ // SPDX-License-Identifier: Apache-2.0 // -#include +#include "ir_frontend/frontend.hpp" #include -#include -#include -#include -#include -#include #include +#include "ir_deserializer.hpp" +#include "ir_frontend/model.hpp" +#include "ir_frontend/utility.hpp" +#include "ngraph/variant.hpp" +#include "openvino/core/op_extension.hpp" +#include "openvino/util/file_util.hpp" +#include "so_extension.hpp" +#include "xml_parse_utils.h" + using namespace 
ngraph; namespace ngraph { @@ -95,17 +99,36 @@ bool FrontEndIR::supported_impl(const std::vector>& var return version >= 10 && version <= 11; } +void FrontEndIR::add_extension(const ov::Extension::Ptr& ext) { + if (auto so_ext = std::dynamic_pointer_cast(ext)) { + if (std::dynamic_pointer_cast(so_ext->extension())) { + shared_objects.emplace_back(so_ext->shared_object()); + extensions.emplace_back(so_ext->extension()); + } + } + if (std::dynamic_pointer_cast(ext)) + extensions.emplace_back(ext); +} + InputModel::Ptr FrontEndIR::load_impl(const std::vector>& variants) const { std::ifstream local_model_stream; std::istream* provided_model_stream = nullptr; - ov::Weights weights; - ov::Extensions extensions; + std::shared_ptr weights; + + auto create_extensions_map = [&]() -> std::unordered_map { + std::unordered_map exts; + for (const auto& ext : extensions) { + if (auto base_ext = std::dynamic_pointer_cast(ext)) + exts.insert({base_ext->get_type_info(), base_ext}); + } + return exts; + }; auto create_input_model = [&]() -> std::shared_ptr { if (provided_model_stream) { - return std::make_shared(*provided_model_stream, weights, extensions); + return std::make_shared(*provided_model_stream, weights, create_extensions_map()); } else if (local_model_stream.is_open()) { - auto input_model = std::make_shared(local_model_stream, weights, extensions); + auto input_model = std::make_shared(local_model_stream, weights, create_extensions_map()); local_model_stream.close(); return input_model; } @@ -153,10 +176,8 @@ InputModel::Ptr FrontEndIR::load_impl(const std::vector } else if (ov::is_type>(variant)) { weights_path = ov::as_type_ptr>(variant)->get(); #endif - } else if (ov::is_type(variant)) { - weights = ov::as_type_ptr(variant)->get(); - } else if (ov::is_type(variant)) { - extensions = ov::as_type_ptr(variant)->get(); + } else if (ov::is_type>>(variant)) { + weights = ov::as_type_ptr>>(variant)->get(); } } @@ -205,6 +226,7 @@ InputModel::Ptr FrontEndIR::load_impl(const 
std::vector std::shared_ptr FrontEndIR::convert(InputModel::Ptr model) const { auto ir_model = std::dynamic_pointer_cast(model); + OPENVINO_ASSERT(ir_model != nullptr); return ir_model->convert(); } diff --git a/ngraph/frontend/ir/src/ir_deserializer.cpp b/ngraph/frontend/ir/src/ir_deserializer.cpp index 0c8d512430d..4ced1f2f2eb 100644 --- a/ngraph/frontend/ir/src/ir_deserializer.cpp +++ b/ngraph/frontend/ir/src/ir_deserializer.cpp @@ -397,8 +397,9 @@ void XmlDeserializer::on_adapter(const std::string& name, adapter.set(ngraph_function); } -std::shared_ptr XmlDeserializer::parse_function(const pugi::xml_node& root, - const ov::Weights& weights) { +std::shared_ptr XmlDeserializer::parse_function( + const pugi::xml_node& root, + const std::shared_ptr& weights) { // OV_ITT_SCOPE_CHAIN(FIRST_INFERENCE, taskChain, itt::domains::V10Reader_RT, "V10Parser", "Parse"); struct FunctionNodes { @@ -615,6 +616,13 @@ std::shared_ptr XmlDeserializer::createNode(const std::vector ngraphNode; + ov::DiscreteTypeInfo type(params.type.c_str(), 0, params.version.c_str()); + auto extensionIt = m_extensions.find(type); + + if (extensionIt != m_extensions.end()) { + XmlDeserializer visitor(node, weights, m_opsets, m_extensions, m_variables, m_version); + ngraphNode = (*extensionIt->second).create(inputs, visitor).at(0).get_node_shared_ptr(); + } // Find registered opset auto opsetIt = m_opsets.find(params.version); @@ -660,7 +668,7 @@ std::shared_ptr XmlDeserializer::createNode(const std::vectoralloc_buffer_on_visit_attributes(false); } ngraphNode->set_arguments(inputs); - XmlDeserializer visitor(node, weights, m_opsets, m_variables, m_version); + XmlDeserializer visitor(node, weights, m_opsets, m_extensions, m_variables, m_version); if (ngraphNode->visit_attributes(visitor)) { ngraphNode->constructor_validate_and_infer_types(); @@ -669,10 +677,9 @@ std::shared_ptr XmlDeserializer::createNode(const std::vectorclone_with_new_inputs(ngraphNode->input_values()); } - - if (!ngraphNode && 
m_use_framework_node) { - ngraphNode = std::make_shared(inputs); - XmlDeserializer visitor(node, weights, m_opsets, m_variables, m_version); + if (!ngraphNode && m_extensions.count(ov::op::util::FrameworkNode::get_type_info_static())) { + ngraphNode = std::make_shared(inputs); + XmlDeserializer visitor(node, weights, m_opsets, m_extensions, m_variables, m_version); ngraphNode->visit_attributes(visitor); size_t index{0}; diff --git a/ngraph/frontend/ir/src/ir_deserializer.hpp b/ngraph/frontend/ir/src/ir_deserializer.hpp index 8182f748d3d..90398fc42f2 100644 --- a/ngraph/frontend/ir/src/ir_deserializer.hpp +++ b/ngraph/frontend/ir/src/ir_deserializer.hpp @@ -14,7 +14,10 @@ #include #include +#include "openvino/core/op_extension.hpp" + namespace ov { + struct GenericLayerParams { struct LayerPortData { size_t portId; @@ -55,13 +58,15 @@ struct GenericLayerParams { class XmlDeserializer : public ngraph::AttributeVisitor { public: explicit XmlDeserializer(const pugi::xml_node& node, - const ov::Weights& weights, + const std::shared_ptr& weights, const std::unordered_map& opsets, + const std::unordered_map& extensions, std::unordered_map>& variables, size_t version) : m_node(node), m_weights(weights), m_opsets(opsets), + m_extensions(extensions), m_variables(variables), m_version(version) {} @@ -134,10 +139,6 @@ public: adapter.set(value); } - void use_framework_node(bool flag) { - m_use_framework_node = flag; - } - private: struct IoMap { using NodeIdToIoIndex = std::unordered_map; @@ -163,7 +164,8 @@ private: /// \param node xml node representation /// \param weights weights attached to current node /// \return shared pointer to function representing input node - std::shared_ptr parse_function(const pugi::xml_node& root, const ov::Weights& weights); + std::shared_ptr parse_function(const pugi::xml_node& root, + const std::shared_ptr& weights); /// \brief Traverses xml node representation in order to get the purpose attribute of /// inputs/outputs in the body of Loop 
op. \param node xml node representation \return struct /// with value of purpuse attribute @@ -180,6 +182,7 @@ private: const pugi::xml_node m_node; const ov::Weights& m_weights; const std::unordered_map& m_opsets; + const std::unordered_map& m_extensions; std::unordered_map>& m_variables; /// @@ -188,7 +191,6 @@ private: /// IoMap io_map; - bool m_use_framework_node{false}; int64_t m_version; }; } // namespace ov diff --git a/ngraph/frontend/ir/src/model.cpp b/ngraph/frontend/ir/src/model.cpp index b968659d80c..2fa09ab8c4a 100644 --- a/ngraph/frontend/ir/src/model.cpp +++ b/ngraph/frontend/ir/src/model.cpp @@ -15,7 +15,9 @@ using namespace ngraph; using namespace InferenceEngine; namespace { -void ParsePreProcess(pugi::xml_node& root, ov::Weights weights, std::shared_ptr f) { +void ParsePreProcess(pugi::xml_node& root, + std::shared_ptr weights, + std::shared_ptr f) { /* Preprocessing block can have two preprocessing types: * * @@ -183,13 +185,16 @@ void ParsePreProcess(pugi::xml_node& root, ov::Weights weights, std::shared_ptr< namespace ngraph { namespace frontend { class InputModelIR::InputModelIRImpl { - ov::Weights m_weights; - ov::Extensions m_extensions; + std::shared_ptr m_weights; + std::unordered_map m_extensions; + std::unordered_map m_opsets; pugi::xml_node m_root; pugi::xml_document m_xml_doc; public: - InputModelIRImpl(std::istream& stream, const ov::Weights& weights, const ov::Extensions& extensions) + InputModelIRImpl(std::istream& stream, + const std::shared_ptr& weights, + const std::unordered_map& extensions) : m_weights(weights), m_extensions(extensions) { pugi::xml_parse_result res = m_xml_doc.load(stream); @@ -197,12 +202,22 @@ public: IE_THROW() << res.description() << " at offset " << res.offset; } m_root = m_xml_doc.document_element(); + m_opsets["opset1"] = ngraph::get_opset1(); + m_opsets["opset2"] = ngraph::get_opset2(); + m_opsets["opset3"] = ngraph::get_opset3(); + m_opsets["opset4"] = ngraph::get_opset4(); + m_opsets["opset5"] = 
ngraph::get_opset5(); + m_opsets["opset6"] = ngraph::get_opset6(); + m_opsets["opset7"] = ngraph::get_opset7(); + m_opsets["opset8"] = ngraph::get_opset8(); } std::shared_ptr convert(); }; -InputModelIR::InputModelIR(std::istream& stream, const ov::Weights& weights, const ov::Extensions& extensions) { +InputModelIR::InputModelIR(std::istream& stream, + const std::shared_ptr& weights, + const std::unordered_map& extensions) { _impl = std::make_shared(stream, weights, extensions); } @@ -211,29 +226,11 @@ std::shared_ptr InputModelIR::convert() { } std::shared_ptr InputModelIR::InputModelIRImpl::convert() { - std::unordered_map opsets; std::unordered_map> variables; // Load default opsets - opsets["opset1"] = ngraph::get_opset1(); - opsets["opset2"] = ngraph::get_opset2(); - opsets["opset3"] = ngraph::get_opset3(); - opsets["opset4"] = ngraph::get_opset4(); - opsets["opset5"] = ngraph::get_opset5(); - opsets["opset6"] = ngraph::get_opset6(); - opsets["opset7"] = ngraph::get_opset7(); - opsets["opset8"] = ngraph::get_opset8(); - - // Load custom opsets - for (const auto& it : m_extensions) { - if (opsets.find(it.first) != opsets.end()) - IE_THROW() << "Cannot add opset with name: " << it.first << ". 
Opset with the same name already exists."; - opsets[it.first] = it.second; - } - size_t version = XMLParseUtils::GetUIntAttr(m_root, "version", 0); - ov::XmlDeserializer visitor(m_root, m_weights, opsets, variables, version); - visitor.use_framework_node(opsets.count("framework_node_ext")); + ov::XmlDeserializer visitor(m_root, m_weights, m_opsets, m_extensions, variables, version); std::shared_ptr function; visitor.on_attribute("net", function); function->get_rt_info()["version"] = std::make_shared>(version); diff --git a/ngraph/test/CMakeLists.txt b/ngraph/test/CMakeLists.txt index da883851087..86ffdde6ef1 100644 --- a/ngraph/test/CMakeLists.txt +++ b/ngraph/test/CMakeLists.txt @@ -37,12 +37,12 @@ set(SRC copy.cpp element_type.cpp eval.cpp + extension.cpp file_util.cpp float16.cpp framework_node.cpp function.cpp graph_rewrite.cpp - includes.cpp input_output_assign.cpp int4.cpp intervals.cpp @@ -402,13 +402,16 @@ set(SRC util.cpp ) +# Add include path to so_extension.hpp +set_property(SOURCE ${CMAKE_CURRENT_SOURCE_DIR}/extension.cpp + APPEND PROPERTY INCLUDE_DIRECTORIES "${OpenVINO_SOURCE_DIR}/ngraph/core/src/") + if(SUGGEST_OVERRIDE_SUPPORTED) set_source_files_properties(ov_tensor_test.cpp PROPERTIES COMPILE_OPTIONS -Wno-suggest-override) endif() -set_source_files_properties(includes.cpp PROPERTIES COMPILE_DEFINITIONS - NGRAPH_INCLUDES="${PROJECT_SOURCE_DIR}/src/ngraph") +list(APPEND UNIT_TESTS_DEPENDENCIES template_ov_extension) if (ENABLE_MKL_DNN AND NGRAPH_UNIT_TEST_BACKENDS_ENABLE) message(STATUS "NGRAPH_TESTS: IE:CPU enabled") @@ -570,8 +573,10 @@ target_include_directories(unit-test PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/runtime get_target_property(NGRAPH_SRC_DIR openvino::core SOURCE_DIR) target_include_directories(unit-test PRIVATE ${NGRAPH_SRC_DIR}/src) -add_definitions("-DCURDIR=\"${CMAKE_CURRENT_SOURCE_DIR}\"") -add_definitions("-DJSON_INCLUDES=\"${JSON_INCLUDE_DIR}\"") +target_compile_definitions(unit-test + PRIVATE + 
SHARED_LIB_PREFIX="${CMAKE_SHARED_LIBRARY_PREFIX}" + SHARED_LIB_SUFFIX="${IE_BUILD_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}") if(UNIT_TESTS_DEPENDENCIES) add_dependencies(unit-test ${UNIT_TESTS_DEPENDENCIES}) @@ -587,6 +592,7 @@ target_link_libraries(unit-test PRIVATE ngraph_test_util interpreter_backend Threads::Threads openvino::conditional_compilation + openvino::util frontend_manager) # Protobuf-lite does not support parsing files from prototxt format diff --git a/ngraph/test/extension.cpp b/ngraph/test/extension.cpp new file mode 100644 index 00000000000..2fcc2be9be2 --- /dev/null +++ b/ngraph/test/extension.cpp @@ -0,0 +1,71 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/core/extension.hpp" + +#include + +#include "openvino/core/graph_util.hpp" +#include "openvino/core/op_extension.hpp" +#include "openvino/util/file_util.hpp" +#include "so_extension.hpp" + +#ifdef _WIN32 +# ifndef NOMINMAX +# define NOMINMAX +# endif +# include +# if defined(WINAPI_FAMILY) && !WINAPI_PARTITION_DESKTOP +# error "Only WINAPI_PARTITION_DESKTOP is supported, because of LoadLibrary[A|W]" +# endif +#elif defined(__linux) || defined(__APPLE__) +# include +#endif + +static std::string find_my_pathname() { +#ifdef _WIN32 + HMODULE hModule = GetModuleHandleW(SHARED_LIB_PREFIX L"ngraph" SHARED_LIB_SUFFIX); + WCHAR wpath[MAX_PATH]; + GetModuleFileNameW(hModule, wpath, MAX_PATH); + std::wstring ws(wpath); + std::string path(ws.begin(), ws.end()); + replace(path.begin(), path.end(), '\\', '/'); + path = ov::util::get_directory(path); + path += "/"; + return path; +#elif defined(__linux) || defined(__APPLE__) + Dl_info dl_info; + dladdr(reinterpret_cast(ov::replace_output_update_name), &dl_info); + return ov::util::get_directory(dl_info.dli_fname); +#else +# error "Unsupported OS" +#endif +} +std::string get_extension_path() { + return ov::util::make_plugin_library_name(find_my_pathname(), + 
std::string("template_ov_extension") + IE_BUILD_POSTFIX); +} + +TEST(extension, load_extension) { + EXPECT_NO_THROW(ov::detail::load_extensions(get_extension_path())); +} + +TEST(extension, load_extension_and_cast) { + std::vector so_extensions; + EXPECT_NO_THROW(so_extensions = ov::detail::load_extensions(get_extension_path())); + EXPECT_EQ(1, so_extensions.size()); + std::vector extensions; + std::vector> so; + for (const auto& ext : so_extensions) { + if (auto so_ext = std::dynamic_pointer_cast(ext)) { + extensions.emplace_back(so_ext->extension()); + so.emplace_back(so_ext->shared_object()); + } + } + so_extensions.clear(); + EXPECT_EQ(1, extensions.size()); + EXPECT_NE(nullptr, dynamic_cast(extensions[0].get())); + EXPECT_NE(nullptr, std::dynamic_pointer_cast(extensions[0])); + extensions.clear(); +} diff --git a/ngraph/test/includes.cpp b/ngraph/test/includes.cpp deleted file mode 100644 index 137dc409951..00000000000 --- a/ngraph/test/includes.cpp +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (C) 2018-2021 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#include -#include -#include - -#include "gtest/gtest.h" - -// #include "ngraph/codegen/compiler.hpp" -// #include "ngraph/file_util.hpp" -// #include "ngraph/log.hpp" -// #include "ngraph/util.hpp" - -// using namespace std; -// using namespace ngraph; - -// TEST(DISABLED_include, complete) -// { -// vector include_files; -// set ext_list{".hpp"}; -// set exclude{"onnx_import", "onnxifi", "intelgpu", "op_tbl.hpp"}; -// auto func = [&](const std::string& file, bool is_dir) { -// if (!is_dir && file.size() > 4) -// { -// for (const string& x : exclude) -// { -// if (file.find(x) != file.npos) -// { -// return; -// } -// } -// string ext = file.substr(file.size() - 4); -// if (ext_list.find(ext) != ext_list.end()) -// { -// include_files.push_back(file); -// } -// } -// }; -// file_util::iterate_files(NGRAPH_INCLUDES, func, true); - -// for (const string& include : include_files) -// { -// 
string source = "#include <" + include + ">\n "; - -// codegen::Compiler compiler; -// compiler.add_header_search_path(JSON_INCLUDES); -// auto module = compiler.compile(source); -// if (!module) -// { -// cout << "fail " << include << endl; -// } -// } -// }