From e2635a00532eeadb146ed0929ce2d8a65efb63ea Mon Sep 17 00:00:00 2001 From: Ilya Churaev Date: Fri, 20 Jan 2023 13:43:48 +0400 Subject: [PATCH] Introduce ov::ICore (#15096) * Added new ov::ICore * Fixed style and removed unused functions * Fixed mock core * Fixed linker issue * Fixed inference functional tests * Update copyrights * Use new plugin API inside the new API * Fixed CPU tests * Removed old plugin from ov::IPlugin * Fixed Inference tests * Added comments and some other minor changes * Split core interfaces * Added logic to load v7 IRs without crash * Fixed thread tests * Fixed comments: * Add const keyword for core methods * Rename core methods * Fixed mock core * Added more const methods * Fixed tests * Remove unnecessary exception catch * Fixed some comments * Remove default implementations of get/set property * Fix additional comments * Removed set property * Try to fix linux build * Revert function move --- src/core/include/openvino/core/any.hpp | 4 +- src/core/include/openvino/runtime/tensor.hpp | 2 + .../interface/ie_iplugin_internal.hpp | 49 +- src/inference/dev_api/ie_icore.hpp | 167 +-- .../dev_api/openvino/runtime/icore.hpp | 212 ++++ .../dev_api/openvino/runtime/iplugin.hpp | 237 ++++ .../openvino/runtime/remote_context.hpp | 12 + src/inference/src/check_network_batchable.cpp | 13 +- src/inference/src/check_network_batchable.hpp | 8 +- src/inference/src/compilation_context.cpp | 5 +- src/inference/src/compilation_context.hpp | 2 +- src/inference/src/core.cpp | 65 +- src/inference/src/cpp/ie_plugin.hpp | 225 +--- .../interface/ie_iplugin_internal.cpp | 6 + src/inference/src/dev/converter_utils.cpp | 329 +++++ src/inference/src/dev/converter_utils.hpp | 26 + src/inference/src/dev/core_impl.cpp | 1085 ++++++++--------- src/inference/src/dev/core_impl.hpp | 287 +++-- src/inference/src/dev/core_impl_ie.cpp | 443 +++++++ src/inference/src/dev/iplugin.cpp | 47 + src/inference/src/dev/iplugin_wrapper.cpp | 114 ++ src/inference/src/dev/iplugin_wrapper.hpp | 166 +++ src/inference/src/dev/plugin.cpp | 146 +++ src/inference/src/dev/plugin.hpp | 82 ++ src/inference/src/ie_core.cpp | 27 +- .../cpp_interfaces/interface/mock_icore.hpp | 88 +- .../mocks/mock_engine/mock_plugin.cpp | 73 +- .../common/ie_pipelines/pipelines.cpp | 2 +- .../tests_pipelines_full_pipeline.cpp | 2 +- 29 files changed, 2742 insertions(+), 1182 deletions(-) create mode 100644 src/inference/dev_api/openvino/runtime/icore.hpp create mode 100644 src/inference/dev_api/openvino/runtime/iplugin.hpp create mode 100644 src/inference/src/dev/converter_utils.cpp create mode 100644 src/inference/src/dev/converter_utils.hpp create mode 100644 src/inference/src/dev/core_impl_ie.cpp create mode 100644 src/inference/src/dev/iplugin.cpp create mode 100644 src/inference/src/dev/iplugin_wrapper.cpp create mode 100644 src/inference/src/dev/iplugin_wrapper.hpp create mode 100644 src/inference/src/dev/plugin.cpp create mode 100644 src/inference/src/dev/plugin.hpp diff --git a/src/core/include/openvino/core/any.hpp b/src/core/include/openvino/core/any.hpp index 1de6515f19b..e93eb05537c 100644 --- a/src/core/include/openvino/core/any.hpp +++ b/src/core/include/openvino/core/any.hpp @@ -26,6 +26,7 @@ class ExecutableNetwork; } // namespace InferenceEngine namespace ov { +class Plugin; /** @cond INTERNAL */ class Any; namespace util { @@ -339,7 +340,6 @@ class RuntimeAttribute; class CompiledModel; class RemoteContext; class RemoteTensor; -class InferencePlugin; /** * @brief This class represents an object to work with different types 
@@ -632,7 +632,7 @@ class OPENVINO_API Any { friend class ::ov::CompiledModel; friend class ::ov::RemoteContext; friend class ::ov::RemoteTensor; - friend class ::ov::InferencePlugin; + friend class ::ov::Plugin; Any(const Any& other, const std::vector>& so); diff --git a/src/core/include/openvino/runtime/tensor.hpp b/src/core/include/openvino/runtime/tensor.hpp index 77b6cf677e4..93bb02c4e0a 100644 --- a/src/core/include/openvino/runtime/tensor.hpp +++ b/src/core/include/openvino/runtime/tensor.hpp @@ -24,6 +24,7 @@ class Blob; namespace ov { class Core; +class CoreImpl; class InferRequest; class RemoteContext; class VariableState; @@ -47,6 +48,7 @@ protected: Tensor(const std::shared_ptr& impl, const std::vector>& so); friend class ov::Core; + friend class ov::CoreImpl; friend class ov::InferRequest; friend class ov::RemoteContext; friend class ov::VariableState; diff --git a/src/inference/dev_api/cpp_interfaces/interface/ie_iplugin_internal.hpp b/src/inference/dev_api/cpp_interfaces/interface/ie_iplugin_internal.hpp index 803b944cea8..23898911263 100644 --- a/src/inference/dev_api/cpp_interfaces/interface/ie_iplugin_internal.hpp +++ b/src/inference/dev_api/cpp_interfaces/interface/ie_iplugin_internal.hpp @@ -19,19 +19,17 @@ #include "ie_iextension.h" #include "ie_input_info.hpp" #include "ie_parameter.hpp" +#include "openvino/runtime/iplugin.hpp" #include "openvino/util/pp.hpp" #include "so_ptr.hpp" -namespace ov { -class Function; -class ICore; -} // namespace ov namespace InferenceEngine { class ExecutorManager; class IExecutableNetworkInternal; class RemoteContext; class IExtension; +class ICore; /** * @brief Copies preprocess info @@ -287,13 +285,13 @@ public: * @brief Sets pointer to ICore interface * @param core Pointer to Core interface */ - virtual void SetCore(std::weak_ptr core); + virtual void SetCore(std::weak_ptr core); /** * @brief Gets reference to ICore interface * @return Reference to ICore interface */ - virtual std::shared_ptr GetCore() const noexcept; + virtual std::shared_ptr GetCore() const noexcept; /** * @brief Provides an information about used API @@ -374,7 +372,7 @@ protected: std::string _pluginName; //!< A device name that plugins enables std::map _config; //!< A map config keys -> values - std::weak_ptr _core; //!< A pointer to ICore interface + std::weak_ptr _core; //!< A pointer to ICore interface std::shared_ptr _executorManager; //!< A tasks execution manager bool _isNewAPI; //!< A flag which shows used API }; @@ -382,7 +380,7 @@ protected: /** * @private */ -using CreatePluginEngineFunc = void(std::shared_ptr&); +using CreatePluginEngineFunc = void(std::shared_ptr<::ov::IPlugin>&); /** * @private @@ -403,6 +401,9 @@ using CreateExtensionFunc = void(std::shared_ptr&); */ constexpr static const auto create_plugin_function = OV_PP_TOSTRING(IE_CREATE_PLUGIN); +INFERENCE_ENGINE_API_CPP(std::shared_ptr<::ov::IPlugin>) +convert_plugin(const std::shared_ptr& from); + } // namespace InferenceEngine /** @@ -410,20 +411,22 @@ constexpr static const auto create_plugin_function = OV_PP_TOSTRING(IE_CREATE_PL * @brief Defines the exported `IE_CREATE_PLUGIN` function which is used to create a plugin instance * @ingroup ie_dev_api_plugin_api */ -#define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) 
\ - INFERENCE_PLUGIN_API(void) \ - IE_CREATE_PLUGIN(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) noexcept(false); \ - void IE_CREATE_PLUGIN(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) noexcept(false) { \ - try { \ - plugin = ::std::make_shared(__VA_ARGS__); \ - } catch (const InferenceEngine::Exception&) { \ - throw; \ - } catch (const std::exception& ex) { \ - IE_THROW() << ex.what(); \ - } catch (...) { \ - IE_THROW(Unexpected); \ - } \ - plugin->SetVersion(version); \ +#define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \ + INFERENCE_PLUGIN_API(void) \ + IE_CREATE_PLUGIN(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false); \ + void IE_CREATE_PLUGIN(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false) { \ + std::shared_ptr<::InferenceEngine::IInferencePlugin> ie_plugin; \ + try { \ + ie_plugin = ::std::make_shared(__VA_ARGS__); \ + } catch (const InferenceEngine::Exception&) { \ + throw; \ + } catch (const std::exception& ex) { \ + IE_THROW() << ex.what(); \ + } catch (...) { \ + IE_THROW(Unexpected); \ + } \ + ie_plugin->SetVersion(version); \ + plugin = convert_plugin(ie_plugin); \ } /** @@ -431,7 +434,7 @@ constexpr static const auto create_plugin_function = OV_PP_TOSTRING(IE_CREATE_PL */ #define IE_DEFINE_PLUGIN_CREATE_FUNCTION_DECLARATION(_IE_CREATE_PLUGIN_FUNC) \ INFERENCE_PLUGIN_API(void) \ - _IE_CREATE_PLUGIN_FUNC(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) noexcept(false) + _IE_CREATE_PLUGIN_FUNC(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false) /** * @private diff --git a/src/inference/dev_api/ie_icore.hpp b/src/inference/dev_api/ie_icore.hpp index b33cff340ad..32f08028d3f 100644 --- a/src/inference/dev_api/ie_icore.hpp +++ b/src/inference/dev_api/ie_icore.hpp @@ -17,16 +17,12 @@ #include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp" #include "ie_parameter.hpp" #include "ie_remote_context.hpp" +#include "openvino/runtime/icore.hpp" #include "openvino/runtime/properties.hpp" -namespace ov { +namespace InferenceEngine { -/** - * @interface ICore - * @brief Minimal ICore interface to allow plugin to get information from Core Inference Engine class. - * @ingroup ie_dev_api_plugin_api - */ -class ICore { +class ICore : public ov::ICore { public: /** * @brief Reads IR xml and bin (with the same name) files @@ -35,9 +31,9 @@ public: * @param frontendMode read network without post-processing or other transformations * @return CNNNetwork */ - virtual ie::CNNNetwork ReadNetwork(const std::string& model, - const ie::Blob::CPtr& weights, - bool frontendMode = false) const = 0; + virtual CNNNetwork ReadNetwork(const std::string& model, + const Blob::CPtr& weights, + bool frontendMode = false) const = 0; /** * @brief Reads IR xml and bin files @@ -46,7 +42,7 @@ public: * if bin file with the same name was not found, will load IR without weights. * @return CNNNetwork */ - virtual ie::CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath) const = 0; + virtual CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath) const = 0; /** * @brief Creates an executable network from a network object. 
@@ -60,9 +56,9 @@ public: * operation * @return An executable network reference */ - virtual ie::SoExecutableNetworkInternal LoadNetwork(const ie::CNNNetwork& network, - const std::string& deviceName, - const std::map& config = {}) = 0; + virtual SoExecutableNetworkInternal LoadNetwork(const CNNNetwork& network, + const std::string& deviceName, + const std::map& config = {}) = 0; /** * @brief Creates an executable network from a network object. @@ -76,28 +72,9 @@ public: * operation * @return An executable network reference */ - virtual ie::SoExecutableNetworkInternal LoadNetwork(const ie::CNNNetwork& network, - const ie::RemoteContext::Ptr& remoteCtx, - const std::map& config = {}) = 0; - - /** - * @brief Creates an executable network from a model file. - * - * Users can create as many networks as they need and use - * them simultaneously (up to the limitation of the hardware resources) - * - * @param modelPath Path to model - * @param deviceName Name of device to load network to - * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load - * operation - * @param val Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered - * @return An executable network reference - */ - virtual ie::SoExecutableNetworkInternal LoadNetwork( - const std::string& modelPath, - const std::string& deviceName, - const std::map& config, - const std::function& val = nullptr) = 0; + virtual SoExecutableNetworkInternal LoadNetwork(const CNNNetwork& network, + const RemoteContext::Ptr& remoteCtx, + const std::map& config = {}) = 0; /** * @brief Creates an executable network from a model memory. @@ -113,12 +90,30 @@ public: * @param val Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered * @return An executable network reference */ - virtual ie::SoExecutableNetworkInternal LoadNetwork( + virtual SoExecutableNetworkInternal LoadNetwork( const std::string& modelStr, - const ie::Blob::CPtr& weights, + const InferenceEngine::Blob::CPtr& weights, const std::string& deviceName, const std::map& config, - const std::function& val = nullptr) = 0; + const std::function& val = nullptr) = 0; + + /** + * @brief Creates an executable network from a model file. 
+ * + * Users can create as many networks as they need and use + * them simultaneously (up to the limitation of the hardware resources) + * + * @param modelPath Path to model + * @param deviceName Name of device to load network to + * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load + * operation + * @param val Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered + * @return An executable network reference + */ + virtual SoExecutableNetworkInternal LoadNetwork(const std::string& modelPath, + const std::string& deviceName, + const std::map& config, + const std::function& val = nullptr) = 0; /** * @brief Creates an executable network from a previously exported network @@ -128,9 +123,9 @@ public: * operation* * @return An executable network reference */ - virtual ie::SoExecutableNetworkInternal ImportNetwork(std::istream& networkModel, - const std::string& deviceName = {}, - const std::map& config = {}) = 0; + virtual SoExecutableNetworkInternal ImportNetwork(std::istream& networkModel, + const std::string& deviceName = {}, + const std::map& config = {}) = 0; /** * @brief Query device if it supports specified network with specified configuration @@ -140,9 +135,9 @@ public: * @param config Optional map of pairs: (config parameter name, config parameter value) * @return An object containing a map of pairs a layer name -> a device name supporting this layer. */ - virtual ie::QueryNetworkResult QueryNetwork(const ie::CNNNetwork& network, - const std::string& deviceName, - const std::map& config) const = 0; + virtual QueryNetworkResult QueryNetwork(const CNNNetwork& network, + const std::string& deviceName, + const std::map& config) const = 0; /** * @brief Gets general runtime metric for dedicated hardware. @@ -154,7 +149,9 @@ public: * @param name - metric name to request. * @return Metric value corresponding to metric key. */ - virtual Any GetMetric(const std::string& deviceName, const std::string& name, const AnyMap& options = {}) const = 0; + virtual ov::Any GetMetric(const std::string& deviceName, + const std::string& name, + const ov::AnyMap& options = {}) const = 0; /** * @brief Gets configuration dedicated to device behaviour. @@ -165,7 +162,7 @@ public: * @param name - config key. * @return Value of config corresponding to config key. */ - virtual Any GetConfig(const std::string& deviceName, const std::string& name) const = 0; + virtual ov::Any GetConfig(const std::string& deviceName, const std::string& name) const = 0; /** * @brief Returns devices available for neural networks inference @@ -191,7 +188,7 @@ public: * @param params Map of device-specific shared context parameters. * @return A shared pointer to a created remote context. */ - virtual InferenceEngine::RemoteContext::Ptr CreateContext(const std::string& deviceName, const AnyMap&) = 0; + virtual InferenceEngine::RemoteContext::Ptr CreateContext(const std::string& deviceName, const ov::AnyMap&) = 0; /** * @brief Get only configs that are suppored by device @@ -209,81 +206,9 @@ public: * @param deviceName - A name of a device to get create shared context from. * @return A shared pointer to a default remote context. */ - virtual ie::RemoteContext::Ptr GetDefaultContext(const std::string& deviceName) = 0; - - /** - * @brief Sets properties for a device, acceptable keys can be found in openvino/runtime/properties.hpp. - * - * @param device_name Name of a device. - * - * @param properties Map of pairs: (property name, property value). 
- */ - virtual void set_property(const std::string& device_name, const AnyMap& properties) = 0; - - /** - * @brief Sets properties for a device, acceptable keys can be found in openvino/runtime/properties.hpp. - * - * @tparam Properties Should be the pack of `std::pair` types. - * @param device_name Name of a device. - * @param properties Optional pack of pairs: (property name, property value). - */ - template - util::EnableIfAllStringAny set_property(const std::string& device_name, - Properties&&... properties) { - set_property(device_name, AnyMap{std::forward(properties)...}); - } - - /** - * @brief Gets properties related to device behaviour. - * - * - * @param device_name Name of a device to get a property value. - * @param name Property name. - * @param arguments Additional arguments to get a property. - * @return Value of a property corresponding to the property name. - */ - virtual Any get_property(const std::string& device_name, - const std::string& name, - const AnyMap& arguments) const = 0; - - /** - * @brief Gets properties related to device behaviour. - * - * @tparam T Type of a returned value. - * @tparam M Property mutability. - * @param deviceName Name of a device to get a property value. - * @param property Property object. - * @return Property value. - */ - template - T get_property(const std::string& device_name, const Property& property) const { - return get_property(device_name, property.name(), {}).template as(); - } - - /** - * @brief Gets properties related to device behaviour. - * - * @tparam T Type of a returned value. - * @tparam M Property mutability. - * @param deviceName Name of a device to get a property value. - * @param property Property object. - * @param arguments Additional arguments to get a property. - * @return Property value. - */ - template - T get_property(const std::string& device_name, const Property& property, const AnyMap& arguments) const { - return get_property(device_name, property.name(), arguments).template as(); - } - - /** - * @brief Default virtual destructor - */ - virtual ~ICore() = default; + virtual RemoteContext::Ptr GetDefaultContext(const std::string& deviceName) = 0; }; -} // namespace ov -namespace InferenceEngine { -using ICore = ov::ICore; /** * @private */ diff --git a/src/inference/dev_api/openvino/runtime/icore.hpp b/src/inference/dev_api/openvino/runtime/icore.hpp new file mode 100644 index 00000000000..ef606966d48 --- /dev/null +++ b/src/inference/dev_api/openvino/runtime/icore.hpp @@ -0,0 +1,212 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +/** + * @brief A file provides API for Core object + * @file openvino/runtime/icore.hpp + */ + +#pragma once + +#include +#include + +#include "openvino/runtime/tensor.hpp" +#include "so_ptr.hpp" + +namespace ov { + +/** + * @interface ICore + * @brief Minimal ICore interface to allow plugin to get information from Core Inference Engine class. 
+ * @ingroup ie_dev_api_plugin_api
+ */
+class ICore {
+public:
+    /**
+     * @brief Reads IR xml and bin (with the same name) files
+     * @param model string with IR
+     * @param weights shared pointer to constant blob with weights
+     * @param frontend_mode read network without post-processing or other transformations
+     * @return shared pointer to ov::Model
+     */
+    virtual std::shared_ptr<ov::Model> read_model(const std::string& model,
+                                                  const ov::Tensor& weights,
+                                                  bool frontend_mode = false) const = 0;
+
+    /**
+     * @brief Reads IR xml and bin files
+     * @param model_path path to IR file
+     * @param bin_path path to bin file, if path is empty, will try to read bin file with the same name as xml and
+     * if bin file with the same name was not found, will load IR without weights.
+     * @return shared pointer to ov::Model
+     */
+    virtual std::shared_ptr<ov::Model> read_model(const std::string& model_path, const std::string& bin_path) const = 0;
+
+    /**
+     * @brief Creates an executable network from a network object.
+     *
+     * Users can create as many networks as they need and use
+     * them simultaneously (up to the limitation of the hardware resources)
+     *
+     * @param model OpenVINO Model
+     * @param device_name Name of device to load network to
+     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
+     * operation
+     * @return An executable network reference
+     */
+    virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(
+        const std::shared_ptr<const ov::Model>& model,
+        const std::string& device_name,
+        const ov::AnyMap& config = {}) const = 0;
+
+    /**
+     * @brief Creates an executable network from a network object.
+     *
+     * Users can create as many networks as they need and use
+     * them simultaneously (up to the limitation of the hardware resources)
+     *
+     * @param model OpenVINO Model
+     * @param context "Remote" (non-CPU) accelerator device-specific execution context to use
+     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
+     * operation
+     * @return An executable network reference
+     */
+    virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(
+        const std::shared_ptr<const ov::Model>& model,
+        const ov::RemoteContext& context,
+        const ov::AnyMap& config = {}) const = 0;
+
+    /**
+     * @brief Creates an executable network from a model file.
+     *
+     * Users can create as many networks as they need and use
+     * them simultaneously (up to the limitation of the hardware resources)
+     *
+     * @param model_path Path to model
+     * @param device_name Name of device to load network to
+     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
+     * operation
+     * @return An executable network reference
+     */
+    virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::string& model_path,
+                                                                                 const std::string& device_name,
+                                                                                 const ov::AnyMap& config) const = 0;
+
+    /**
+     * @brief Creates an executable network from a model memory.
+     *
+     * Users can create as many networks as they need and use
+     * them simultaneously (up to the limitation of the hardware resources)
+     *
+     * @param model_str String data of model
+     * @param weights Model's weights
+     * @param device_name Name of device to load network to
+     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
+     * operation
+     * @return An executable network reference
+     */
+    virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::string& model_str,
+                                                                                 const ov::Tensor& weights,
+                                                                                 const std::string& device_name,
+                                                                                 const ov::AnyMap& config) const = 0;
+
+    /**
+     * @brief Creates an executable network from a previously exported network
+     * @param model model stream
+     * @param device_name Name of the device to load the executable network on
+     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
+     * operation
+     * @return An executable network reference
+     */
+    virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal>
+    import_model(std::istream& model, const std::string& device_name, const ov::AnyMap& config = {}) const = 0;
+
+    /**
+     * @brief Query device if it supports the specified model with the specified configuration
+     *
+     * @param model OpenVINO Model
+     * @param device_name A name of a device to query
+     * @param config Optional map of pairs: (config parameter name, config parameter value)
+     * @return An object containing a map of pairs a layer name -> a device name supporting this layer.
+     */
+    virtual ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model,
+                                            const std::string& device_name,
+                                            const ov::AnyMap& config) const = 0;
+
+    /**
+     * @brief Returns devices available for neural networks inference
+     *
+     * @return A vector of devices. The devices are returned as { CPU, GPU.0, GPU.1, MYRIAD }
+     * If there is more than one device of a specific type, they are enumerated with the .# suffix.
+     */
+    virtual std::vector<std::string> get_available_devices() const = 0;
+
+    /**
+     * @brief Create a new shared context object on specified accelerator device
+     * using specified plugin-specific low level device API parameters (device handle, pointer, etc.)
+     * @param device_name Name of a device to create new shared context on.
+     * @param args Map of device-specific shared context parameters.
+     * @return A shared pointer to a created remote context.
+     */
+    virtual ov::RemoteContext create_context(const std::string& device_name, const AnyMap& args) const = 0;
+
+    virtual bool is_new_api() const = 0;
+
+    /**
+     * @brief Get a pointer to default shared context object for the specified device.
+     * @param device_name - A name of a device to get a default shared context from.
+     * @return A shared pointer to a default remote context.
+     */
+    virtual ov::RemoteContext get_default_context(const std::string& device_name) const = 0;
+
+    /**
+     * @brief Gets properties related to device behaviour.
+     *
+     *
+     * @param device_name Name of a device to get a property value.
+     * @param name Property name.
+     * @param arguments Additional arguments to get a property.
+     * @return Value of a property corresponding to the property name.
+     */
+    virtual Any get_property(const std::string& device_name,
+                             const std::string& name,
+                             const AnyMap& arguments) const = 0;
+
+    /**
+     * @brief Gets properties related to device behaviour.
+     *
+     * @tparam T Type of a returned value.
+     * @tparam M Property mutability.
+     * @param device_name Name of a device to get a property value.
+     * @param property Property object.
+     * @return Property value.
+ */ + template + T get_property(const std::string& device_name, const Property& property) const { + return get_property(device_name, property.name(), {}).template as(); + } + + /** + * @brief Gets properties related to device behaviour. + * + * @tparam T Type of a returned value. + * @tparam M Property mutability. + * @param deviceName Name of a device to get a property value. + * @param property Property object. + * @param arguments Additional arguments to get a property. + * @return Property value. + */ + template + T get_property(const std::string& device_name, const Property& property, const AnyMap& arguments) const { + return get_property(device_name, property.name(), arguments).template as(); + } + + /** + * @brief Default virtual destructor + */ + virtual ~ICore(); +}; + +} // namespace ov diff --git a/src/inference/dev_api/openvino/runtime/iplugin.hpp b/src/inference/dev_api/openvino/runtime/iplugin.hpp new file mode 100644 index 00000000000..92d942090a7 --- /dev/null +++ b/src/inference/dev_api/openvino/runtime/iplugin.hpp @@ -0,0 +1,237 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +/** + * @brief OpenVINO Runtime plugin API wrapper + * @file openvino/runtime/iplugin.hpp + */ + +#pragma once + +#include + +#include "openvino/core/any.hpp" +#include "openvino/core/deprecated.hpp" +#include "openvino/core/model.hpp" +#include "openvino/core/version.hpp" +#include "openvino/runtime/common.hpp" +#include "openvino/runtime/icore.hpp" +#include "openvino/runtime/remote_context.hpp" +#include "threading/ie_executor_manager.hpp" + +namespace InferenceEngine { + +class IExecutableNetworkInternal; +class IPluginWrapper; +class IExtension; + +} // namespace InferenceEngine + +namespace ov { + +/** + * @brief OpenVINO Plugin Interface 2.0 + */ +class OPENVINO_RUNTIME_API IPlugin : public std::enable_shared_from_this { +public: + /** + * @brief Sets a plugin version + * + * @param version A version to set + */ + void set_version(const Version& version); + + /** + * @brief Returns a plugin version + * + * @return A constant ov::Version object + */ + const Version& get_version() const; + + /** + * @brief Sets a name for the plugin + * + * @param name Plugin name + */ + void set_device_name(const std::string& name); + + /** + * @brief Provides a plugin name + * + * @return Plugin name + */ + const std::string& get_device_name() const; + + /** + * @brief Compiles model from ov::Model object + * @param model A model object acquired from ov::Core::read_model or source construction + * @param properties A ov::AnyMap of properties relevant only for this load operation + * @return Created Compiled Model object + */ + virtual std::shared_ptr compile_model( + const std::shared_ptr& model, + const ov::AnyMap& properties) const = 0; + + /** + * @brief Compiles model from ov::Model object + * @param model_path A path to model (path can be converted from unicode representation) + * @param properties A ov::AnyMap of properties relevant only for this load operation + * @return Created Compiled Model object + */ + virtual std::shared_ptr compile_model( + const std::string& model_path, + const ov::AnyMap& properties) const = 0; + + /** + * @brief Compiles model from ov::Model object, on specified remote context + * @param model A model object acquired from ov::Core::read_model or source construction + * @param properties A ov::AnyMap of properties relevant only for this load operation + * @param context A pointer to plugin context derived from RemoteContext 
class used to
+     * execute the model
+     * @return Created Compiled Model object
+     */
+    virtual std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
+        const std::shared_ptr<const ov::Model>& model,
+        const ov::AnyMap& properties,
+        const ov::RemoteContext& context) const = 0;
+
+    /**
+     * @brief Sets properties for plugin, acceptable keys can be found in openvino/runtime/properties.hpp
+     * @param properties ov::AnyMap of properties
+     */
+    virtual void set_property(const ov::AnyMap& properties) = 0;
+
+    /**
+     * @brief Gets properties related to plugin behaviour.
+     *
+     * @param name Property name.
+     * @param arguments Additional arguments to get a property.
+     *
+     * @return Value of a property corresponding to the property name.
+     */
+    virtual ov::Any get_property(const std::string& name, const ov::AnyMap& arguments) const = 0;
+
+    /**
+     * @brief Creates a remote context instance based on a map of properties
+     * @param remote_properties Map of device-specific shared context remote properties.
+     *
+     * @return A remote context object
+     */
+    virtual RemoteContext create_context(const ov::AnyMap& remote_properties) const = 0;
+
+    /**
+     * @brief Provides a default remote context instance if supported by a plugin
+     * @param remote_properties Map of device-specific shared context remote properties.
+     *
+     * @return The default context.
+     */
+    virtual RemoteContext get_default_context(const ov::AnyMap& remote_properties) const = 0;
+
+    /**
+     * @brief Creates a compiled model from a previously exported model using plugin implementation
+     * and removes OpenVINO Runtime magic and plugin name
+     * @param model Reference to model output stream
+     * @param properties A ov::AnyMap of properties
+     * @return A compiled model
+     */
+    virtual std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> import_model(
+        std::istream& model,
+        const ov::AnyMap& properties) const = 0;
+
+    /**
+     * @brief Creates a compiled model from a previously exported model using plugin implementation
+     * and removes OpenVINO Runtime magic and plugin name
+     * @param model Reference to model output stream
+     * @param context A pointer to plugin context derived from RemoteContext class used to
+     * execute the model
+     * @param properties A ov::AnyMap of properties
+     * @return A compiled model
+     */
+    virtual std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>
+    import_model(std::istream& model, const ov::RemoteContext& context, const ov::AnyMap& properties) const = 0;
+
+    /**
+     * @brief Queries a plugin about supported layers in the model
+     * @param model Model object to query.
+     * @param properties Optional map of pairs: (property name, property value).
+     * @return An object containing a map of pairs an operation name -> a device name supporting this operation.
+ */ + virtual ov::SupportedOpsMap query_model(const std::shared_ptr& model, + const ov::AnyMap& properties) const = 0; + + /** + * @deprecated This method allows to load legacy Inference Engine Extensions and will be removed in 2024.0 release + * @brief Registers legacy extension within plugin + * @param extension - pointer to already loaded legacy extension + */ + OPENVINO_DEPRECATED( + "This method allows to load legacy Inference Engine Extensions and will be removed in 2024.0 release") + virtual void add_extension(const std::shared_ptr& extension); + + /** + * @brief Sets pointer to ICore interface + * @param core Pointer to Core interface + */ + void set_core(const std::weak_ptr& core); + + /** + * @brief Gets reference to ICore interface + * @return Reference to ICore interface + */ + std::shared_ptr get_core() const; + + /** + * @brief Provides an information about used API + * @return true if new API is used + */ + bool is_new_api() const; + + /** + * @brief Gets reference to tasks execution manager + * @return Reference to ExecutorManager interface + */ + const std::shared_ptr& get_executor_manager() const; + + ~IPlugin() = default; + +protected: + IPlugin(); + +private: + friend ::InferenceEngine::IPluginWrapper; + + std::string m_plugin_name; //!< A device name that plugins enables + std::weak_ptr m_core; //!< A pointer to ICore interface + std::shared_ptr m_executor_manager; //!< A tasks execution manager + ov::Version m_version; //!< Member contains plugin version + bool m_is_new_api; //!< A flag which shows used API +}; + +} // namespace ov +/** + * @def OV_CREATE_PLUGIN + * @brief Defines a name of a function creating plugin instance + * @ingroup ie_dev_api_plugin_api + */ +#ifndef OV_CREATE_PLUGIN +# define OV_CREATE_PLUGIN CreatePluginEngine +#endif + +/** + * @def OV_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version) + * @brief Defines the exported `OV_CREATE_PLUGIN` function which is used to create a plugin instance + * @ingroup ov_dev_api_plugin_api + */ +#define OV_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) 
\ + OPENVINO_PLUGIN_API void OV_CREATE_PLUGIN(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false); \ + void OV_CREATE_PLUGIN(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false) { \ + try { \ + plugin = ::std::make_shared(__VA_ARGS__); \ + plugin->set_version(version); \ + } catch (const InferenceEngine::Exception& ex) { \ + throw ov::Exception(ex.what()); \ + } catch (const std::exception& ex) { \ + throw ov::Exception(ex.what()); \ + } \ + } diff --git a/src/inference/include/openvino/runtime/remote_context.hpp b/src/inference/include/openvino/runtime/remote_context.hpp index 02c5eb69480..7f603e6ba16 100644 --- a/src/inference/include/openvino/runtime/remote_context.hpp +++ b/src/inference/include/openvino/runtime/remote_context.hpp @@ -20,11 +20,17 @@ namespace InferenceEngine { class RemoteContext; +class IPluginWrapper; +class Core; } // namespace InferenceEngine namespace ov { class Core; +class CoreImpl; +class Plugin; +class IPlugin; +class IInferencePluginWrapper; class CompiledModel; /** @@ -47,7 +53,13 @@ protected: */ RemoteContext(const std::shared_ptr& impl, const std::vector>& so); + friend class InferenceEngine::Core; + friend class InferenceEngine::IPluginWrapper; friend class ov::Core; + friend class ov::CoreImpl; + friend class ov::Plugin; + friend class ov::IPlugin; + friend class ov::IInferencePluginWrapper; friend class ov::CompiledModel; public: diff --git a/src/inference/src/check_network_batchable.cpp b/src/inference/src/check_network_batchable.cpp index 8ce148dd152..1ae34b29591 100644 --- a/src/inference/src/check_network_batchable.cpp +++ b/src/inference/src/check_network_batchable.cpp @@ -9,14 +9,13 @@ #include "transformations/common_optimizations/dimension_tracking.hpp" #include "transformations/init_node_info.hpp" -namespace InferenceEngine { +namespace ov { namespace details { -NetworkBatchAbility isNetworkBatchable(const CNNNetwork& orig_network, +NetworkBatchAbility is_model_batchable(const std::shared_ptr& model, const std::string& deviceNameWithoutBatch, bool strictly_track_dims) { - CNNNetwork clonedNetwork(cloneNetwork(orig_network)); - auto function = clonedNetwork.getFunction(); + auto function = model->clone(); // find the batch dim ov::pass::Manager m; m.register_pass(); @@ -49,12 +48,12 @@ NetworkBatchAbility isNetworkBatchable(const CNNNetwork& orig_network, if (!any_batched_inputs) return NetworkBatchAbility::NO; - for (auto&& node : orig_network.getFunction()->get_ops()) + for (auto&& node : model->get_ops()) node->get_rt_info()["affinity"] = "BATCH"; // default affinity (ignored if HETERO is not triggered) // have to execute the DetectionOutput separately (without batching) // as this layer does mix-in the values from the different inputs (batch id) bool bDetectionOutput = false; - for (auto& result_node : orig_network.getFunction()->get_results()) { + for (auto& result_node : model->get_results()) { auto do_node = result_node->input_value(0).get_node_shared_ptr(); std::shared_ptr convert_node; if (ov::is_type(do_node)) { // cases with do->convert->result @@ -76,4 +75,4 @@ NetworkBatchAbility isNetworkBatchable(const CNNNetwork& orig_network, } } // namespace details -} // namespace InferenceEngine \ No newline at end of file +} // namespace ov diff --git a/src/inference/src/check_network_batchable.hpp b/src/inference/src/check_network_batchable.hpp index 58fd040a754..ce7181a5079 100644 --- a/src/inference/src/check_network_batchable.hpp +++ b/src/inference/src/check_network_batchable.hpp @@ -7,17 +7,17 @@ #include 
"cnn_network_ngraph_impl.hpp" -namespace InferenceEngine { +namespace ov { namespace details { /** * @brief Checks if the input network is batch-able (e.g. no dynamic inputs, inputs has the batch dimension, etc) * @param function A ngraph function to check for automatic-batching applicability * @return An enum value indicating whether the network can be safely batched (with HETERO or as is) or not */ -enum NetworkBatchAbility : uint32_t { NO = 0, AS_IS, WITH_HETERO }; -NetworkBatchAbility isNetworkBatchable(const CNNNetwork& network, +enum class NetworkBatchAbility : uint32_t { NO = 0, AS_IS, WITH_HETERO }; +NetworkBatchAbility is_model_batchable(const std::shared_ptr& model, const std::string& deviceNoBatch, bool strictly_track_dims); } // namespace details -} // namespace InferenceEngine +} // namespace ov diff --git a/src/inference/src/compilation_context.cpp b/src/inference/src/compilation_context.cpp index 86ac8ddca29..5d245c592c6 100644 --- a/src/inference/src/compilation_context.cpp +++ b/src/inference/src/compilation_context.cpp @@ -65,7 +65,7 @@ std::string NetworkCompilationContext::calculateFileInfo(const std::string& file return std::to_string(seed); } -std::string NetworkCompilationContext::computeHash(const CNNNetwork& network, +std::string NetworkCompilationContext::computeHash(CNNNetwork& network, const std::map& compileOptions) { OV_ITT_SCOPE(FIRST_INFERENCE, itt::domains::IE_LT, "NetworkCompilationContext::computeHash - CNN"); @@ -73,11 +73,10 @@ std::string NetworkCompilationContext::computeHash(const CNNNetwork& network, uint64_t seed = 0; // 1. Calculate hash on function - CNNNetwork net(network); ov::pass::Manager m; m.register_pass(); m.register_pass(seed); - m.run_passes(net.getFunction()); + m.run_passes(network.getFunction()); // 2. 
Compute hash on serialized data and options for (const auto& kvp : compileOptions) { diff --git a/src/inference/src/compilation_context.hpp b/src/inference/src/compilation_context.hpp index 96e9cf5cb75..d56a1fe9dd6 100644 --- a/src/inference/src/compilation_context.hpp +++ b/src/inference/src/compilation_context.hpp @@ -20,7 +20,7 @@ class CNNNetwork; struct NetworkCompilationContext final { static std::string calculateFileInfo(const std::string& filePath); - static std::string computeHash(const CNNNetwork& network, const std::map& compileOptions); + static std::string computeHash(CNNNetwork& network, const std::map& compileOptions); static std::string computeHash(const std::string& modelName, const std::map& compileOptions); diff --git a/src/inference/src/core.cpp b/src/inference/src/core.cpp index 387f1346d61..4463addfc7f 100644 --- a/src/inference/src/core.cpp +++ b/src/inference/src/core.cpp @@ -7,6 +7,7 @@ #include "any_copy.hpp" #include "cnn_network_ngraph_impl.hpp" #include "cpp/ie_plugin.hpp" +#include "dev/converter_utils.hpp" #include "dev/core_impl.hpp" #include "ie_itt.hpp" #include "so_extension.hpp" @@ -49,7 +50,7 @@ Core::Core(const std::string& xmlConfigFile) { _impl = std::make_shared(); #ifdef OPENVINO_STATIC_LIBRARY - _impl->RegisterPluginsInRegistry(::getStaticPluginsRegistry()); + _impl->register_plugins_in_registry(::getStaticPluginsRegistry()); #else register_plugins(findPluginXML(xmlConfigFile)); #endif @@ -67,34 +68,18 @@ std::map Core::get_versions(const std::string& deviceName) #ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT std::shared_ptr Core::read_model(const std::wstring& modelPath, const std::wstring& binPath) const { OV_CORE_CALL_STATEMENT( - return _impl->ReadNetwork(ov::util::wstring_to_string(modelPath), ov::util::wstring_to_string(binPath)) - .getFunction();); + return _impl->read_model(ov::util::wstring_to_string(modelPath), ov::util::wstring_to_string(binPath));); } #endif std::shared_ptr Core::read_model(const std::string& modelPath, const std::string& binPath) const { - OV_CORE_CALL_STATEMENT(return _impl->ReadNetwork(modelPath, binPath).getFunction();); + OV_CORE_CALL_STATEMENT(return _impl->read_model(modelPath, binPath);); } std::shared_ptr Core::read_model(const std::string& model, const ov::Tensor& weights) const { - InferenceEngine::Blob::Ptr blob; - if (weights) { - blob = weights._impl; - } - OV_CORE_CALL_STATEMENT(return _impl->ReadNetwork(model, blob).getFunction();); + OV_CORE_CALL_STATEMENT(return _impl->read_model(model, weights);); } -namespace { - -ie::CNNNetwork toCNN(const std::shared_ptr& model) { - return ie::CNNNetwork( - std::make_shared(std::const_pointer_cast(model), - std::vector{}, - true)); -} - -} // namespace - CompiledModel Core::compile_model(const std::shared_ptr& model, const AnyMap& config) { return compile_model(model, ov::DEFAULT_DEVICE_NAME, config); } @@ -103,7 +88,7 @@ CompiledModel Core::compile_model(const std::shared_ptr& model, const std::string& deviceName, const AnyMap& config) { OV_CORE_CALL_STATEMENT({ - auto exec = _impl->LoadNetwork(toCNN(model), deviceName, any_copy(flatten_sub_properties(deviceName, config))); + auto exec = _impl->compile_model(model, deviceName, flatten_sub_properties(deviceName, config)); return {exec._ptr, exec._so}; }); } @@ -114,7 +99,7 @@ CompiledModel Core::compile_model(const std::string& modelPath, const AnyMap& co CompiledModel Core::compile_model(const std::string& modelPath, const std::string& deviceName, const AnyMap& config) { OV_CORE_CALL_STATEMENT({ - auto exec = 
_impl->LoadNetwork(modelPath, deviceName, any_copy(flatten_sub_properties(deviceName, config))); + auto exec = _impl->compile_model(modelPath, deviceName, flatten_sub_properties(deviceName, config)); return {exec._ptr, exec._so}; }); } @@ -123,12 +108,8 @@ CompiledModel Core::compile_model(const std::string& model, const ov::Tensor& weights, const std::string& deviceName, const AnyMap& config) { - InferenceEngine::Blob::Ptr blob; - if (weights) { - blob = weights._impl; - } OV_CORE_CALL_STATEMENT({ - auto exec = _impl->LoadNetwork(model, blob, deviceName, any_copy(flatten_sub_properties(deviceName, config))); + auto exec = _impl->compile_model(model, weights, deviceName, flatten_sub_properties(deviceName, config)); return {exec._ptr, exec._so}; }); } @@ -137,9 +118,7 @@ CompiledModel Core::compile_model(const std::shared_ptr& model, const RemoteContext& context, const AnyMap& config) { OV_CORE_CALL_STATEMENT({ - auto exec = _impl->LoadNetwork(toCNN(model), - context._impl, - any_copy(flatten_sub_properties(context.get_device_name(), config))); + auto exec = _impl->compile_model(model, context, flatten_sub_properties(context.get_device_name(), config)); return {exec._ptr, exec._so}; }); } @@ -187,13 +166,13 @@ void Core::add_extension(const std::shared_ptr& extension) { add_extension(std::vector>{extension}); } void Core::add_extension(const std::vector>& extensions) { - OV_CORE_CALL_STATEMENT({ _impl->AddOVExtensions(extensions); }); + OV_CORE_CALL_STATEMENT({ _impl->add_extension(extensions); }); } CompiledModel Core::import_model(std::istream& modelStream, const std::string& deviceName, const AnyMap& config) { OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model"); OV_CORE_CALL_STATEMENT({ - auto exec = _impl->ImportNetwork(modelStream, deviceName, any_copy(flatten_sub_properties(deviceName, config))); + auto exec = _impl->import_model(modelStream, deviceName, flatten_sub_properties(deviceName, config)); return {exec._ptr, exec._so}; }); } @@ -218,7 +197,7 @@ CompiledModel Core::import_model(std::istream& modelStream, const RemoteContext& modelStream.seekg(currentPos, modelStream.beg); OV_CORE_CALL_STATEMENT({ - auto exec = _impl->GetCPPPluginByName(deviceName).import_model(modelStream, {}); + auto exec = _impl->get_plugin(deviceName).import_model(modelStream, {}); return {exec._ptr, exec._so}; }); } @@ -226,11 +205,7 @@ CompiledModel Core::import_model(std::istream& modelStream, const RemoteContext& SupportedOpsMap Core::query_model(const std::shared_ptr& model, const std::string& deviceName, const AnyMap& config) const { - OV_CORE_CALL_STATEMENT({ - auto qnResult = - _impl->QueryNetwork(toCNN(model), deviceName, any_copy(flatten_sub_properties(deviceName, config))); - return qnResult.supportedLayersMap; - }); + OV_CORE_CALL_STATEMENT(return _impl->query_model(model, deviceName, flatten_sub_properties(deviceName, config));); } void Core::set_property(const AnyMap& properties) { @@ -254,7 +229,7 @@ std::vector Core::get_available_devices() const { } void Core::register_plugin(const std::string& pluginName, const std::string& deviceName) { - OV_CORE_CALL_STATEMENT(_impl->RegisterPluginByName(pluginName, deviceName);); + OV_CORE_CALL_STATEMENT(_impl->register_plugin(pluginName, deviceName);); } void Core::unload_plugin(const std::string& deviceName) { @@ -262,12 +237,12 @@ void Core::unload_plugin(const std::string& deviceName) { ie::DeviceIDParser parser(deviceName); std::string devName = parser.getDeviceName(); - _impl->UnloadPluginByName(devName); + 
_impl->unload_plugin(devName); }); } void Core::register_plugins(const std::string& xmlConfigFile) { - OV_CORE_CALL_STATEMENT(_impl->RegisterPluginsInRegistry(xmlConfigFile);); + OV_CORE_CALL_STATEMENT(_impl->register_plugins_in_registry(xmlConfigFile);); } RemoteContext Core::create_context(const std::string& deviceName, const AnyMap& params) { @@ -278,8 +253,8 @@ RemoteContext Core::create_context(const std::string& deviceName, const AnyMap& OV_CORE_CALL_STATEMENT({ auto parsed = parseDeviceNameIntoConfig(deviceName, flatten_sub_properties(deviceName, params)); - auto remoteContext = _impl->GetCPPPluginByName(parsed._deviceName).create_context(parsed._config); - return {remoteContext._ptr, {remoteContext._so}}; + auto remoteContext = _impl->get_plugin(parsed._deviceName).create_context(parsed._config); + return {remoteContext._impl, {remoteContext._so}}; }); } @@ -291,8 +266,8 @@ RemoteContext Core::get_default_context(const std::string& deviceName) { OV_CORE_CALL_STATEMENT({ auto parsed = parseDeviceNameIntoConfig(deviceName, AnyMap{}); - auto remoteContext = _impl->GetCPPPluginByName(parsed._deviceName).get_default_context(parsed._config); - return {remoteContext._ptr, {remoteContext._so}}; + auto remoteContext = _impl->get_plugin(parsed._deviceName).get_default_context(parsed._config); + return {remoteContext._impl, {remoteContext._so}}; }); } diff --git a/src/inference/src/cpp/ie_plugin.hpp b/src/inference/src/cpp/ie_plugin.hpp index df737e99ed0..ca6991102f1 100644 --- a/src/inference/src/cpp/ie_plugin.hpp +++ b/src/inference/src/cpp/ie_plugin.hpp @@ -13,25 +13,28 @@ #include #include -#include "file_utils.h" -#include "cpp/ie_cnn_network.h" -#include "cpp/exception2status.hpp" -#include "cpp_interfaces/interface/ie_iplugin_internal.hpp" -#include "so_ptr.hpp" -#include "openvino/runtime/common.hpp" #include "any_copy.hpp" +#include "cpp/exception2status.hpp" +#include "cpp/ie_cnn_network.h" +#include "cpp_interfaces/interface/ie_iplugin_internal.hpp" +#include "file_utils.h" #include "ie_plugin_config.hpp" +#include "openvino/runtime/common.hpp" +#include "so_ptr.hpp" #if defined __GNUC__ -# pragma GCC diagnostic push -# pragma GCC diagnostic ignored "-Wreturn-type" +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wreturn-type" #endif -#define PLUGIN_CALL_STATEMENT(...) \ - if (!_ptr) IE_THROW() << "Wrapper used in the PLUGIN_CALL_STATEMENT was not initialized."; \ - try { \ - __VA_ARGS__; \ - } catch(...) {::InferenceEngine::details::Rethrow();} +#define PLUGIN_CALL_STATEMENT(...) \ + if (!_ptr) \ + IE_THROW() << "Wrapper used in the PLUGIN_CALL_STATEMENT was not initialized."; \ + try { \ + __VA_ARGS__; \ + } catch (...) 
{ \ + ::InferenceEngine::details::Rethrow(); \ + } namespace InferenceEngine { /** @@ -47,11 +50,11 @@ struct InferencePlugin { _ptr = {}; } - void SetName(const std::string & deviceName) { + void SetName(const std::string& deviceName) { PLUGIN_CALL_STATEMENT(_ptr->SetName(deviceName)); } - void SetCore(std::weak_ptr core) { + void SetCore(std::weak_ptr core) { PLUGIN_CALL_STATEMENT(_ptr->SetCore(core)); } @@ -67,17 +70,19 @@ struct InferencePlugin { PLUGIN_CALL_STATEMENT(_ptr->SetConfig(config)); } - ov::SoPtr LoadNetwork(const CNNNetwork& network, const std::map& config) { + ov::SoPtr LoadNetwork(const CNNNetwork& network, + const std::map& config) { PLUGIN_CALL_STATEMENT(return {_ptr->LoadNetwork(network, config), _so}); } ov::SoPtr LoadNetwork(const CNNNetwork& network, - const std::shared_ptr& context, - const std::map& config) { + const std::shared_ptr& context, + const std::map& config) { PLUGIN_CALL_STATEMENT(return {_ptr->LoadNetwork(network, config, context), _so}); } - ov::SoPtr LoadNetwork(const std::string& modelPath, const std::map& config) { + ov::SoPtr LoadNetwork(const std::string& modelPath, + const std::map& config) { ov::SoPtr res; PLUGIN_CALL_STATEMENT(res = _ptr->LoadNetwork(modelPath, config)); if (!res._so) @@ -85,27 +90,27 @@ struct InferencePlugin { return res; } - QueryNetworkResult QueryNetwork(const CNNNetwork& network, - const std::map& config) const { + QueryNetworkResult QueryNetwork(const CNNNetwork& network, const std::map& config) const { QueryNetworkResult res; PLUGIN_CALL_STATEMENT(res = _ptr->QueryNetwork(network, config)); - if (res.rc != OK) IE_THROW() << res.resp.msg; + if (res.rc != OK) + IE_THROW() << res.resp.msg; return res; } ov::SoPtr ImportNetwork(const std::string& modelFileName, - const std::map& config) { + const std::map& config) { PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(modelFileName, config), _so}); } ov::SoPtr ImportNetwork(std::istream& networkModel, - const std::map& config) { + const std::map& config) { PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(networkModel, config), _so}); } ov::SoPtr ImportNetwork(std::istream& networkModel, - const std::shared_ptr& context, - const std::map& config) { + const std::shared_ptr& context, + const std::map& config) { PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(networkModel, context, config), _so}); } @@ -127,174 +132,8 @@ struct InferencePlugin { }; } // namespace InferenceEngine - #if defined __GNUC__ -# pragma GCC diagnostic pop +# pragma GCC diagnostic pop #endif -namespace ov { - -#define OV_PLUGIN_CALL_STATEMENT(...) \ - OPENVINO_ASSERT(_ptr != nullptr, "InferencePlugin was not initialized."); \ - try { \ - __VA_ARGS__; \ - } catch (...) { \ - ::InferenceEngine::details::Rethrow(); \ - } - -/** - * @brief This class is a C++ API wrapper for IInferencePlugin. - * - * It can throw exceptions safely for the application, where it is properly handled. 
- */ -class InferencePlugin { - std::shared_ptr _ptr; - std::shared_ptr _so; - -public: - InferencePlugin() = default; - - ~InferencePlugin() { - _ptr = {}; - } - - InferencePlugin(const std::shared_ptr& ptr, const std::shared_ptr& so) : - _ptr{ptr}, - _so{so} { - OPENVINO_ASSERT(_ptr != nullptr, "InferencePlugin was not initialized."); - } - - void set_name(const std::string& deviceName) { - OV_PLUGIN_CALL_STATEMENT(_ptr->SetName(deviceName)); - } - - void set_core(std::weak_ptr core) { - OV_PLUGIN_CALL_STATEMENT(_ptr->SetCore(core)); - } - - const ie::Version get_version() const { - OV_PLUGIN_CALL_STATEMENT(return _ptr->GetVersion()); - } - - void add_extension(const ie::IExtensionPtr& extension) { - OV_PLUGIN_CALL_STATEMENT(_ptr->AddExtension(extension)); - } - - void set_config(const std::map& config) { - OV_PLUGIN_CALL_STATEMENT(_ptr->SetConfig(config)); - } - - void set_properties(const ov::AnyMap& config) { - OV_PLUGIN_CALL_STATEMENT(_ptr->SetProperties(config)); - } - - SoPtr compile_model(const ie::CNNNetwork& network, - const std::map& config) { - OV_PLUGIN_CALL_STATEMENT(return {_ptr->LoadNetwork(network, config), _so}); - } - - SoPtr compile_model(const ie::CNNNetwork& network, - const std::shared_ptr& context, - const std::map& config) { - OV_PLUGIN_CALL_STATEMENT(return {_ptr->LoadNetwork(network, config, context), _so}); - } - - SoPtr compile_model(const std::string& modelPath, const std::map& config) { - SoPtr res; - OV_PLUGIN_CALL_STATEMENT(res = _ptr->LoadNetwork(modelPath, config)); - if (!res._so) - res._so = _so; - return res; - } - - ie::QueryNetworkResult query_model(const ie::CNNNetwork& network, - const std::map& config) const { - ie::QueryNetworkResult res; - OV_PLUGIN_CALL_STATEMENT(res = _ptr->QueryNetwork(network, config)); - OPENVINO_ASSERT(res.rc == ie::OK, res.resp.msg); - return res; - } - - SoPtr import_model(const std::string& modelFileName, - const std::map& config) { - OV_PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(modelFileName, config), _so}); - } - - SoPtr import_model(std::istream& networkModel, - const std::map& config) { - OV_PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(networkModel, config), _so}); - } - - SoPtr import_model(std::istream& networkModel, - const std::shared_ptr& context, - const std::map& config) { - OV_PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(networkModel, context, config), _so}); - } - - Any get_metric(const std::string& name, const AnyMap& options) const { - OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetMetric(name, options), {_so}}); - } - - SoPtr create_context(const AnyMap& params) { - OV_PLUGIN_CALL_STATEMENT(return {_ptr->CreateContext(params), _so}); - } - - SoPtr get_default_context(const AnyMap& params) { - OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetDefaultContext(params), _so}); - } - - Any get_config(const std::string& name, const AnyMap& options) const { - OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetConfig(name, options), {_so}}); - } - - Any get_property(const std::string& name, const AnyMap& arguments) const { - OV_PLUGIN_CALL_STATEMENT({ - if (ov::supported_properties == name) { - try { - return {_ptr->GetMetric(name, arguments), {_so}}; - } catch (ie::Exception&) { - std::vector supported_properties; - try { - auto ro_properties = _ptr->GetMetric(METRIC_KEY(SUPPORTED_METRICS), arguments) - .as>(); - for (auto&& ro_property : ro_properties) { - if (ro_property != METRIC_KEY(SUPPORTED_METRICS) && - ro_property != METRIC_KEY(SUPPORTED_CONFIG_KEYS)) { - supported_properties.emplace_back(ro_property, 
PropertyMutability::RO); - } - } - } catch (ie::Exception&) {} - try { - auto rw_properties = _ptr->GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), arguments) - .as>(); - for (auto&& rw_property : rw_properties) { - supported_properties.emplace_back(rw_property, PropertyMutability::RW); - } - } catch (ie::Exception&) {} - supported_properties.emplace_back(ov::supported_properties.name(), PropertyMutability::RO); - return supported_properties; - } - } - try { - return {_ptr->GetMetric(name, arguments), {_so}}; - } catch (ie::Exception&) { - return {_ptr->GetConfig(name, arguments), {_so}}; - } - }); - } - - template - T get_property(const ov::Property& property) const { - return get_property(property.name(), {}).template as(); - } - - template - T get_property(const ov::Property& property, const AnyMap& arguments) const { - return get_property(property.name(), arguments).template as(); - } -}; - -} // namespace ov - #undef PLUGIN_CALL_STATEMENT -#undef OV_PLUGIN_CALL_STATEMENT diff --git a/src/inference/src/cpp_interfaces/interface/ie_iplugin_internal.cpp b/src/inference/src/cpp_interfaces/interface/ie_iplugin_internal.cpp index bb608dbd114..a65db624dbd 100644 --- a/src/inference/src/cpp_interfaces/interface/ie_iplugin_internal.cpp +++ b/src/inference/src/cpp_interfaces/interface/ie_iplugin_internal.cpp @@ -13,6 +13,7 @@ #include #include #include +#include #include #include #include @@ -21,6 +22,7 @@ #include "blob_factory.hpp" #include "cnn_network_ngraph_impl.hpp" #include "cpp/ie_cnn_network.h" +#include "dev/converter_utils.hpp" #include "exec_graph_info.hpp" #include "ie_algorithm.hpp" #include "ie_api.h" @@ -529,4 +531,8 @@ void SetExeNetworkInfo(const std::shared_ptr& exeNet exeNetwork->setOutputs(const_results); } +std::shared_ptr<::ov::IPlugin> convert_plugin(const std::shared_ptr& from) { + return ov::legacy_convert::convert_plugin(from); +} + } // namespace InferenceEngine diff --git a/src/inference/src/dev/converter_utils.cpp b/src/inference/src/dev/converter_utils.cpp new file mode 100644 index 00000000000..cc884a8e43d --- /dev/null +++ b/src/inference/src/dev/converter_utils.cpp @@ -0,0 +1,329 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "converter_utils.hpp" + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "any_copy.hpp" +#include "cnn_network_ngraph_impl.hpp" +#include "cpp/ie_plugin.hpp" +#include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp" +#include "cpp_interfaces/interface/ie_iplugin_internal.hpp" +#include "ie_icore.hpp" +#include "ie_ngraph_utils.hpp" +#include "iplugin_wrapper.hpp" +#include "openvino/runtime/iplugin.hpp" +#include "so_ptr.hpp" +#include "transformations/utils/utils.hpp" + +namespace { + +void fill_input_info(ov::Output& input, InferenceEngine::InputInfo::Ptr& input_info) { + const ov::Output const_input(input.get_node(), input.get_index()); + ov::legacy_convert::fill_input_info(const_input, input_info); + auto& rt_info = input.get_rt_info(); + auto it = rt_info.find("ie_legacy_preproc"); + if (it != rt_info.end()) { + rt_info.erase(it); + } + it = rt_info.find("ie_legacy_td"); + if (it != rt_info.end()) { + rt_info.erase(it); + } +} + +void fill_output_info(ov::Output& input, InferenceEngine::DataPtr& output_info) { + const ov::Output const_input(input.get_node(), input.get_index()); + ov::legacy_convert::fill_output_info(const_input, output_info); + auto& rt_info = 
input.get_rt_info(); + auto it = rt_info.find("ie_legacy_td"); + if (it != rt_info.end()) { + rt_info.erase(it); + } +} + +InferenceEngine::SizeVector get_dims(const ov::Output& port, + const std::function& callback = {}) { + InferenceEngine::SizeVector dims = {}; + const auto& p_shape = port.get_partial_shape(); + if (p_shape.is_static()) + dims = p_shape.get_shape(); + else { + if (!callback || !callback(dims)) { + if (p_shape.rank().is_static()) { + for (size_t i = 0; i < static_cast(p_shape.rank().get_length()); i++) { + dims.emplace_back(0); + } + } + } + } + return dims; +} + +} // namespace + +void ov::legacy_convert::fill_input_info(const ov::Output& input, + InferenceEngine::InputInfo::Ptr& input_info) { + if (!input_info) { + // Create input info + auto param_name = input.get_node()->get_friendly_name(); + auto dims = get_dims(input, [&](InferenceEngine::SizeVector& dims) -> bool { + auto param = std::dynamic_pointer_cast(input.get_node_shared_ptr()); + if (param && param->get_partial_shape().is_static()) { + dims = param->get_partial_shape().get_shape(); + return true; + } + return false; + }); + InferenceEngine::TensorDesc desc(InferenceEngine::details::convertPrecision(input.get_element_type()), + dims, + InferenceEngine::TensorDesc::getLayoutByDims(dims)); + auto data = std::make_shared(param_name, desc); + input_info = std::make_shared(); + input_info->setInputData(data); + } + auto& rt_info = input.get_rt_info(); + auto it = rt_info.find("ie_legacy_preproc"); + if (it != rt_info.end()) { + input_info->getPreProcess() = it->second.as(); + } + it = rt_info.find("ie_legacy_td"); + if (it != rt_info.end()) { + auto td = it->second.as(); + input_info->getInputData()->reshape(td.getDims(), td.getLayout()); + input_info->setPrecision(td.getPrecision()); + } +} +void ov::legacy_convert::fill_output_info(const ov::Output& output, + InferenceEngine::DataPtr& output_info) { + if (!output_info) { + // Create input info + const auto& res_name = ov::op::util::create_ie_output_name(output); + auto dims = get_dims(output); + InferenceEngine::TensorDesc desc(InferenceEngine::details::convertPrecision(output.get_element_type()), + dims, + InferenceEngine::TensorDesc::getLayoutByDims(dims)); + output_info = std::make_shared(res_name, desc); + } + auto& rt_info = output.get_rt_info(); + auto it = rt_info.find("ie_legacy_td"); + if (it != rt_info.end()) { + auto td = it->second.as(); + output_info->reshape(td.getDims(), td.getLayout()); + output_info->setPrecision(td.getPrecision()); + } +} + +InferenceEngine::CNNNetwork ov::legacy_convert::convert_model(const std::shared_ptr& model, + bool is_new_api) { + auto network = InferenceEngine::CNNNetwork(std::shared_ptr( + new InferenceEngine::details::CNNNetworkNGraphImpl(model->clone(), {}, is_new_api))); + std::shared_ptr cloned_model = network.getFunction(); + for (auto&& input : cloned_model->inputs()) { + auto param_name = input.get_node()->get_friendly_name(); + + OPENVINO_ASSERT(network.getInputsInfo().find(param_name) != network.getInputsInfo().end()); + + auto input_info = network.getInputsInfo()[param_name]; + ::fill_input_info(input, input_info); + } + for (auto&& result : cloned_model->get_results()) { + auto output = result->input_value(0); + const auto& res_name = ov::op::util::create_ie_output_name(output); + + OPENVINO_ASSERT(network.getOutputsInfo().find(res_name) != network.getOutputsInfo().end()); + auto output_info = network.getOutputsInfo()[res_name]; + + ::fill_output_info(output, output_info); + } + return network; +} 
+std::shared_ptr ov::legacy_convert::convert_model(const InferenceEngine::CNNNetwork& network, + bool is_new_api) { + OPENVINO_ASSERT(network.getFunction(), + "CNNNetwork can be converted to OpenVINO Model only in case if it contains ngraph::Function"); + if (is_new_api) + return network.getFunction(); + + auto cloned_model = network.getFunction()->clone(); + for (auto&& input : cloned_model->inputs()) { + auto param_name = input.get_node()->get_friendly_name(); + + OPENVINO_ASSERT(network.getInputsInfo().find(param_name) != network.getInputsInfo().end()); + + auto input_info = network.getInputsInfo().at(param_name); + auto& rt_info = input.get_rt_info(); + rt_info["ie_legacy_preproc"] = input_info->getPreProcess(); + rt_info["ie_legacy_td"] = input_info->getTensorDesc(); + } + for (auto&& result : cloned_model->get_results()) { + auto output = result->input_value(0); + const auto& res_name = ov::op::util::create_ie_output_name(output); + + OPENVINO_ASSERT(network.getOutputsInfo().find(res_name) != network.getOutputsInfo().end()); + auto output_info = network.getOutputsInfo().at(res_name); + + auto& rt_info = output.get_rt_info(); + rt_info["ie_legacy_td"] = output_info->getTensorDesc(); + } + return cloned_model; +} + +namespace ov { + +class IInferencePluginWrapper : public InferenceEngine::IInferencePlugin { +public: + IInferencePluginWrapper(const std::shared_ptr& plugin) { + auto& ver = plugin->get_version(); + InferenceEngine::Version version; + version.buildNumber = ver.buildNumber; + version.description = ver.description; + SetVersion(version); + _isNewAPI = plugin->is_new_api(); + _executorManager = plugin->get_executor_manager(); + } + std::string GetName() const noexcept override { + return m_plugin->get_device_name(); + } + + void SetName(const std::string& name) noexcept override { + m_plugin->set_device_name(name); + } + + std::shared_ptr LoadNetwork( + const InferenceEngine::CNNNetwork& network, + const std::map& config) override { + return m_plugin->compile_model(ov::legacy_convert::convert_model(network, m_plugin->is_new_api()), + ov::any_copy(config)); + } + + std::shared_ptr LoadNetwork( + const InferenceEngine::CNNNetwork& network, + const std::map& config, + const std::shared_ptr& context) override { + return m_plugin->compile_model(ov::legacy_convert::convert_model(network, m_plugin->is_new_api()), + ov::any_copy(config), + ov::RemoteContext{context, {}}); + } + + ov::SoPtr LoadNetwork( + const std::string& modelPath, + const std::map& config) override { + return ov::SoPtr( + m_plugin->compile_model(modelPath, ov::any_copy(config)), + {}); + } + + void AddExtension(const std::shared_ptr& extension) override { + m_plugin->add_extension(extension); + } + + void SetConfig(const std::map& config) override { + m_plugin->set_property(ov::any_copy(config)); + } + + void SetProperties(const ov::AnyMap& config) override { + m_plugin->set_property(config); + } + + InferenceEngine::Parameter GetConfig( + const std::string& name, + const std::map& options) const override { + return m_plugin->get_property(name, options); + } + + InferenceEngine::Parameter GetMetric( + const std::string& name, + const std::map& options) const override { + return m_plugin->get_property(name, options); + } + + std::shared_ptr CreateContext(const InferenceEngine::ParamMap& params) override { + return m_plugin->create_context(params)._impl; + } + + std::shared_ptr GetDefaultContext( + const InferenceEngine::ParamMap& params) override { + return m_plugin->get_default_context(params)._impl; + } + + 
std::shared_ptr ImportNetwork( + const std::string& modelFileName, + const std::map& config) override { + std::ifstream model(modelFileName, std::ios::binary); + return m_plugin->import_model(model, ov::any_copy(config)); + } + + std::shared_ptr ImportNetwork( + std::istream& networkModel, + const std::map& config) override { + return m_plugin->import_model(networkModel, ov::any_copy(config)); + } + + std::shared_ptr ImportNetwork( + std::istream& networkModel, + const std::shared_ptr& context, + const std::map& config) override { + return m_plugin->import_model(networkModel, ov::RemoteContext{context, {}}, ov::any_copy(config)); + } + + void SetCore(std::weak_ptr core) override { + return m_plugin->set_core(std::dynamic_pointer_cast(core)); + } + + std::shared_ptr GetCore() const noexcept override { + auto core = m_plugin->get_core(); + return std::dynamic_pointer_cast(core); + } + + InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::CNNNetwork& network, + const std::map& config) const override { + auto res = m_plugin->query_model(ov::legacy_convert::convert_model(network, m_plugin->is_new_api()), + ov::any_copy(config)); + ie::QueryNetworkResult ret; + if (!network.getFunction() || res.empty()) { + ret.rc = InferenceEngine::GENERAL_ERROR; + return ret; + } + ret.supportedLayersMap = res; + + return ret; + } + + std::shared_ptr get_plugin() { + return m_plugin; + } + +private: + std::shared_ptr m_plugin; +}; + +} // namespace ov + +std::shared_ptr<::InferenceEngine::IInferencePlugin> ov::legacy_convert::convert_plugin( + const std::shared_ptr<::ov::IPlugin>& plugin) { + if (auto wrapper = std::dynamic_pointer_cast(plugin)) + return wrapper->get_plugin(); + return std::make_shared(plugin); +} + +std::shared_ptr<::ov::IPlugin> ov::legacy_convert::convert_plugin( + const std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) { + std::shared_ptr<::ov::IPlugin> ov_plugin(new ::InferenceEngine::IPluginWrapper(plugin)); + return ov_plugin; +} diff --git a/src/inference/src/dev/converter_utils.hpp b/src/inference/src/dev/converter_utils.hpp new file mode 100644 index 00000000000..5d5ef8f8690 --- /dev/null +++ b/src/inference/src/dev/converter_utils.hpp @@ -0,0 +1,26 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "cpp/ie_cnn_network.h" +#include "cpp_interfaces/interface/ie_iplugin_internal.hpp" +#include "openvino/core/model.hpp" +#include "openvino/runtime/iplugin.hpp" + +namespace ov { +namespace legacy_convert { + +void fill_input_info(const ov::Output& input, InferenceEngine::InputInfo::Ptr& inputInfo); +void fill_output_info(const ov::Output& output, InferenceEngine::DataPtr& outputInfo); + +InferenceEngine::CNNNetwork convert_model(const std::shared_ptr& model, bool is_new_api); +std::shared_ptr convert_model(const InferenceEngine::CNNNetwork& model, bool is_new_api); + +std::shared_ptr<::InferenceEngine::IInferencePlugin> convert_plugin(const std::shared_ptr<::ov::IPlugin>& plugin); +std::shared_ptr<::ov::IPlugin> convert_plugin(const std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin); + +} // namespace legacy_convert +} // namespace ov + diff --git a/src/inference/src/dev/core_impl.cpp b/src/inference/src/dev/core_impl.cpp index 24d73b8aabc..0a0142155b9 100644 --- a/src/inference/src/dev/core_impl.cpp +++ b/src/inference/src/dev/core_impl.cpp @@ -4,22 +4,35 @@ #include "core_impl.hpp" +#include + +#include "any_copy.hpp" #include "check_network_batchable.hpp" #include 
"compilation_context.hpp" #include "cpp/ie_plugin.hpp" +#include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp" #include "cpp_interfaces/interface/ie_internal_plugin_config.hpp" #include "cpp_interfaces/interface/ie_iplugin_internal.hpp" +#include "dev/converter_utils.hpp" #include "file_utils.h" #include "ie_itt.hpp" #include "ie_network_reader.hpp" +#include "ie_ngraph_utils.hpp" +#include "iplugin_wrapper.hpp" #include "ngraph/op/constant.hpp" #include "ngraph/pass/constant_folding.hpp" +#include "openvino/core/any.hpp" +#include "openvino/core/except.hpp" #include "openvino/core/op_extension.hpp" +#include "openvino/core/preprocess/pre_post_process.hpp" #include "openvino/core/version.hpp" +#include "openvino/runtime/remote_context.hpp" #include "openvino/util/common_util.hpp" #include "openvino/util/shared_object.hpp" #include "xml_parse_utils.h" +ov::ICore::~ICore() = default; + namespace { template @@ -82,7 +95,7 @@ void stripDeviceName(std::string& device, const std::string& substr) { } // namespace -ov::CoreImpl::CoreImpl(bool _newAPI) : newAPI(_newAPI) { +ov::CoreImpl::CoreImpl(bool _newAPI) : m_new_api(_newAPI) { add_mutex(""); // Register global mutex executorManagerPtr = InferenceEngine::executorManager(); for (const auto& it : ov::get_available_opsets()) { @@ -90,7 +103,7 @@ ov::CoreImpl::CoreImpl(bool _newAPI) : newAPI(_newAPI) { } } -void ov::CoreImpl::RegisterPluginsInRegistry(const std::string& xmlConfigFile) { +void ov::CoreImpl::register_plugins_in_registry(const std::string& xmlConfigFile) { std::lock_guard lock(get_mutex()); auto parse_result = ParseXml(xmlConfigFile.c_str()); @@ -147,8 +160,9 @@ void ov::CoreImpl::RegisterPluginsInRegistry(const std::string& xmlConfigFile) { } } } -ov::InferencePlugin ov::CoreImpl::GetCPPPluginByName(const std::string& pluginName) const { - OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "CoreImpl::GetCPPPluginByName"); + +ov::Plugin ov::CoreImpl::get_plugin(const std::string& pluginName) const { + OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "CoreImpl::get_plugin"); auto deviceName = pluginName; if (deviceName == ov::DEFAULT_DEVICE_NAME) @@ -185,18 +199,18 @@ ov::InferencePlugin ov::CoreImpl::GetCPPPluginByName(const std::string& pluginNa // Plugin is in registry, but not created, let's create std::shared_ptr so; try { - ov::InferencePlugin plugin; + ov::Plugin plugin; if (desc.pluginCreateFunc) { // static OpenVINO case - std::shared_ptr plugin_impl; + std::shared_ptr plugin_impl; desc.pluginCreateFunc(plugin_impl); - plugin = InferencePlugin{plugin_impl, {}}; + plugin = Plugin{plugin_impl, {}}; } else { so = ov::util::load_shared_object(desc.libraryLocation.c_str()); - std::shared_ptr plugin_impl; + std::shared_ptr plugin_impl; reinterpret_cast( ov::util::get_symbol(so, InferenceEngine::create_plugin_function))(plugin_impl); - plugin = InferencePlugin{plugin_impl, so}; + plugin = Plugin{plugin_impl, so}; } { @@ -217,8 +231,8 @@ ov::InferencePlugin ov::CoreImpl::GetCPPPluginByName(const std::string& pluginNa // configuring { - if (DeviceSupportsCacheDir(plugin)) { - auto cacheConfig = coreConfig.getCacheConfigForDevice(deviceName); + if (device_supports_cache_dir(plugin)) { + auto cacheConfig = coreConfig.get_cache_config_for_device(deviceName); if (cacheConfig._cacheManager) { desc.defaultConfig[CONFIG_KEY(CACHE_DIR)] = cacheConfig._cacheDir; } @@ -228,7 +242,8 @@ ov::InferencePlugin ov::CoreImpl::GetCPPPluginByName(const std::string& pluginNa } allowNotImplemented([&]() { 
// Add device specific value to support device_name.device_id cases - std::vector supportedConfigKeys = plugin.get_metric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), {}); + std::vector supportedConfigKeys = + plugin.get_property(METRIC_KEY(SUPPORTED_CONFIG_KEYS), {}); auto config_iter = std::find(supportedConfigKeys.begin(), supportedConfigKeys.end(), CONFIG_KEY_INTERNAL(CONFIG_DEVICE_ID)); @@ -240,10 +255,10 @@ ov::InferencePlugin ov::CoreImpl::GetCPPPluginByName(const std::string& pluginNa InferenceEngine::DeviceIDParser parser(pluginDesc.first); if (pluginDesc.first.find(deviceName) != std::string::npos && !parser.getDeviceID().empty()) { pluginDesc.second.defaultConfig[deviceKey] = parser.getDeviceID(); - plugin.set_properties(pluginDesc.second.defaultConfig); + plugin.set_property(pluginDesc.second.defaultConfig); } } - plugin.set_properties(desc.defaultConfig); + plugin.set_property(desc.defaultConfig); }); allowNotImplemented([&]() { @@ -276,76 +291,16 @@ ov::InferencePlugin ov::CoreImpl::GetCPPPluginByName(const std::string& pluginNa } } -bool ov::CoreImpl::isNewAPI() const { - return newAPI; -} - -InferenceEngine::RemoteContext::Ptr ov::CoreImpl::GetDefaultContext(const std::string& deviceName) { - auto parsed = parseDeviceNameIntoConfig(deviceName, ov::AnyMap{}); - return GetCPPPluginByName(parsed._deviceName).get_default_context(parsed._config)._ptr; -} - -InferenceEngine::CNNNetwork ov::CoreImpl::ReadNetwork(const std::string& modelPath, const std::string& binPath) const { - OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from file"); - return InferenceEngine::details::ReadNetwork(modelPath, binPath, extensions, ov_extensions, newAPI); -} - -InferenceEngine::CNNNetwork ov::CoreImpl::ReadNetwork(const std::string& model, - const InferenceEngine::Blob::CPtr& weights, - bool frontendMode) const { - OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from memory"); - return InferenceEngine::details::ReadNetwork(model, weights, extensions, ov_extensions, newAPI, frontendMode); -} - -ov::SoPtr ov::CoreImpl::LoadNetwork( - const InferenceEngine::CNNNetwork& network, - const std::shared_ptr& context, - const std::map& config) { - OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::RemoteContext"); - if (context == nullptr) { - IE_THROW() << "Remote context is null"; - } - // have to deduce the device name/config from the context first - auto parsed = parseDeviceNameIntoConfig(context->getDeviceName(), config); - std::string& deviceName = parsed._deviceName; - std::map& config_with_batch = parsed._config; +ov::SoPtr ov::CoreImpl::compile_model( + const std::shared_ptr& model, + const std::string& device_name, + const ov::AnyMap& config) const { + OV_ITT_SCOPE(FIRST_INFERENCE, ie::itt::domains::IE_LT, "Core::compile_model::model"); + std::string deviceName = device_name; + ov::AnyMap config_with_batch = config; // if auto-batching is applicable, the below function will patch the device name and config accordingly: - ApplyAutoBatching(network, deviceName, config_with_batch); - CleanUpProperties(deviceName, config_with_batch, ov::auto_batch_timeout); - parsed = parseDeviceNameIntoConfig(deviceName, config_with_batch); - - auto plugin = GetCPPPluginByName(parsed._deviceName); - ov::SoPtr res; - auto cacheManager = - coreConfig.getCacheConfigForDevice(parsed._deviceName, DeviceSupportsCacheDir(plugin), parsed._config) - ._cacheManager; - auto cacheContent = CacheContent{cacheManager}; - if (cacheManager && 
DeviceSupportsImportExport(plugin)) { - cacheContent.blobId = CalculateNetworkHash(network, parsed._deviceName, plugin, parsed._config); - bool loadedFromCache = false; - auto lock = cacheGuard.getHashLock(cacheContent.blobId); - res = LoadNetworkFromCache(cacheContent, plugin, parsed._config, context, loadedFromCache); - if (!loadedFromCache) { - res = compile_model_impl(network, plugin, parsed._config, context, cacheContent); - } else { - // Temporary workaround until all plugins support caching of original model inputs - InferenceEngine::SetExeNetworkInfo(res._ptr, network.getFunction(), isNewAPI()); - } - } else { - res = compile_model_impl(network, plugin, parsed._config, context, cacheContent); - } - return res; -} -InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork( - const InferenceEngine::CNNNetwork& network, - const std::string& deviceNameOrig, - const std::map& config) { - OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::CNN"); - std::string deviceName = deviceNameOrig; - std::map config_with_batch = config; - // if auto-batching is applicable, the below function will patch the device name and config accordingly: - ApplyAutoBatching(network, deviceName, config_with_batch); - CleanUpProperties(deviceName, config_with_batch, ov::auto_batch_timeout); + apply_auto_batching(model, deviceName, config_with_batch); + clean_properties(deviceName, config_with_batch, ov::auto_batch_timeout); bool forceDisableCache = config_with_batch.count(CONFIG_KEY_INTERNAL(FORCE_DISABLE_CACHE)) > 0; auto parsed = parseDeviceNameIntoConfig(deviceName, config_with_batch); @@ -353,376 +308,188 @@ InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork( // remove this config key from parsed as plugins can throw unsupported exception parsed._config.erase(CONFIG_KEY_INTERNAL(FORCE_DISABLE_CACHE)); } - auto plugin = GetCPPPluginByName(parsed._deviceName); + auto plugin = get_plugin(parsed._deviceName); ov::SoPtr res; auto cacheManager = - coreConfig.getCacheConfigForDevice(parsed._deviceName, DeviceSupportsCacheDir(plugin), parsed._config) + coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), parsed._config) ._cacheManager; auto cacheContent = CacheContent{cacheManager}; - if (!forceDisableCache && cacheManager && DeviceSupportsImportExport(plugin)) { - cacheContent.blobId = CalculateNetworkHash(network, parsed._deviceName, plugin, parsed._config); + if (!forceDisableCache && cacheManager && device_supports_import_export(plugin)) { + cacheContent.blobId = CalculateNetworkHash(ov::legacy_convert::convert_model(model, is_new_api()), + parsed._deviceName, + plugin, + parsed._config); bool loadedFromCache = false; auto lock = cacheGuard.getHashLock(cacheContent.blobId); - res = LoadNetworkFromCache(cacheContent, plugin, parsed._config, nullptr, loadedFromCache); + res = load_model_from_cache(cacheContent, plugin, parsed._config, {}, loadedFromCache); if (!loadedFromCache) { - res = compile_model_impl(network, plugin, parsed._config, nullptr, cacheContent, forceDisableCache); + res = compile_model_impl(model, plugin, parsed._config, {}, cacheContent, forceDisableCache); } else { // Temporary workaround until all plugins support caching of original model inputs - InferenceEngine::SetExeNetworkInfo(res._ptr, network.getFunction(), isNewAPI()); + InferenceEngine::SetExeNetworkInfo(res._ptr, model, is_new_api()); } } else { - res = compile_model_impl(network, plugin, parsed._config, nullptr, cacheContent, 
forceDisableCache); + res = compile_model_impl(model, plugin, parsed._config, {}, cacheContent, forceDisableCache); } return {res._ptr, res._so}; } -InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork( - const std::string& modelPath, - const std::string& deviceName, - const std::map& config, - const std::function& val) { - OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::Path"); - auto parsed = parseDeviceNameIntoConfig(deviceName, config); - auto plugin = GetCPPPluginByName(parsed._deviceName); - ov::SoPtr res; - auto cacheManager = - coreConfig.getCacheConfigForDevice(parsed._deviceName, DeviceSupportsCacheDir(plugin), parsed._config) - ._cacheManager; - auto cacheContent = CacheContent{cacheManager, modelPath}; - if (cacheManager && DeviceSupportsImportExport(plugin)) { - bool loadedFromCache = false; - cacheContent.blobId = CalculateFileHash(modelPath, parsed._deviceName, plugin, parsed._config); - auto lock = cacheGuard.getHashLock(cacheContent.blobId); - res = LoadNetworkFromCache(cacheContent, plugin, parsed._config, nullptr, loadedFromCache); - if (!loadedFromCache) { - auto cnnNetwork = ReadNetwork(modelPath, std::string()); - if (val) { - val(cnnNetwork); - } - res = compile_model_impl(cnnNetwork, plugin, parsed._config, nullptr, cacheContent); - } - } else if (cacheManager) { - // TODO: 'validation' for dynamic API doesn't work for this case, as it affects a lot of plugin API - res = plugin.compile_model(modelPath, parsed._config); - } else { - auto cnnNetwork = ReadNetwork(modelPath, std::string()); - if (val) { - val(cnnNetwork); - } - res = compile_model_impl(cnnNetwork, plugin, parsed._config, nullptr, cacheContent); +ov::SoPtr ov::CoreImpl::compile_model( + const std::shared_ptr& model, + const ov::RemoteContext& context, + const ov::AnyMap& config) const { + OV_ITT_SCOPE(FIRST_INFERENCE, ie::itt::domains::IE_LT, "Core::compile_model::RemoteContext"); + if (context._impl == nullptr) { + IE_THROW() << "Remote context is null"; } - return {res._ptr, res._so}; -} + // have to deduce the device name/config from the context first + auto parsed = parseDeviceNameIntoConfig(context.get_device_name(), config); + std::string& deviceName = parsed._deviceName; + auto& config_with_batch = parsed._config; + // if auto-batching is applicable, the below function will patch the device name and config accordingly: + apply_auto_batching(model, deviceName, config_with_batch); + clean_properties(deviceName, config_with_batch, ov::auto_batch_timeout); + parsed = parseDeviceNameIntoConfig(deviceName, config_with_batch); -InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork( - const std::string& modelStr, - const InferenceEngine::Blob::CPtr& weights, - const std::string& deviceName, - const std::map& config, - const std::function& val) { - OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::Memory"); - auto parsed = parseDeviceNameIntoConfig(deviceName, config); - auto plugin = GetCPPPluginByName(parsed._deviceName); + auto plugin = get_plugin(parsed._deviceName); ov::SoPtr res; - auto cacheManager = - coreConfig.getCacheConfigForDevice(parsed._deviceName, DeviceSupportsCacheDir(plugin), parsed._config) + coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), parsed._config) ._cacheManager; auto cacheContent = CacheContent{cacheManager}; - if (cacheManager && DeviceSupportsImportExport(plugin)) { + if (cacheManager && 
device_supports_import_export(plugin)) { + cacheContent.blobId = CalculateNetworkHash(ov::legacy_convert::convert_model(model, is_new_api()), + parsed._deviceName, + plugin, + parsed._config); bool loadedFromCache = false; - ov::Tensor tensor = ov::Tensor(); - if (weights) { - tensor = ov::Tensor(element::u8, {weights->byteSize()}, weights->cbuffer().as()); - } - cacheContent.blobId = CalculateMemoryHash(modelStr, tensor, parsed._deviceName, plugin, parsed._config); auto lock = cacheGuard.getHashLock(cacheContent.blobId); - res = LoadNetworkFromCache(cacheContent, plugin, parsed._config, nullptr, loadedFromCache); + res = load_model_from_cache(cacheContent, plugin, parsed._config, context, loadedFromCache); if (!loadedFromCache) { - auto cnnNetwork = ReadNetwork(modelStr, weights); - if (val) { - val(cnnNetwork); - } - res = compile_model_impl(cnnNetwork, plugin, parsed._config, nullptr, cacheContent); + res = compile_model_impl(model, plugin, parsed._config, context, cacheContent); + } else { + // Temporary workaround until all plugins support caching of original model inputs + InferenceEngine::SetExeNetworkInfo(res._ptr, model, isNewAPI()); } } else { - auto cnnNetwork = ReadNetwork(modelStr, weights); - if (val) { - val(cnnNetwork); + res = compile_model_impl(model, plugin, parsed._config, context, cacheContent); + } + return res; +} +ov::SoPtr ov::CoreImpl::compile_model( + ov::Plugin& plugin, + const std::shared_ptr& model, + const ov::RemoteContext& context, + const ov::AnyMap& config) const { + std::shared_ptr cloned_model = model->clone(); + ov::SoPtr compiled_model; + + if (!is_new_api() && !std::dynamic_pointer_cast(plugin.m_ptr)) { + OPENVINO_NOT_IMPLEMENTED; + } + + if (!context._impl) { + compiled_model = plugin.compile_model(cloned_model, config); + } else { + compiled_model = plugin.compile_model(cloned_model, context, config); + } + return compiled_model; +} + +ov::SoPtr ov::CoreImpl::compile_model(const std::string& model_path, + const std::string& device_name, + const ov::AnyMap& config) const { + OV_ITT_SCOPE(FIRST_INFERENCE, ie::itt::domains::IE_LT, "Core::compile_model::Path"); + auto parsed = parseDeviceNameIntoConfig(device_name, config); + auto plugin = get_plugin(parsed._deviceName); + ov::SoPtr res; + auto cacheManager = + coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), parsed._config) + ._cacheManager; + auto cacheContent = CacheContent{cacheManager, model_path}; + if (cacheManager && device_supports_import_export(plugin)) { + bool loadedFromCache = false; + cacheContent.blobId = calculate_file_hash(model_path, parsed._deviceName, plugin, parsed._config); + auto lock = cacheGuard.getHashLock(cacheContent.blobId); + res = load_model_from_cache(cacheContent, plugin, parsed._config, {}, loadedFromCache); + if (!loadedFromCache) { + auto cnnNetwork = ReadNetwork(model_path, std::string()); + res = compile_model_impl(ov::legacy_convert::convert_model(cnnNetwork, isNewAPI()), + plugin, + parsed._config, + {}, + cacheContent); } - res = compile_model_impl(cnnNetwork, plugin, parsed._config, nullptr, cacheContent); + } else if (cacheManager) { + auto cnnNetwork = ReadNetwork(model_path, std::string()); + // TODO: 'validation' for dynamic API doesn't work for this case, as it affects a lot of plugin API + res = compile_model(plugin, ov::legacy_convert::convert_model(cnnNetwork, isNewAPI()), {}, parsed._config); + } else { + auto cnnNetwork = ReadNetwork(model_path, std::string()); + res = 
compile_model_impl(ov::legacy_convert::convert_model(cnnNetwork, isNewAPI()), + plugin, + parsed._config, + {}, + cacheContent); } return {res._ptr, res._so}; } -InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::ImportNetwork( - std::istream& networkModel, - const std::string& deviceName, - const std::map& config) { - auto parsed = parseDeviceNameIntoConfig(deviceName, config); - auto exec = GetCPPPluginByName(parsed._deviceName).import_model(networkModel, parsed._config); +ov::SoPtr ov::CoreImpl::compile_model(const std::string& model_str, + const ov::Tensor& weights, + const std::string& device_name, + const ov::AnyMap& config) const { + auto parsed = parseDeviceNameIntoConfig(device_name, config); + auto plugin = get_plugin(parsed._deviceName); + ov::SoPtr res; + + auto cacheManager = + coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), parsed._config) + ._cacheManager; + auto cacheContent = CacheContent{cacheManager}; + if (cacheManager && device_supports_import_export(plugin)) { + bool loadedFromCache = false; + cacheContent.blobId = calculate_memory_hash(model_str, weights, parsed._deviceName, plugin, parsed._config); + auto lock = cacheGuard.getHashLock(cacheContent.blobId); + res = load_model_from_cache(cacheContent, plugin, parsed._config, {}, loadedFromCache); + if (!loadedFromCache) { + auto cnnNetwork = read_model(model_str, weights); + res = compile_model_impl(cnnNetwork, plugin, parsed._config, {}, cacheContent); + } + } else { + auto cnnNetwork = read_model(model_str, weights); + res = compile_model_impl(cnnNetwork, plugin, parsed._config, {}, cacheContent); + } + return {res._ptr, res._so}; +} + +ov::SoPtr ov::CoreImpl::import_model(std::istream& model, + const std::string& device_name, + const ov::AnyMap& config) const { + auto parsed = parseDeviceNameIntoConfig(device_name, config); + auto exec = get_plugin(parsed._deviceName).import_model(model, config); return {exec._ptr, exec._so}; } -InferenceEngine::QueryNetworkResult ov::CoreImpl::QueryNetwork(const InferenceEngine::CNNNetwork& network, - const std::string& deviceName, - const std::map& config) const { - OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::QueryNetwork"); - auto parsed = parseDeviceNameIntoConfig(deviceName, config); - auto res = GetCPPPluginByName(parsed._deviceName).query_model(network, parsed._config); - if (!network.getFunction() || res.supportedLayersMap.empty()) - return res; - - const auto& func = network.getFunction(); - auto specialized_function = func->clone(); - - std::string defDevice = res.supportedLayersMap.begin()->second; - ngraph::pass::ConstantFolding().run_on_model(specialized_function); - std::unordered_set opNames; - - for (const auto& op : specialized_function->get_ops()) - opNames.emplace(op->get_friendly_name()); - - for (const auto& op : func->get_ops()) { - if (opNames.find(op->get_friendly_name()) == opNames.end()) { - res.supportedLayersMap[op->get_friendly_name()] = defDevice; - } - } - - for (const auto& op : func->get_ops()) { - if (!res.supportedLayersMap.count(op->get_friendly_name()) && - std::dynamic_pointer_cast(op)) { - bool are_all_users_supported = true; - for (const auto& user : op->output(0).get_target_inputs()) { - if (!res.supportedLayersMap.count(user.get_node()->get_friendly_name())) { - are_all_users_supported = false; - break; - } - } - if (are_all_users_supported) { - res.supportedLayersMap[op->get_friendly_name()] = defDevice; - } - } - } - return res; +ov::SupportedOpsMap ov::CoreImpl::query_model(const 
std::shared_ptr& model, + const std::string& device_name, + const ov::AnyMap& config) const { + OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::query_model"); + auto parsed = parseDeviceNameIntoConfig(device_name, config); + return get_plugin(parsed._deviceName).query_model(model, parsed._config); } -void ov::CoreImpl::ApplyAutoBatching(const InferenceEngine::CNNNetwork& network, - std::string& deviceName, - std::map& config) { - std::string deviceNameWithBatchSize, deviceNameWithoutBatch; - // fully strict dims tracking by default (Auto-Batching is enabled implicitly) - bool strictly_check_dims = true; - if (deviceName.find("BATCH") != std::string::npos) { - // explicitly enabled Auto-Batching - auto pos = deviceName.find_first_of(":"); - if (pos == std::string::npos) - return; // BATCH device is already configured via the config - deviceNameWithBatchSize = deviceName.substr(pos + 1); - deviceNameWithoutBatch = InferenceEngine::DeviceIDParser::getBatchDevice(deviceNameWithBatchSize); - // when user sets the BATCH device explicitly, we may check the dims less strictly - // as the result is being checked by the user - strictly_check_dims = false; - } else { - // check if Auto-Batch plugin registered - try { - GetCPPPluginByName("BATCH"); - } catch (const std::runtime_error&) { - return; - } - // check whether the Auto-Batching is disabled explicitly - const auto& batch_mode = config.find(ov::hint::allow_auto_batching.name()); - if (batch_mode != config.end()) { - const auto disabled = batch_mode->second == CONFIG_VALUE(NO); - // virtual plugins like AUTO/MULTI will need the config - // e.g to deduce the #requests correctly - // otherwise, no need for this config key in the rest of loading - if (deviceName.find("AUTO") == std::string::npos && deviceName.find("MULTI") == std::string::npos) - config.erase(batch_mode); - if (disabled) - return; - } else if (!coreConfig.flag_allow_auto_batching) { - return; - } - // check whether if the Auto-Batching is applicable to the device - auto device = parseDeviceNameIntoConfig(deviceName); - deviceNameWithoutBatch = deviceName; - auto d = device._deviceName; - std::vector metrics = GetCPPPluginByName(d).get_metric(METRIC_KEY(SUPPORTED_METRICS), {}); - auto it = std::find(metrics.begin(), metrics.end(), METRIC_KEY(OPTIMAL_BATCH_SIZE)); - if (metrics.end() == it) - return; - // if applicable, the Auto-Batching is implicitly enabled via the performance hints - bool bTputInPlg = GetConfig(d, CONFIG_KEY(PERFORMANCE_HINT)).as() == CONFIG_VALUE(THROUGHPUT); - const auto& mode = config.find(CONFIG_KEY(PERFORMANCE_HINT)); - bool bTputInLoadCfg = (mode != config.end() && mode->second == CONFIG_VALUE(THROUGHPUT)); - const auto& excl = config.find(CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS)); - bool bExclReqsEnabled = (excl != config.end() && excl->second == CONFIG_VALUE(YES)); - if (bExclReqsEnabled || (!bTputInPlg && !bTputInLoadCfg)) - return; - } - auto batchConfig = deviceNameWithBatchSize.empty() ? 
deviceNameWithoutBatch : deviceNameWithBatchSize; - auto res = InferenceEngine::details::isNetworkBatchable(network, deviceNameWithoutBatch, strictly_check_dims); - switch (res) { - case InferenceEngine::details::NetworkBatchAbility::NO: - return; - case InferenceEngine::details::NetworkBatchAbility::AS_IS: - deviceName = "BATCH:" + batchConfig; - break; - case InferenceEngine::details::NetworkBatchAbility::WITH_HETERO: - deviceName = "HETERO:BATCH," + deviceNameWithoutBatch; - config[CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG)] = batchConfig; - break; - } -} - -void ov::CoreImpl::CleanUpProperties(std::string& deviceName, - std::map& config, - ov::Any property) { - // auto-batching is not applicable, if there is auto_batch_timeout, delete it - if (deviceName.find("BATCH") == std::string::npos) { - const auto& batch_timeout_mode = config.find(property.as()); - if (batch_timeout_mode != config.end()) { - if (deviceName.find("AUTO") == std::string::npos && deviceName.find("MULTI") == std::string::npos) - config.erase(batch_timeout_mode); - } - } -} - -ov::Any ov::CoreImpl::GetMetric(const std::string& deviceName, - const std::string& name, - const ov::AnyMap& options) const { - // HETERO case - { - if (deviceName.find("HETERO:") == 0) { - IE_THROW() - << "You can get specific metrics with the GetMetric only for the HETERO itself (without devices). " - "To get individual devices's metrics call GetMetric for each device separately"; - } - } - - // MULTI case - { - if (deviceName.find("MULTI:") == 0) { - IE_THROW() - << "You can get specific metrics with the GetMetric only for the MULTI itself (without devices). " - "To get individual devices's metrics call GetMetric for each device separately"; - } - } - - // AUTO case - { - if (deviceName.find("AUTO:") == 0) { - IE_THROW() << "You can get specific metrics with the GetMetric only for the AUTO itself (without devices). " - "To get individual devices's metrics call GetMetric for each device separately"; - } - } - - // BATCH case - { - if (deviceName.find("BATCH:") == 0) { - IE_THROW() - << "You can get specific metrics with the GetMetric only for the BATCH itself (without devices). " - "To get individual devices's metrics call GetMetric for each device separately"; - } - } - - auto parsed = parseDeviceNameIntoConfig(deviceName); - for (auto o : options) { - parsed._config.insert(o); - } - - return GetCPPPluginByName(parsed._deviceName).get_metric(name, parsed._config); -} - -void ov::CoreImpl::set_property(const std::string& device_name, const AnyMap& properties) { - OPENVINO_ASSERT(device_name.find("HETERO:") != 0, - "set_property is supported only for HETERO itself (without devices). " - "You can configure the devices with set_property before creating the HETERO on top."); - OPENVINO_ASSERT(device_name.find("MULTI:") != 0, - "set_property is supported only for MULTI itself (without devices). " - "You can configure the devices with set_property before creating the MULTI on top."); - OPENVINO_ASSERT(device_name.find("AUTO:") != 0, - "set_property is supported only for AUTO itself (without devices). " - "You can configure the devices with set_property before creating the AUTO on top."); - OPENVINO_ASSERT(device_name.find("BATCH:") != 0, - "set_property is supported only for BATCH itself (without devices). 
" - "You can configure the devices with set_property before creating the BATCH on top."); - - bool isMetaDevice = device_name.find("AUTO") != std::string::npos || - device_name.find("MULTI") != std::string::npos || - device_name.find("HETERO") != std::string::npos; - if (!isMetaDevice) { - // unsupport to set ov::device::properties to HW device through this function - auto devices = GetListOfDevicesInRegistry(); - for (auto&& config : properties) { - auto parsed = parseDeviceNameIntoConfig(config.first); - auto is_secondary_config_for_hw_device = - std::any_of(devices.begin(), devices.end(), [&](const std::string& device) { - return device == parsed._deviceName; - }); - OPENVINO_ASSERT(!is_secondary_config_for_hw_device, - "set_property only supported ov::device::propreties for Meta device (AUTO/MULTI/HETERO). " - "You can configure the devices through the compile_model()/loadNetwork() API."); - } - } - SetConfigForPlugins(properties, device_name); -} - -ov::Any ov::CoreImpl::get_property_for_core(const std::string& name) const { - if (name == ov::force_tbb_terminate.name()) { - const auto flag = InferenceEngine::executorManager()->getTbbFlag(); - return decltype(ov::force_tbb_terminate)::value_type(flag); - } else if (name == ov::cache_dir.name()) { - return ov::Any(coreConfig.get_cache_dir()); - } else if (name == ov::hint::allow_auto_batching.name()) { - const auto flag = coreConfig.flag_allow_auto_batching; - return decltype(ov::hint::allow_auto_batching)::value_type(flag); - } - - IE_THROW() << "Exception is thrown while trying to call get_property with unsupported property: '" << name << "'"; -} - -ov::Any ov::CoreImpl::get_property(const std::string& device_name, - const std::string& name, - const AnyMap& arguments) const { - OPENVINO_ASSERT(device_name.find("HETERO:") != 0, - "You can only get_property of the HETERO itself (without devices). " - "get_property is also possible for the individual devices before creating the HETERO on top."); - OPENVINO_ASSERT(device_name.find("MULTI:") != 0, - "You can only get_property of the MULTI itself (without devices). " - "get_property is also possible for the individual devices before creating the MULTI on top."); - OPENVINO_ASSERT(device_name.find("AUTO:") != 0, - "You can only get_property of the AUTO itself (without devices). " - "get_property is also possible for the individual devices before creating the AUTO on top."); - OPENVINO_ASSERT(device_name.find("BATCH:") != 0, - "You can only get_property of the BATCH itself (without devices). 
" - "get_property is also possible for the individual devices before creating the BATCH on top."); - - if (device_name.empty()) { - return get_property_for_core(name); - } - - auto parsed = parseDeviceNameIntoConfig(device_name, arguments); - return GetCPPPluginByName(parsed._deviceName).get_property(name, parsed._config); -} - -ov::Any ov::CoreImpl::GetConfig(const std::string& deviceName, const std::string& name) const { - auto parsed = parseDeviceNameIntoConfig(deviceName); - return GetCPPPluginByName(parsed._deviceName).get_config(name, parsed._config); -} - -std::vector ov::CoreImpl::GetAvailableDevices() const { +std::vector ov::CoreImpl::get_available_devices() const { std::vector devices; const std::string propertyName = METRIC_KEY(AVAILABLE_DEVICES); - for (auto&& deviceName : GetListOfDevicesInRegistry()) { + for (auto&& deviceName : get_registered_devices()) { std::vector devicesIDs; try { - const InferenceEngine::Parameter p = GetMetric(deviceName, propertyName); + const ie::Parameter p = GetMetric(deviceName, propertyName); devicesIDs = p.as>(); - } catch (const InferenceEngine::Exception&) { + } catch (const ie::Exception&) { // plugin is not created by e.g. invalid env } catch (const ov::Exception&) { // plugin is not created by e.g. invalid env @@ -748,13 +515,216 @@ std::vector ov::CoreImpl::GetAvailableDevices() const { return devices; } -InferenceEngine::RemoteContext::Ptr ov::CoreImpl::CreateContext(const std::string& deviceName, - const InferenceEngine::ParamMap& params) { - auto parsed = parseDeviceNameIntoConfig(deviceName, params); - return GetCPPPluginByName(parsed._deviceName).create_context(parsed._config)._ptr; +ov::RemoteContext ov::CoreImpl::create_context(const std::string& device_name, const AnyMap& args) const { + auto parsed = ov::parseDeviceNameIntoConfig(device_name, args); + return get_plugin(parsed._deviceName).create_context(parsed._config); } -void ov::CoreImpl::UnloadPluginByName(const std::string& deviceName) { +ov::AnyMap ov::CoreImpl::get_supported_property(const std::string& device_name, const ov::AnyMap& config) const { + std::vector supportedConfigKeys; + try { + supportedConfigKeys = GetMetric(device_name, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as>(); + } catch (ov::Exception&) { + } + try { + for (auto&& property : ICore::get_property(device_name, ov::supported_properties)) { + if (property.is_mutable()) { + supportedConfigKeys.emplace_back(std::move(property)); + } + } + } catch (ov::Exception&) { + } + ov::AnyMap supportedConfig; + for (auto&& key : supportedConfigKeys) { + auto itKey = config.find(key); + if (config.end() != itKey) { + supportedConfig[key] = itKey->second; + } + } + for (auto&& config : config) { + auto parsed = parseDeviceNameIntoConfig(config.first); + if (device_name.find(parsed._deviceName) != std::string::npos) { + std::stringstream strm(config.second.as()); + std::map device_configs; + util::Read>{}(strm, device_configs); + for (auto&& device_config : device_configs) { + if (util::contains(supportedConfigKeys, device_config.first)) { + supportedConfig[device_config.first] = device_config.second; + } + } + for (auto&& config : parsed._config) { + supportedConfig[config.first] = config.second.as(); + } + } + } + return supportedConfig; +} + +bool ov::CoreImpl::is_new_api() const { + return m_new_api; +} + +ov::RemoteContext ov::CoreImpl::get_default_context(const std::string& device_name) const { + auto parsed = ov::parseDeviceNameIntoConfig(device_name, ov::AnyMap{}); + return 
get_plugin(parsed._deviceName).get_default_context(parsed._config); +} + +void ov::CoreImpl::apply_auto_batching(const std::shared_ptr& model, + std::string& deviceName, + ov::AnyMap& config) const { + std::string deviceNameWithBatchSize, deviceNameWithoutBatch; + // fully strict dims tracking by default (Auto-Batching is enabled implicitly) + bool strictly_check_dims = true; + if (deviceName.find("BATCH") != std::string::npos) { + // explicitly enabled Auto-Batching + auto pos = deviceName.find_first_of(":"); + if (pos == std::string::npos) + return; // BATCH device is already configured via the config + deviceNameWithBatchSize = deviceName.substr(pos + 1); + deviceNameWithoutBatch = InferenceEngine::DeviceIDParser::getBatchDevice(deviceNameWithBatchSize); + // when user sets the BATCH device explicitly, we may check the dims less strictly + // as the result is being checked by the user + strictly_check_dims = false; + } else { + // check if Auto-Batch plugin registered + try { + get_plugin("BATCH"); + } catch (const std::runtime_error&) { + return; + } + // check whether the Auto-Batching is disabled explicitly + const auto& batch_mode = config.find(ov::hint::allow_auto_batching.name()); + if (batch_mode != config.end()) { + const auto disabled = batch_mode->second.as() == CONFIG_VALUE(NO); + // virtual plugins like AUTO/MULTI will need the config + // e.g to deduce the #requests correctly + // otherwise, no need for this config key in the rest of loading + if (deviceName.find("AUTO") == std::string::npos && deviceName.find("MULTI") == std::string::npos) + config.erase(batch_mode); + if (disabled) + return; + } else if (!coreConfig.flag_allow_auto_batching) { + return; + } + // check whether if the Auto-Batching is applicable to the device + auto device = ov::parseDeviceNameIntoConfig(deviceName); + deviceNameWithoutBatch = deviceName; + auto d = device._deviceName; + std::vector metrics = + get_plugin(d).get_property(METRIC_KEY(SUPPORTED_METRICS), {}).as>(); + auto it = std::find(metrics.begin(), metrics.end(), METRIC_KEY(OPTIMAL_BATCH_SIZE)); + if (metrics.end() == it) + return; + // if applicable, the Auto-Batching is implicitly enabled via the performance hints + bool bTputInPlg = GetConfig(d, CONFIG_KEY(PERFORMANCE_HINT)).as() == CONFIG_VALUE(THROUGHPUT); + const auto& mode = config.find(CONFIG_KEY(PERFORMANCE_HINT)); + bool bTputInLoadCfg = (mode != config.end() && mode->second.as() == CONFIG_VALUE(THROUGHPUT)); + const auto& excl = config.find(CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS)); + bool bExclReqsEnabled = (excl != config.end() && excl->second.as() == CONFIG_VALUE(YES)); + if (bExclReqsEnabled || (!bTputInPlg && !bTputInLoadCfg)) + return; + } + auto batchConfig = deviceNameWithBatchSize.empty() ? 
deviceNameWithoutBatch : deviceNameWithBatchSize; + auto res = ov::details::is_model_batchable(model, deviceNameWithoutBatch, strictly_check_dims); + switch (res) { + case ov::details::NetworkBatchAbility::NO: + return; + case ov::details::NetworkBatchAbility::AS_IS: + deviceName = "BATCH:" + batchConfig; + break; + case ov::details::NetworkBatchAbility::WITH_HETERO: + deviceName = "HETERO:BATCH," + deviceNameWithoutBatch; + config[CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG)] = batchConfig; + break; + } +} + +void ov::CoreImpl::clean_properties(std::string& deviceName, ov::AnyMap& config, ov::Any property) const { + // auto-batching is not applicable, if there is auto_batch_timeout, delete it + if (deviceName.find("BATCH") == std::string::npos) { + const auto& batch_timeout_mode = config.find(property.as()); + if (batch_timeout_mode != config.end()) { + if (deviceName.find("AUTO") == std::string::npos && deviceName.find("MULTI") == std::string::npos) + config.erase(batch_timeout_mode); + } + } +} + +void ov::CoreImpl::set_property(const std::string& device_name, const AnyMap& properties) { + OPENVINO_ASSERT(device_name.find("HETERO:") != 0, + "set_property is supported only for HETERO itself (without devices). " + "You can configure the devices with set_property before creating the HETERO on top."); + OPENVINO_ASSERT(device_name.find("MULTI:") != 0, + "set_property is supported only for MULTI itself (without devices). " + "You can configure the devices with set_property before creating the MULTI on top."); + OPENVINO_ASSERT(device_name.find("AUTO:") != 0, + "set_property is supported only for AUTO itself (without devices). " + "You can configure the devices with set_property before creating the AUTO on top."); + OPENVINO_ASSERT(device_name.find("BATCH:") != 0, + "set_property is supported only for BATCH itself (without devices). " + "You can configure the devices with set_property before creating the BATCH on top."); + + bool isMetaDevice = device_name.find("AUTO") != std::string::npos || + device_name.find("MULTI") != std::string::npos || + device_name.find("HETERO") != std::string::npos; + if (!isMetaDevice) { + // unsupport to set ov::device::properties to HW device through this function + auto devices = get_registered_devices(); + for (auto&& config : properties) { + auto parsed = parseDeviceNameIntoConfig(config.first); + auto is_secondary_config_for_hw_device = + std::any_of(devices.begin(), devices.end(), [&](const std::string& device) { + return device == parsed._deviceName; + }); + OPENVINO_ASSERT(!is_secondary_config_for_hw_device, + "set_property only supported ov::device::propreties for Meta device (AUTO/MULTI/HETERO). 
" + "You can configure the devices through the compile_model()/loadNetwork() API."); + } + } + set_property_for_devivce(properties, device_name); +} + +ov::Any ov::CoreImpl::get_property_for_core(const std::string& name) const { + if (name == ov::force_tbb_terminate.name()) { + const auto flag = InferenceEngine::executorManager()->getTbbFlag(); + return decltype(ov::force_tbb_terminate)::value_type(flag); + } else if (name == ov::cache_dir.name()) { + return ov::Any(coreConfig.get_cache_dir()); + } else if (name == ov::hint::allow_auto_batching.name()) { + const auto flag = coreConfig.flag_allow_auto_batching; + return decltype(ov::hint::allow_auto_batching)::value_type(flag); + } + + OPENVINO_UNREACHABLE("Exception is thrown while trying to call get_property with unsupported property: '", + name, + "'"); +} + +ov::Any ov::CoreImpl::get_property(const std::string& device_name, + const std::string& name, + const AnyMap& arguments) const { + OPENVINO_ASSERT(device_name.find("HETERO:") != 0, + "You can only get_property of the HETERO itself (without devices). " + "get_property is also possible for the individual devices before creating the HETERO on top."); + OPENVINO_ASSERT(device_name.find("MULTI:") != 0, + "You can only get_property of the MULTI itself (without devices). " + "get_property is also possible for the individual devices before creating the MULTI on top."); + OPENVINO_ASSERT(device_name.find("AUTO:") != 0, + "You can only get_property of the AUTO itself (without devices). " + "get_property is also possible for the individual devices before creating the AUTO on top."); + OPENVINO_ASSERT(device_name.find("BATCH:") != 0, + "You can only get_property of the BATCH itself (without devices). " + "get_property is also possible for the individual devices before creating the BATCH on top."); + + if (device_name.empty()) { + return get_property_for_core(name); + } + + auto parsed = parseDeviceNameIntoConfig(device_name, arguments); + return get_plugin(parsed._deviceName).get_property(name, parsed._config); +} + +void ov::CoreImpl::unload_plugin(const std::string& deviceName) { std::lock_guard lock(get_mutex()); auto it = plugins.find(deviceName); if (it == plugins.end()) { @@ -768,7 +738,7 @@ void ov::CoreImpl::UnloadPluginByName(const std::string& deviceName) { * @brief Registers plugin meta-data in registry for specified device * @param deviceName A name of device */ -void ov::CoreImpl::RegisterPluginByName(const std::string& pluginName, const std::string& deviceName) { +void ov::CoreImpl::register_plugin(const std::string& pluginName, const std::string& deviceName) { std::lock_guard lock(get_mutex()); auto it = pluginRegistry.find(deviceName); @@ -789,7 +759,7 @@ void ov::CoreImpl::RegisterPluginByName(const std::string& pluginName, const std * @brief Provides a list of plugin names in registry; physically such plugins may not be created * @return A list of plugin names */ -std::vector ov::CoreImpl::GetListOfDevicesInRegistry() const { +std::vector ov::CoreImpl::get_registered_devices() const { std::lock_guard lock(get_mutex()); std::vector listOfDevices; @@ -807,7 +777,7 @@ std::vector ov::CoreImpl::GetListOfDevicesInRegistry() const { * @note `deviceName` is not allowed in form of MULTI:CPU, HETERO:GPU,CPU, AUTO:CPU * just simple forms like CPU, GPU, MULTI, GPU.0, etc */ -void ov::CoreImpl::SetConfigForPlugins(const ov::AnyMap& configMap, const std::string& deviceName) { +void ov::CoreImpl::set_property_for_devivce(const ov::AnyMap& configMap, const std::string& deviceName) { auto 
config = configMap; if (config.empty()) { return; @@ -816,17 +786,17 @@ void ov::CoreImpl::SetConfigForPlugins(const ov::AnyMap& configMap, const std::s InferenceEngine::DeviceIDParser parser(deviceName); std::string clearDeviceName = parser.getDeviceName(); - std::vector> created_plugins; + std::vector> created_plugins; { std::lock_guard lock(get_mutex()); created_plugins.reserve(plugins.size()); if (deviceName.empty()) { - coreConfig.setAndUpdate(config); + coreConfig.set_and_update(config); } else { auto cache_it = config.find(CONFIG_KEY(CACHE_DIR)); if (cache_it != config.end()) { - coreConfig.setCacheForDevice(cache_it->second, clearDeviceName); + coreConfig.set_cache_dir_for_device(cache_it->second, clearDeviceName); } } @@ -854,7 +824,7 @@ void ov::CoreImpl::SetConfigForPlugins(const ov::AnyMap& configMap, const std::s // set config for already created plugins for (auto& plugin : plugins) { if (deviceName.empty() || clearDeviceName == plugin.first) { - created_plugins.emplace_back(std::pair{plugin.first, plugin.second}); + created_plugins.emplace_back(std::pair{plugin.first, plugin.second}); } } } @@ -862,8 +832,8 @@ void ov::CoreImpl::SetConfigForPlugins(const ov::AnyMap& configMap, const std::s allowNotImplemented([&]() { std::lock_guard lock(get_mutex(plugin.first)); auto configCopy = config; - if (DeviceSupportsCacheDir(plugin.second)) { - auto cacheConfig = coreConfig.getCacheConfigForDevice(deviceName); + if (device_supports_cache_dir(plugin.second)) { + auto cacheConfig = coreConfig.get_cache_config_for_device(deviceName); if (cacheConfig._cacheManager) { configCopy[CONFIG_KEY(CACHE_DIR)] = cacheConfig._cacheDir; } @@ -873,7 +843,7 @@ void ov::CoreImpl::SetConfigForPlugins(const ov::AnyMap& configMap, const std::s } // Add device specific value to support device_name.device_id cases std::vector supportedConfigKeys = - plugin.second.get_metric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), {}); + plugin.second.get_property(METRIC_KEY(SUPPORTED_CONFIG_KEYS), {}); auto config_iter = std::find(supportedConfigKeys.begin(), supportedConfigKeys.end(), CONFIG_KEY_INTERNAL(CONFIG_DEVICE_ID)); @@ -884,82 +854,12 @@ void ov::CoreImpl::SetConfigForPlugins(const ov::AnyMap& configMap, const std::s if (!parser.getDeviceID().empty()) { configCopy[deviceKey] = parser.getDeviceID(); } - plugin.second.set_properties(configCopy); + plugin.second.set_property(configCopy); }); } } -/** - * @brief Get device config it is passed as pair of device_name and `AnyMap` - * @param configs All set of configs - * @note `device_name` is not allowed in form of MULTI:CPU, HETERO:GPU,CPU, AUTO:CPU - * just simple forms like CPU, GPU, MULTI, GPU.0, etc - */ -void ov::CoreImpl::ExtractAndSetDeviceConfig(const ov::AnyMap& configs) { - for (auto&& config : configs) { - auto parsed = parseDeviceNameIntoConfig(config.first); - auto devices = GetListOfDevicesInRegistry(); - auto config_is_device_name_in_regestry = - std::any_of(devices.begin(), devices.end(), [&](const std::string& device) { - return device == parsed._deviceName; - }); - if (config_is_device_name_in_regestry) { - SetConfigForPlugins(config.second.as(), config.first); - } - } -} - -std::map ov::CoreImpl::GetSupportedConfig(const std::string& deviceName, - const std::map& configs) { - std::vector supportedConfigKeys; - try { - supportedConfigKeys = GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as>(); - } catch (ov::Exception&) { - } - try { - for (auto&& property : ICore::get_property(deviceName, ov::supported_properties)) { - if (property.is_mutable()) 
{ - supportedConfigKeys.emplace_back(std::move(property)); - } - } - } catch (ov::Exception&) { - } - std::map supportedConfig; - for (auto&& key : supportedConfigKeys) { - auto itKey = configs.find(key); - if (configs.end() != itKey) { - supportedConfig[key] = itKey->second; - } - } - for (auto&& config : configs) { - auto parsed = parseDeviceNameIntoConfig(config.first); - if (deviceName.find(parsed._deviceName) != std::string::npos) { - std::stringstream strm(config.second); - std::map device_configs; - util::Read>{}(strm, device_configs); - for (auto&& device_config : device_configs) { - if (util::contains(supportedConfigKeys, device_config.first)) { - supportedConfig[device_config.first] = device_config.second; - } - } - for (auto&& config : parsed._config) { - supportedConfig[config.first] = config.second.as(); - } - } - } - return supportedConfig; -} - -/** - * @brief Registers the extension in a Core object - * Such extensions can be used for both CNNNetwork readers and device plugins - */ -void ov::CoreImpl::AddExtension(const InferenceEngine::IExtensionPtr& extension) { - std::lock_guard lock(get_mutex()); - AddExtensionUnsafe(extension); -} - -void ov::CoreImpl::AddOVExtensions(const std::vector& extensions) { +void ov::CoreImpl::add_extension(const std::vector& extensions) { std::lock_guard lock(get_mutex()); for (const auto& ext : extensions) { ov_extensions.emplace_back(ext); @@ -975,74 +875,23 @@ const std::vector& ov::CoreImpl::GetExtensions() return extensions; } -const std::vector& ov::CoreImpl::GetOVExtensions() const { - return ov_extensions; -} - -std::map ov::CoreImpl::GetVersions(const std::string& deviceName) const { - std::map versions; - std::vector deviceNames; - - { - // for compatibility with samples / demo - if (deviceName.find("HETERO") == 0) { - auto pos = deviceName.find_first_of(":"); - if (pos != std::string::npos) { - deviceNames = InferenceEngine::DeviceIDParser::getHeteroDevices(deviceName.substr(pos + 1)); - } - deviceNames.push_back("HETERO"); - } else if (deviceName.find("MULTI") == 0) { - auto pos = deviceName.find_first_of(":"); - if (pos != std::string::npos) { - deviceNames = InferenceEngine::DeviceIDParser::getMultiDevices(deviceName.substr(pos + 1)); - } - deviceNames.push_back("MULTI"); - } else if (deviceName.find("AUTO") == 0) { - auto pos = deviceName.find_first_of(":"); - if (pos != std::string::npos) { - deviceNames = InferenceEngine::DeviceIDParser::getMultiDevices(deviceName.substr(pos + 1)); - } - deviceNames.emplace_back("AUTO"); - } else if (deviceName.find("BATCH") == 0) { - auto pos = deviceName.find_first_of(":"); - if (pos != std::string::npos) { - deviceNames = {InferenceEngine::DeviceIDParser::getBatchDevice(deviceName.substr(pos + 1))}; - } - deviceNames.push_back("BATCH"); - } else { - deviceNames.push_back(deviceName); - } - } - - for (auto&& deviceName_ : deviceNames) { - InferenceEngine::DeviceIDParser parser(deviceName_); - std::string deviceNameLocal = parser.getDeviceName(); - - ov::InferencePlugin cppPlugin = GetCPPPluginByName(deviceNameLocal); - const InferenceEngine::Version version = cppPlugin.get_version(); - versions[deviceNameLocal] = version; - } - - return versions; -} - -bool ov::CoreImpl::DeviceSupportsImportExport(const std::string& deviceName) const { +bool ov::CoreImpl::device_supports_import_export(const std::string& deviceName) const { auto parsed = parseDeviceNameIntoConfig(deviceName); - auto plugin = GetCPPPluginByName(parsed._deviceName); - return DeviceSupportsImportExport(plugin); + auto plugin = 
get_plugin(parsed._deviceName); + return device_supports_import_export(plugin); } -bool ov::CoreImpl::DeviceSupportsConfigKey(const ov::InferencePlugin& plugin, const std::string& key) const { +bool ov::CoreImpl::device_supports_property(const ov::Plugin& plugin, const std::string& key) const { return util::contains(plugin.get_property(ov::supported_properties), key); } -bool ov::CoreImpl::DeviceSupportsImportExport(const ov::InferencePlugin& plugin) const { - auto supportedMetricKeys = plugin.get_metric(METRIC_KEY(SUPPORTED_METRICS), {}).as>(); +bool ov::CoreImpl::device_supports_import_export(const ov::Plugin& plugin) const { + auto supportedMetricKeys = plugin.get_property(METRIC_KEY(SUPPORTED_METRICS), {}).as>(); auto it = std::find(supportedMetricKeys.begin(), supportedMetricKeys.end(), METRIC_KEY(IMPORT_EXPORT_SUPPORT)); auto supported = - (it != supportedMetricKeys.end()) && plugin.get_metric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), {}).as(); + (it != supportedMetricKeys.end()) && plugin.get_property(METRIC_KEY(IMPORT_EXPORT_SUPPORT), {}).as(); if (!supported) { - if (DeviceSupportsConfigKey(plugin, ov::device::capabilities.name())) { + if (device_supports_property(plugin, ov::device::capabilities.name())) { supported = util::contains(plugin.get_property(ov::device::capabilities), ov::device::capability::EXPORT_IMPORT); } @@ -1050,25 +899,25 @@ bool ov::CoreImpl::DeviceSupportsImportExport(const ov::InferencePlugin& plugin) return supported; } -bool ov::CoreImpl::DeviceSupportsCacheDir(const ov::InferencePlugin& plugin) const { +bool ov::CoreImpl::device_supports_cache_dir(const ov::Plugin& plugin) const { return util::contains(plugin.get_property(ov::supported_properties), ov::cache_dir); } ov::SoPtr ov::CoreImpl::compile_model_impl( - const InferenceEngine::CNNNetwork& network, - ov::InferencePlugin& plugin, - const std::map& parsedConfig, - const InferenceEngine::RemoteContext::Ptr& context, + const std::shared_ptr& model, + ov::Plugin& plugin, + const ov::AnyMap& parsedConfig, + const ov::RemoteContext& context, const CacheContent& cacheContent, - bool forceDisableCache) { + bool forceDisableCache) const { OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "CoreImpl::compile_model_impl"); ov::SoPtr execNetwork; execNetwork = - context ? plugin.compile_model(network, context, parsedConfig) : plugin.compile_model(network, parsedConfig); - if (!forceDisableCache && cacheContent.cacheManager && DeviceSupportsImportExport(plugin)) { + context._impl ? 
plugin.compile_model(model, context, parsedConfig) : plugin.compile_model(model, parsedConfig); + if (!forceDisableCache && cacheContent.cacheManager && device_supports_import_export(plugin)) { try { // need to export network for further import from "cache" - OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::Export"); + OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::compile_model::Export"); cacheContent.cacheManager->writeCacheEntry(cacheContent.blobId, [&](std::ostream& networkStream) { networkStream << InferenceEngine::CompiledBlobHeader( InferenceEngine::GetInferenceEngineVersion()->buildNumber, @@ -1083,11 +932,11 @@ ov::SoPtr ov::CoreImpl::compile_mod return execNetwork; } -ov::SoPtr ov::CoreImpl::LoadNetworkFromCache( +ov::SoPtr ov::CoreImpl::load_model_from_cache( const CacheContent& cacheContent, - ov::InferencePlugin& plugin, - const std::map& config, - const std::shared_ptr& context, + ov::Plugin& plugin, + const ov::AnyMap& config, + const ov::RemoteContext& context, bool& networkIsImported) { ov::SoPtr execNetwork; struct HeaderException {}; @@ -1114,8 +963,8 @@ ov::SoPtr ov::CoreImpl::LoadNetwork throw HeaderException(); } - execNetwork = context ? plugin.import_model(networkStream, context, config) - : plugin.import_model(networkStream, config); + execNetwork = context._impl ? plugin.import_model(networkStream, context, config) + : plugin.import_model(networkStream, config); networkIsImported = true; execNetwork->loadedFromCache(); }); @@ -1132,10 +981,9 @@ ov::SoPtr ov::CoreImpl::LoadNetwork return execNetwork; } -std::map ov::CoreImpl::CreateCompileConfig( - const ov::InferencePlugin& plugin, - const std::string& deviceFamily, - const std::map& origConfig) const { +std::map ov::CoreImpl::create_compile_config(const ov::Plugin& plugin, + const std::string& deviceFamily, + const ov::AnyMap& origConfig) const { std::map getMetricConfig; std::map compileConfig; @@ -1145,17 +993,17 @@ std::map ov::CoreImpl::CreateCompileConfig( targetFallbackIt = origConfig.find(ov::device::priorities.name()); } if (targetFallbackIt != origConfig.end()) { - getMetricConfig[targetFallbackIt->first] = targetFallbackIt->second; + getMetricConfig[targetFallbackIt->first] = targetFallbackIt->second.as(); } // 1. Move DEVICE_ID key to getMetricConfig auto deviceIt = origConfig.find(ov::device::id.name()); if (deviceIt != origConfig.end()) { - getMetricConfig[deviceIt->first] = deviceIt->second; + getMetricConfig[deviceIt->first] = deviceIt->second.as(); } // 2. Replace it with DEVICE_ARCHITECTURE value - if (DeviceSupportsConfigKey(plugin, ov::device::architecture.name())) { + if (device_supports_property(plugin, ov::device::architecture.name())) { compileConfig[ov::device::architecture.name()] = plugin.get_property(ov::device::architecture, getMetricConfig); } else { // Take device name if device does not support DEVICE_ARCHITECTURE metric @@ -1163,12 +1011,13 @@ std::map ov::CoreImpl::CreateCompileConfig( } // 3. Extract config keys which affect compile config - if (DeviceSupportsConfigKey(plugin, ov::caching_properties.name())) { + if (device_supports_property(plugin, ov::caching_properties.name())) { auto cachingProps = plugin.get_property(ov::caching_properties); for (const auto& prop : cachingProps) { // origConfig values have higher priority than plugin parameters auto it = origConfig.find(prop); - compileConfig[prop] = it == origConfig.end() ? 
plugin.get_property(prop, {}).as() : it->second; + compileConfig[prop] = + it == origConfig.end() ? plugin.get_property(prop, {}).as() : it->second.as(); } } return compileConfig; @@ -1176,26 +1025,34 @@ std::map ov::CoreImpl::CreateCompileConfig( std::string ov::CoreImpl::CalculateNetworkHash(const InferenceEngine::CNNNetwork& network, const std::string& deviceFamily, - const ov::InferencePlugin& plugin, - const std::map& config) const { - auto compileConfig = CreateCompileConfig(plugin, deviceFamily, config); + const ov::Plugin& plugin, + const ov::AnyMap& config) const { + InferenceEngine::CNNNetwork net(network); + return CalculateNetworkHash(net, deviceFamily, plugin, config); +} + +std::string ov::CoreImpl::CalculateNetworkHash(InferenceEngine::CNNNetwork& network, + const std::string& deviceFamily, + const ov::Plugin& plugin, + const ov::AnyMap& config) const { + auto compileConfig = create_compile_config(plugin, deviceFamily, config); return InferenceEngine::NetworkCompilationContext::computeHash(network, compileConfig); } -std::string ov::CoreImpl::CalculateFileHash(const std::string& modelName, - const std::string& deviceFamily, - const ov::InferencePlugin& plugin, - const std::map& config) const { - auto compileConfig = CreateCompileConfig(plugin, deviceFamily, config); +std::string ov::CoreImpl::calculate_file_hash(const std::string& modelName, + const std::string& deviceFamily, + const ov::Plugin& plugin, + const ov::AnyMap& config) const { + auto compileConfig = create_compile_config(plugin, deviceFamily, config); return InferenceEngine::NetworkCompilationContext::computeHash(modelName, compileConfig); } -std::string ov::CoreImpl::CalculateMemoryHash(const std::string& modelStr, - const ov::Tensor& weights, - const std::string& deviceFamily, - const ov::InferencePlugin& plugin, - const std::map& config) const { - auto compileConfig = CreateCompileConfig(plugin, deviceFamily, config); +std::string ov::CoreImpl::calculate_memory_hash(const std::string& modelStr, + const ov::Tensor& weights, + const std::string& deviceFamily, + const ov::Plugin& plugin, + const ov::AnyMap& config) const { + auto compileConfig = create_compile_config(plugin, deviceFamily, config); return InferenceEngine::NetworkCompilationContext::computeHash(modelStr, weights, compileConfig); } @@ -1217,13 +1074,13 @@ void ov::CoreImpl::AddExtensionUnsafe(const InferenceEngine::IExtensionPtr& exte extensions.emplace_back(extension); } -void ov::CoreImpl::CoreConfig::setAndUpdate(ov::AnyMap& config) { +void ov::CoreImpl::CoreConfig::set_and_update(ov::AnyMap& config) { auto it = config.find(CONFIG_KEY(CACHE_DIR)); if (it != config.end()) { std::lock_guard lock(_cacheConfigMutex); - fillConfig(_cacheConfig, it->second.as()); + fill_config(_cacheConfig, it->second.as()); for (auto& deviceCfg : _cacheConfigPerDevice) { - fillConfig(deviceCfg.second, it->second.as()); + fill_config(deviceCfg.second, it->second.as()); } config.erase(it); } @@ -1243,9 +1100,9 @@ void ov::CoreImpl::CoreConfig::setAndUpdate(ov::AnyMap& config) { } } -void ov::CoreImpl::CoreConfig::setCacheForDevice(const std::string& dir, const std::string& name) { +void ov::CoreImpl::CoreConfig::set_cache_dir_for_device(const std::string& dir, const std::string& name) { std::lock_guard lock(_cacheConfigMutex); - fillConfig(_cacheConfigPerDevice[name], dir); + fill_config(_cacheConfigPerDevice[name], dir); } std::string ov::CoreImpl::CoreConfig::get_cache_dir() const { @@ -1255,14 +1112,14 @@ std::string ov::CoreImpl::CoreConfig::get_cache_dir() const { 
// Creating thread-safe copy of config including shared_ptr to ICacheManager // Passing empty or not-existing name will return global cache config -ov::CoreImpl::CoreConfig::CacheConfig ov::CoreImpl::CoreConfig::getCacheConfigForDevice( +ov::CoreImpl::CoreConfig::CacheConfig ov::CoreImpl::CoreConfig::get_cache_config_for_device( const std::string& device_name, - bool deviceSupportsCacheDir, - std::map& parsedConfig) const { + bool device_supports_cache_dir, + ov::AnyMap& parsedConfig) const { if (parsedConfig.count(CONFIG_KEY(CACHE_DIR))) { CoreConfig::CacheConfig tempConfig; - CoreConfig::fillConfig(tempConfig, parsedConfig.at(CONFIG_KEY(CACHE_DIR))); - if (!deviceSupportsCacheDir) { + CoreConfig::fill_config(tempConfig, parsedConfig.at(CONFIG_KEY(CACHE_DIR))); + if (!device_supports_cache_dir) { parsedConfig.erase(CONFIG_KEY(CACHE_DIR)); } return tempConfig; @@ -1276,7 +1133,7 @@ ov::CoreImpl::CoreConfig::CacheConfig ov::CoreImpl::CoreConfig::getCacheConfigFo } } -ov::CoreImpl::CoreConfig::CacheConfig ov::CoreImpl::CoreConfig::getCacheConfigForDevice( +ov::CoreImpl::CoreConfig::CacheConfig ov::CoreImpl::CoreConfig::get_cache_config_for_device( const std::string& device_name) const { std::lock_guard lock(_cacheConfigMutex); if (_cacheConfigPerDevice.count(device_name) > 0) { @@ -1286,7 +1143,7 @@ ov::CoreImpl::CoreConfig::CacheConfig ov::CoreImpl::CoreConfig::getCacheConfigFo } } -void ov::CoreImpl::CoreConfig::fillConfig(CacheConfig& config, const std::string& dir) { +void ov::CoreImpl::CoreConfig::fill_config(CacheConfig& config, const std::string& dir) { config._cacheDir = dir; if (!dir.empty()) { FileUtils::createDirectoryRecursive(dir); @@ -1391,3 +1248,19 @@ ov::AnyMap ov::flatten_sub_properties(const std::string& device, const ov::AnyMa } return result; } + +std::shared_ptr ov::CoreImpl::read_model(const std::string& modelPath, const std::string& binPath) const { + OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::read_model from file"); + return ReadNetwork(modelPath, binPath).getFunction(); +} + +std::shared_ptr ov::CoreImpl::read_model(const std::string& model, + const ov::Tensor& weights, + bool frontendMode) const { + InferenceEngine::Blob::Ptr blob; + if (weights) { + blob = weights._impl; + } + OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::read_model from memory"); + return ReadNetwork(model, blob, frontendMode).getFunction(); +} diff --git a/src/inference/src/dev/core_impl.hpp b/src/inference/src/dev/core_impl.hpp index 6243e4e60d8..611de513d2b 100644 --- a/src/inference/src/dev/core_impl.hpp +++ b/src/inference/src/dev/core_impl.hpp @@ -4,14 +4,19 @@ #pragma once -#include +#include + +#include #include "any_copy.hpp" #include "cpp_interfaces/interface/ie_iplugin_internal.hpp" +#include "dev/plugin.hpp" #include "ie_cache_guard.hpp" #include "ie_cache_manager.hpp" +#include "ie_extension.h" #include "ie_icore.hpp" #include "multi-device/multi_device_config.hpp" +#include "openvino/core/any.hpp" #include "openvino/core/extension.hpp" #include "openvino/core/version.hpp" #include "openvino/runtime/common.hpp" @@ -72,7 +77,7 @@ ov::AnyMap flatten_sub_properties(const std::string& device, const ov::AnyMap& p class CoreImpl : public InferenceEngine::ICore, public std::enable_shared_from_this { private: - mutable std::map plugins; + mutable std::map plugins; // Mutex is needed to prevent changes of dev mutexes map from different threads mutable std::mutex global_mutex; // Global mutex "" locks parallel access to pluginRegistry and plugins @@ 
-91,22 +96,22 @@ private: bool flag_allow_auto_batching = true; - void setAndUpdate(ov::AnyMap& config); + void set_and_update(ov::AnyMap& config); - void setCacheForDevice(const std::string& dir, const std::string& name); + void set_cache_dir_for_device(const std::string& dir, const std::string& name); std::string get_cache_dir() const; // Creating thread-safe copy of config including shared_ptr to ICacheManager // Passing empty or not-existing name will return global cache config - CacheConfig getCacheConfigForDevice(const std::string& device_name, - bool deviceSupportsCacheDir, - std::map& parsedConfig) const; + CacheConfig get_cache_config_for_device(const std::string& device_name, + bool device_supports_cache_dir, + ov::AnyMap& parsedConfig) const; - CacheConfig getCacheConfigForDevice(const std::string& device_name) const; + CacheConfig get_cache_config_for_device(const std::string& device_name) const; private: - static void fillConfig(CacheConfig& config, const std::string& dir); + static void fill_config(CacheConfig& config, const std::string& dir); mutable std::mutex _cacheConfigMutex; CacheConfig _cacheConfig; @@ -126,7 +131,9 @@ private: // Core settings (cache config, etc) CoreConfig coreConfig; - InferenceEngine::CacheGuard cacheGuard; + Any get_property_for_core(const std::string& name) const; + + mutable InferenceEngine::CacheGuard cacheGuard; struct PluginDescriptor { ov::util::FilePath libraryLocation; @@ -162,9 +169,51 @@ private: std::map pluginRegistry; - const bool newAPI; - void AddExtensionUnsafe(const InferenceEngine::IExtensionPtr& extension) const; + const bool m_new_api; + ov::SoPtr compile_model_impl( + const std::shared_ptr& model, + ov::Plugin& plugin, + const ov::AnyMap& parsedConfig, + const ov::RemoteContext& context, + const CacheContent& cacheContent, + bool forceDisableCache = false) const; + + static ov::SoPtr load_model_from_cache( + const CacheContent& cacheContent, + ov::Plugin& plugin, + const ov::AnyMap& config, + const ov::RemoteContext& context, + bool& networkIsImported); + + bool device_supports_import_export(const ov::Plugin& plugin) const; + + bool device_supports_property(const ov::Plugin& plugin, const std::string& key) const; + + bool device_supports_cache_dir(const ov::Plugin& plugin) const; + + ov::SoPtr compile_model(ov::Plugin& plugin, + const std::shared_ptr& model, + const ov::RemoteContext& context, + const ov::AnyMap& config) const; + + std::map create_compile_config(const ov::Plugin& plugin, + const std::string& deviceFamily, + const ov::AnyMap& origConfig) const; + + std::string calculate_file_hash(const std::string& modelName, + const std::string& deviceFamily, + const ov::Plugin& plugin, + const ov::AnyMap& config) const; + + std::string calculate_memory_hash(const std::string& modelStr, + const ov::Tensor& weights, + const std::string& deviceFamily, + const ov::Plugin& plugin, + const ov::AnyMap& config) const; + + // Legacy API + void AddExtensionUnsafe(const InferenceEngine::IExtensionPtr& extension) const; template > void TryToRegisterLibraryAsExtensionUnsafe(const std::basic_string& path) const { try { @@ -174,6 +223,23 @@ private: // in case of shared library is not opened } } + ov::SoPtr LoadNetworkImpl( + const InferenceEngine::CNNNetwork& model, + ov::Plugin& plugin, + const std::map& parsedConfig, + const InferenceEngine::RemoteContext::Ptr& context, + const CacheContent& cacheContent, + bool forceDisableCache = false); + + std::string CalculateNetworkHash(const InferenceEngine::CNNNetwork& network, + const 
std::string& deviceFamily, + const ov::Plugin& plugin, + const ov::AnyMap& config) const; + + std::string CalculateNetworkHash(InferenceEngine::CNNNetwork& network, + const std::string& deviceFamily, + const ov::Plugin& plugin, + const ov::AnyMap& config) const; public: CoreImpl(bool _newAPI); @@ -185,7 +251,13 @@ public: * @note The function supports UNICODE path * @param xmlConfigFile An .xml configuraion with device / plugin information */ - void RegisterPluginsInRegistry(const std::string& xmlConfigFile); + void register_plugins_in_registry(const std::string& xmlConfigFile); + + void apply_auto_batching(const std::shared_ptr& model, + std::string& deviceName, + ov::AnyMap& config) const; + + void clean_properties(std::string& deviceName, ov::AnyMap& config, ov::Any property) const; #ifdef OPENVINO_STATIC_LIBRARY @@ -194,7 +266,7 @@ public: * @note The function supports UNICODE path * @param static_registry a statically defined configuration with device / plugin information */ - void RegisterPluginsInRegistry(const decltype(::getStaticPluginsRegistry())& static_registry) { + void register_plugins_in_registry(const decltype(::getStaticPluginsRegistry())& static_registry) { std::lock_guard lock(get_mutex()); for (const auto& plugin : static_registry) { @@ -233,12 +305,6 @@ public: const std::shared_ptr& context, const std::map& config) override; - void ApplyAutoBatching(const InferenceEngine::CNNNetwork& network, - std::string& deviceName, - std::map& config); - - void CleanUpProperties(std::string& deviceName, std::map& config, ov::Any property); - InferenceEngine::SoExecutableNetworkInternal LoadNetwork(const InferenceEngine::CNNNetwork& network, const std::string& deviceNameOrig, const std::map& config) override; @@ -267,12 +333,6 @@ public: Any GetMetric(const std::string& deviceName, const std::string& name, const AnyMap& options = {}) const override; - void set_property(const std::string& device_name, const AnyMap& properties) override; - - Any get_property_for_core(const std::string& name) const; - - Any get_property(const std::string& device_name, const std::string& name, const AnyMap& arguments) const override; - Any GetConfig(const std::string& deviceName, const std::string& name) const override; /** @@ -293,48 +353,6 @@ public: InferenceEngine::RemoteContext::Ptr CreateContext(const std::string& deviceName, const InferenceEngine::ParamMap& params) override; - /** - * @brief Returns reference to CPP plugin wrapper by a device name - * @param deviceName A name of device - * @return Reference to a CPP plugin wrapper - */ - ov::InferencePlugin GetCPPPluginByName(const std::string& pluginName) const; - - /** - * @brief Unload plugin for specified device, but plugin meta-data is still in plugin registry - * @param deviceName A name of device - */ - void UnloadPluginByName(const std::string& deviceName); - - /** - * @brief Registers plugin meta-data in registry for specified device - * @param deviceName A name of device - */ - void RegisterPluginByName(const std::string& pluginName, const std::string& deviceName); - - /** - * @brief Provides a list of plugin names in registry; physically such plugins may not be created - * @return A list of plugin names - */ - std::vector GetListOfDevicesInRegistry() const; - - /** - * @brief Sets config values for a plugin or set of plugins - * @param deviceName A device name to set config to - * If empty, config is set for all the plugins / plugin's meta-data - * @note `deviceName` is not allowed in form of MULTI:CPU, HETERO:GPU,CPU, AUTO:CPU - * 
just simple forms like CPU, GPU, MULTI, GPU.0, etc - */ - void SetConfigForPlugins(const ov::AnyMap& configMap, const std::string& deviceName); - - /** - * @brief Get device config it is passed as pair of device_name and `AnyMap` - * @param configs All set of configs - * @note `device_name` is not allowed in form of MULTI:CPU, HETERO:GPU,CPU, AUTO:CPU - * just simple forms like CPU, GPU, MULTI, GPU.0, etc - */ - void ExtractAndSetDeviceConfig(const ov::AnyMap& configs); - std::map GetSupportedConfig(const std::string& deviceName, const std::map& configs) override; @@ -344,60 +362,121 @@ public: */ void AddExtension(const InferenceEngine::IExtensionPtr& extension); - void AddOVExtensions(const std::vector& extensions); - /** * @brief Provides a list of extensions * @return A list of registered extensions */ const std::vector& GetExtensions() const; - const std::vector& GetOVExtensions() const; + bool DeviceSupportsImportExport(const std::string& deviceName) const override; std::map GetVersions(const std::string& deviceName) const; - bool DeviceSupportsImportExport(const std::string& deviceName) const override; + // Common API - bool DeviceSupportsConfigKey(const ov::InferencePlugin& plugin, const std::string& key) const; + /** + * @brief Returns reference to CPP plugin wrapper by a device name + * @param deviceName A name of device + * @return Reference to a CPP plugin wrapper + */ + ov::Plugin get_plugin(const std::string& pluginName) const; - bool DeviceSupportsImportExport(const ov::InferencePlugin& plugin) const; + /** + * @brief Unload plugin for specified device, but plugin meta-data is still in plugin registry + * @param deviceName A name of device + */ + void unload_plugin(const std::string& deviceName); - bool DeviceSupportsCacheDir(const ov::InferencePlugin& plugin) const; + /** + * @brief Registers plugin meta-data in registry for specified device + * @param deviceName A name of device + */ + void register_plugin(const std::string& pluginName, const std::string& deviceName); - ov::SoPtr compile_model_impl( - const InferenceEngine::CNNNetwork& network, - ov::InferencePlugin& plugin, - const std::map& parsedConfig, - const InferenceEngine::RemoteContext::Ptr& context, - const CacheContent& cacheContent, - bool forceDisableCache = false); + /** + * @brief Provides a list of plugin names in registry; physically such plugins may not be created + * @return A list of plugin names + */ + std::vector get_registered_devices() const; - static ov::SoPtr LoadNetworkFromCache( - const CacheContent& cacheContent, - ov::InferencePlugin& plugin, - const std::map& config, - const std::shared_ptr& context, - bool& networkIsImported); + /** + * @brief Sets config values for a plugin or set of plugins + * @param deviceName A device name to set config to + * If empty, config is set for all the plugins / plugin's meta-data + * @note `deviceName` is not allowed in form of MULTI:CPU, HETERO:GPU,CPU, AUTO:CPU + * just simple forms like CPU, GPU, MULTI, GPU.0, etc + */ + void set_property_for_devivce(const ov::AnyMap& configMap, const std::string& deviceName); - std::map CreateCompileConfig(const ov::InferencePlugin& plugin, - const std::string& deviceFamily, - const std::map& origConfig) const; + void add_extension(const std::vector& extensions); - std::string CalculateNetworkHash(const InferenceEngine::CNNNetwork& network, - const std::string& deviceFamily, - const ov::InferencePlugin& plugin, - const std::map& config) const; + bool device_supports_import_export(const std::string& deviceName) const; - 
std::string CalculateFileHash(const std::string& modelName, - const std::string& deviceFamily, - const ov::InferencePlugin& plugin, - const std::map& config) const; + // ov::ICore + std::shared_ptr read_model(const std::string& model, + const ov::Tensor& weights, + bool frontend_mode = false) const override; - std::string CalculateMemoryHash(const std::string& modelStr, - const ov::Tensor& weights, - const std::string& deviceFamily, - const ov::InferencePlugin& plugin, - const std::map& config) const; + std::shared_ptr read_model(const std::string& model_path, const std::string& bin_path) const override; + + ov::SoPtr compile_model(const std::shared_ptr& model, + const std::string& device_name, + const ov::AnyMap& config = {}) const override; + + ov::SoPtr compile_model(const std::shared_ptr& model, + const ov::RemoteContext& context, + const ov::AnyMap& config = {}) const override; + + ov::SoPtr compile_model(const std::string& model_path, + const std::string& device_name, + const ov::AnyMap& config) const override; + + ov::SoPtr compile_model(const std::string& model_str, + const ov::Tensor& weights, + const std::string& device_name, + const ov::AnyMap& config) const override; + + ov::SoPtr import_model(std::istream& model, + const std::string& device_name = {}, + const ov::AnyMap& config = {}) const override; + + ov::SupportedOpsMap query_model(const std::shared_ptr& model, + const std::string& device_name, + const ov::AnyMap& config) const override; + + std::vector get_available_devices() const override; + + ov::RemoteContext create_context(const std::string& device_name, const AnyMap& args) const override; + + ov::AnyMap get_supported_property(const std::string& device_name, const ov::AnyMap& config) const; + + bool is_new_api() const override; + + ov::RemoteContext get_default_context(const std::string& device_name) const override; + + /** + * @brief Sets properties for a device, acceptable keys can be found in openvino/runtime/properties.hpp. + * + * @param device_name Name of a device. + * + * @param properties Map of pairs: (property name, property value). + */ + void set_property(const std::string& device_name, const AnyMap& properties); + + /** + * @brief Sets properties for a device, acceptable keys can be found in openvino/runtime/properties.hpp. + * + * @tparam Properties Should be the pack of `std::pair` types. + * @param device_name Name of a device. + * @param properties Optional pack of pairs: (property name, property value). + */ + template + util::EnableIfAllStringAny set_property(const std::string& device_name, + Properties&&... 
properties) { + set_property(device_name, AnyMap{std::forward(properties)...}); + } + + Any get_property(const std::string& device_name, const std::string& name, const AnyMap& arguments) const override; }; } // namespace ov diff --git a/src/inference/src/dev/core_impl_ie.cpp b/src/inference/src/dev/core_impl_ie.cpp new file mode 100644 index 00000000000..b6fbeb82e36 --- /dev/null +++ b/src/inference/src/dev/core_impl_ie.cpp @@ -0,0 +1,443 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include "any_copy.hpp" +#include "compilation_context.hpp" +#include "core_impl.hpp" +#include "cpp_interfaces/interface/ie_internal_plugin_config.hpp" +#include "cpp_interfaces/interface/ie_iplugin_internal.hpp" +#include "dev/converter_utils.hpp" +#include "ie_network_reader.hpp" +#include "iplugin_wrapper.hpp" +#include "ngraph/op/constant.hpp" +#include "ngraph/pass/constant_folding.hpp" +#include "openvino/itt.hpp" +#include "openvino/util/common_util.hpp" + +bool ov::CoreImpl::isNewAPI() const { + return is_new_api(); +} + +ov::SoPtr ov::CoreImpl::LoadNetworkImpl( + const InferenceEngine::CNNNetwork& network, + ov::Plugin& plugin, + const std::map& parsedConfig, + const InferenceEngine::RemoteContext::Ptr& context, + const CacheContent& cacheContent, + bool forceDisableCache) { + OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "CoreImpl::compile_model_impl"); + ov::SoPtr execNetwork; + auto wrapper = std::dynamic_pointer_cast(plugin.m_ptr); + OPENVINO_ASSERT(wrapper); + auto old_plugin = wrapper->get_plugin(); + execNetwork = {context ? old_plugin->LoadNetwork(network, parsedConfig, context) + : old_plugin->LoadNetwork(network, parsedConfig), + plugin.m_so}; + if (!forceDisableCache && cacheContent.cacheManager && device_supports_import_export(plugin)) { + try { + // need to export network for further import from "cache" + OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::Export"); + cacheContent.cacheManager->writeCacheEntry(cacheContent.blobId, [&](std::ostream& networkStream) { + networkStream << InferenceEngine::CompiledBlobHeader( + InferenceEngine::GetInferenceEngineVersion()->buildNumber, + InferenceEngine::NetworkCompilationContext::calculateFileInfo(cacheContent.modelPath)); + execNetwork->Export(networkStream); + }); + } catch (...) 
{ + cacheContent.cacheManager->removeCacheEntry(cacheContent.blobId); + throw; + } + } + return execNetwork; +} + +InferenceEngine::RemoteContext::Ptr ov::CoreImpl::GetDefaultContext(const std::string& deviceName) { + return get_default_context(deviceName)._impl; +} + +InferenceEngine::CNNNetwork ov::CoreImpl::ReadNetwork(const std::string& modelPath, const std::string& binPath) const { + OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from file"); + return InferenceEngine::details::ReadNetwork(modelPath, binPath, extensions, ov_extensions, is_new_api()); +} + +InferenceEngine::CNNNetwork ov::CoreImpl::ReadNetwork(const std::string& model, + const InferenceEngine::Blob::CPtr& weights, + bool frontendMode) const { + OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from memory"); + return InferenceEngine::details::ReadNetwork(model, weights, extensions, ov_extensions, is_new_api(), frontendMode); +} + +ov::SoPtr ov::CoreImpl::LoadNetwork( + const InferenceEngine::CNNNetwork& network, + const std::shared_ptr& context, + const std::map& config) { + OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::RemoteContext"); + if (network.getFunction()) { + ov::RemoteContext ctx{context, {nullptr}}; + auto compiled_model = + compile_model(ov::legacy_convert::convert_model(network, isNewAPI()), ctx, any_copy(config)); + return {compiled_model._ptr, compiled_model._so}; + } + if (context == nullptr) { + IE_THROW() << "Remote context is null"; + } + // have to deduce the device name/config from the context first + auto parsed = parseDeviceNameIntoConfig(context->getDeviceName(), config); + + auto plugin = get_plugin(parsed._deviceName); + + ov::SoPtr res; + auto conf = ov::any_copy(parsed._config); + auto cacheManager = + coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), conf) + ._cacheManager; + auto cacheContent = CacheContent{cacheManager}; + if (cacheManager && device_supports_import_export(plugin)) { + cacheContent.blobId = CalculateNetworkHash(network, parsed._deviceName, plugin, ov::any_copy(parsed._config)); + bool loadedFromCache = false; + auto lock = cacheGuard.getHashLock(cacheContent.blobId); + res = load_model_from_cache(cacheContent, plugin, conf, {context, {}}, loadedFromCache); + if (!loadedFromCache) { + res = LoadNetworkImpl(network, plugin, parsed._config, context, cacheContent); + } else { + // Temporary workaround until all plugins support caching of original model inputs + InferenceEngine::SetExeNetworkInfo(res._ptr, network.getFunction(), isNewAPI()); + } + } else { + res = LoadNetworkImpl(network, plugin, parsed._config, context, cacheContent); + } + return res; +} + +InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork( + const InferenceEngine::CNNNetwork& network, + const std::string& deviceNameOrig, + const std::map& config) { + OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::CNN"); + if (network.getFunction()) { + auto compiled_model = + compile_model(ov::legacy_convert::convert_model(network, isNewAPI()), deviceNameOrig, any_copy(config)); + return {compiled_model._ptr, compiled_model._so}; + } + std::string deviceName = deviceNameOrig; + std::map config_with_batch = config; + bool forceDisableCache = config_with_batch.count(CONFIG_KEY_INTERNAL(FORCE_DISABLE_CACHE)) > 0; + auto parsed = parseDeviceNameIntoConfig(deviceName, config_with_batch); + if (forceDisableCache) { + // remove 
this config key from parsed as plugins can throw unsupported exception + parsed._config.erase(CONFIG_KEY_INTERNAL(FORCE_DISABLE_CACHE)); + } + auto plugin = get_plugin(parsed._deviceName); + ov::SoPtr res; + auto conf = ov::any_copy(parsed._config); + auto cacheManager = + coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), conf) + ._cacheManager; + auto cacheContent = CacheContent{cacheManager}; + if (!forceDisableCache && cacheManager && device_supports_import_export(plugin)) { + cacheContent.blobId = CalculateNetworkHash(network, parsed._deviceName, plugin, ov::any_copy(parsed._config)); + bool loadedFromCache = false; + auto lock = cacheGuard.getHashLock(cacheContent.blobId); + res = load_model_from_cache(cacheContent, plugin, conf, {}, loadedFromCache); + if (!loadedFromCache) { + res = LoadNetworkImpl(network, plugin, parsed._config, nullptr, cacheContent, forceDisableCache); + } else { + // Temporary workaround until all plugins support caching of original model inputs + InferenceEngine::SetExeNetworkInfo(res._ptr, network.getFunction(), isNewAPI()); + } + } else { + res = LoadNetworkImpl(network, plugin, parsed._config, nullptr, cacheContent, forceDisableCache); + } + return {res._ptr, res._so}; +} + +InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork( + const std::string& modelPath, + const std::string& deviceName, + const std::map& config, + const std::function& val) { + OV_ITT_SCOPE(FIRST_INFERENCE, ie::itt::domains::IE_LT, "Core::LoadNetwork::Path"); + auto parsed = parseDeviceNameIntoConfig(deviceName, config); + auto plugin = get_plugin(parsed._deviceName); + ov::SoPtr res; + auto conf = any_copy(parsed._config); + auto cacheManager = + coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), conf) + ._cacheManager; + auto cacheContent = CacheContent{cacheManager, modelPath}; + if (cacheManager && device_supports_import_export(plugin)) { + bool loadedFromCache = false; + cacheContent.blobId = calculate_file_hash(modelPath, parsed._deviceName, plugin, conf); + auto lock = cacheGuard.getHashLock(cacheContent.blobId); + res = load_model_from_cache(cacheContent, plugin, conf, {}, loadedFromCache); + if (!loadedFromCache) { + auto cnnNetwork = ReadNetwork(modelPath, std::string()); + if (val) { + val(cnnNetwork); + } + if (cnnNetwork.getFunction()) { + res = compile_model_impl(ov::legacy_convert::convert_model(cnnNetwork, isNewAPI()), + plugin, + conf, + {}, + cacheContent); + } else { + res = LoadNetworkImpl(cnnNetwork, plugin, parsed._config, nullptr, cacheContent); + } + } + } else if (cacheManager) { + res = plugin.compile_model(modelPath, conf); + } else { + auto cnnNetwork = ReadNetwork(modelPath, std::string()); + if (val) { + val(cnnNetwork); + } + if (cnnNetwork.getFunction()) { + res = compile_model_impl(ov::legacy_convert::convert_model(cnnNetwork, isNewAPI()), + plugin, + conf, + {}, + cacheContent); + } else { + res = LoadNetworkImpl(cnnNetwork, plugin, parsed._config, nullptr, cacheContent); + } + } + return {res._ptr, res._so}; +} + +InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork( + const std::string& modelStr, + const InferenceEngine::Blob::CPtr& weights, + const std::string& deviceName, + const std::map& config, + const std::function& val) { + OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::Memory"); + + auto compiled_model = compile_model(modelStr, + ov::Tensor{std::const_pointer_cast(weights), {}}, + 
deviceName, + ov::any_copy(config)); + return {compiled_model._ptr, compiled_model._so}; +} + +InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::ImportNetwork( + std::istream& networkModel, + const std::string& deviceName, + const std::map& config) { + auto compiled_model = import_model(networkModel, deviceName, any_copy(config)); + return {compiled_model._ptr, compiled_model._so}; +} + +InferenceEngine::QueryNetworkResult ov::CoreImpl::QueryNetwork(const InferenceEngine::CNNNetwork& network, + const std::string& deviceName, + const std::map& config) const { + OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::QueryNetwork"); + ie::QueryNetworkResult ret; + if (!network.getFunction()) { + ret.rc = InferenceEngine::GENERAL_ERROR; + return ret; + } + auto res = query_model(network.getFunction(), deviceName, any_copy(config)); + if (!network.getFunction() || res.empty()) { + ret.rc = InferenceEngine::GENERAL_ERROR; + return ret; + } + ret.supportedLayersMap = res; + + const auto& func = network.getFunction(); + auto specialized_function = func->clone(); + + std::string defDevice = ret.supportedLayersMap.begin()->second; + ngraph::pass::ConstantFolding().run_on_model(specialized_function); + std::unordered_set opNames; + + for (const auto& op : specialized_function->get_ops()) + opNames.emplace(op->get_friendly_name()); + + for (const auto& op : func->get_ops()) { + if (opNames.find(op->get_friendly_name()) == opNames.end()) { + ret.supportedLayersMap[op->get_friendly_name()] = defDevice; + } + } + + for (const auto& op : func->get_ops()) { + if (!ret.supportedLayersMap.count(op->get_friendly_name()) && + std::dynamic_pointer_cast(op)) { + bool are_all_users_supported = true; + for (const auto& user : op->output(0).get_target_inputs()) { + if (!ret.supportedLayersMap.count(user.get_node()->get_friendly_name())) { + are_all_users_supported = false; + break; + } + } + if (are_all_users_supported) { + ret.supportedLayersMap[op->get_friendly_name()] = defDevice; + } + } + } + return ret; +} + +ov::Any ov::CoreImpl::GetMetric(const std::string& deviceName, + const std::string& name, + const ov::AnyMap& options) const { + // HETERO case + { + if (deviceName.find("HETERO:") == 0) { + IE_THROW() + << "You can get specific metrics with the GetMetric only for the HETERO itself (without devices). " + "To get individual devices's metrics call GetMetric for each device separately"; + } + } + + // MULTI case + { + if (deviceName.find("MULTI:") == 0) { + IE_THROW() + << "You can get specific metrics with the GetMetric only for the MULTI itself (without devices). " + "To get individual devices's metrics call GetMetric for each device separately"; + } + } + + // AUTO case + { + if (deviceName.find("AUTO:") == 0) { + IE_THROW() << "You can get specific metrics with the GetMetric only for the AUTO itself (without devices). " + "To get individual devices's metrics call GetMetric for each device separately"; + } + } + + // BATCH case + { + if (deviceName.find("BATCH:") == 0) { + IE_THROW() + << "You can get specific metrics with the GetMetric only for the BATCH itself (without devices). 
" + "To get individual devices's metrics call GetMetric for each device separately"; + } + } + + auto parsed = parseDeviceNameIntoConfig(deviceName); + for (auto o : options) { + parsed._config.insert(o); + } + + return get_plugin(parsed._deviceName).get_property(name, parsed._config); +} + +ov::Any ov::CoreImpl::GetConfig(const std::string& deviceName, const std::string& name) const { + auto parsed = parseDeviceNameIntoConfig(deviceName); + return get_plugin(parsed._deviceName).get_property(name, parsed._config); +} + +std::vector ov::CoreImpl::GetAvailableDevices() const { + return get_available_devices(); +} + +InferenceEngine::RemoteContext::Ptr ov::CoreImpl::CreateContext(const std::string& deviceName, + const InferenceEngine::ParamMap& params) { + return create_context(deviceName, params)._impl; +} + +/** + * @brief Registers the extension in a Core object + * Such extensions can be used for both CNNNetwork readers and device plugins + */ +void ov::CoreImpl::AddExtension(const InferenceEngine::IExtensionPtr& extension) { + std::lock_guard lock(get_mutex()); + AddExtensionUnsafe(extension); +} + +bool ov::CoreImpl::DeviceSupportsImportExport(const std::string& deviceName) const { + return device_supports_import_export(deviceName); +} + +std::map ov::CoreImpl::GetSupportedConfig(const std::string& deviceName, + const std::map& configs) { + std::vector supportedConfigKeys; + try { + supportedConfigKeys = GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as>(); + } catch (ov::Exception&) { + } + try { + for (auto&& property : ICore::get_property(deviceName, ov::supported_properties)) { + if (property.is_mutable()) { + supportedConfigKeys.emplace_back(std::move(property)); + } + } + } catch (ov::Exception&) { + } + std::map supportedConfig; + for (auto&& key : supportedConfigKeys) { + auto itKey = configs.find(key); + if (configs.end() != itKey) { + supportedConfig[key] = itKey->second; + } + } + for (auto&& config : configs) { + auto parsed = parseDeviceNameIntoConfig(config.first); + if (deviceName.find(parsed._deviceName) != std::string::npos) { + std::stringstream strm(config.second); + std::map device_configs; + util::Read>{}(strm, device_configs); + for (auto&& device_config : device_configs) { + if (ov::util::contains(supportedConfigKeys, device_config.first)) { + supportedConfig[device_config.first] = device_config.second; + } + } + for (auto&& config : parsed._config) { + supportedConfig[config.first] = config.second.as(); + } + } + } + return supportedConfig; +} + +std::map ov::CoreImpl::GetVersions(const std::string& deviceName) const { + std::map versions; + std::vector deviceNames; + + { + // for compatibility with samples / demo + if (deviceName.find("HETERO") == 0) { + auto pos = deviceName.find_first_of(":"); + if (pos != std::string::npos) { + deviceNames = InferenceEngine::DeviceIDParser::getHeteroDevices(deviceName.substr(pos + 1)); + } + deviceNames.push_back("HETERO"); + } else if (deviceName.find("MULTI") == 0) { + auto pos = deviceName.find_first_of(":"); + if (pos != std::string::npos) { + deviceNames = InferenceEngine::DeviceIDParser::getMultiDevices(deviceName.substr(pos + 1)); + } + deviceNames.push_back("MULTI"); + } else if (deviceName.find("AUTO") == 0) { + auto pos = deviceName.find_first_of(":"); + if (pos != std::string::npos) { + deviceNames = InferenceEngine::DeviceIDParser::getMultiDevices(deviceName.substr(pos + 1)); + } + deviceNames.emplace_back("AUTO"); + } else if (deviceName.find("BATCH") == 0) { + auto pos = deviceName.find_first_of(":"); 
+            if (pos != std::string::npos) {
+                deviceNames = {InferenceEngine::DeviceIDParser::getBatchDevice(deviceName.substr(pos + 1))};
+            }
+            deviceNames.push_back("BATCH");
+        } else {
+            deviceNames.push_back(deviceName);
+        }
+    }
+
+    for (auto&& deviceName_ : deviceNames) {
+        ie::DeviceIDParser parser(deviceName_);
+        std::string deviceNameLocal = parser.getDeviceName();
+
+        ov::Plugin cppPlugin = get_plugin(deviceNameLocal);
+
+        versions[deviceNameLocal] = ov::legacy_convert::convert_plugin(cppPlugin.m_ptr)->GetVersion();
+    }
+
+    return versions;
+}
diff --git a/src/inference/src/dev/iplugin.cpp b/src/inference/src/dev/iplugin.cpp
new file mode 100644
index 00000000000..5ff92bd03ad
--- /dev/null
+++ b/src/inference/src/dev/iplugin.cpp
@@ -0,0 +1,47 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "openvino/runtime/iplugin.hpp"
+
+ov::IPlugin::IPlugin() : m_executor_manager(InferenceEngine::executorManager()), m_is_new_api(true) {}
+
+void ov::IPlugin::set_version(const ov::Version& version) {
+    m_version = version;
+}
+
+const ov::Version& ov::IPlugin::get_version() const {
+    return m_version;
+}
+
+void ov::IPlugin::set_device_name(const std::string& name) {
+    m_plugin_name = name;
+}
+
+const std::string& ov::IPlugin::get_device_name() const {
+    return m_plugin_name;
+}
+
+void ov::IPlugin::add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension) {
+    OPENVINO_NOT_IMPLEMENTED;
+}
+
+void ov::IPlugin::set_core(const std::weak_ptr<ov::ICore>& core) {
+    OPENVINO_ASSERT(!core.expired());
+    m_core = core;
+    auto locked_core = m_core.lock();
+    if (locked_core)
+        m_is_new_api = locked_core->is_new_api();
+}
+
+std::shared_ptr<ov::ICore> ov::IPlugin::get_core() const {
+    return m_core.lock();
+}
+
+bool ov::IPlugin::is_new_api() const {
+    return m_is_new_api;
+}
+
+const std::shared_ptr<InferenceEngine::ExecutorManager>& ov::IPlugin::get_executor_manager() const {
+    return m_executor_manager;
+}
diff --git a/src/inference/src/dev/iplugin_wrapper.cpp b/src/inference/src/dev/iplugin_wrapper.cpp
new file mode 100644
index 00000000000..eb9c594d58d
--- /dev/null
+++ b/src/inference/src/dev/iplugin_wrapper.cpp
@@ -0,0 +1,114 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "iplugin_wrapper.hpp"
+
+#include <memory>
+
+#include "any_copy.hpp"
+#include "dev/converter_utils.hpp"
+#include "ie_icore.hpp"
+
+namespace InferenceEngine {
+
+IPluginWrapper::IPluginWrapper(const std::shared_ptr<InferenceEngine::IInferencePlugin>& ptr) : m_old_plugin(ptr) {
+    OPENVINO_ASSERT(m_old_plugin);
+    auto& ver = m_old_plugin->GetVersion();
+    m_version.buildNumber = ver.buildNumber;
+    m_version.description = ver.description;
+    m_plugin_name = m_old_plugin->GetName();
+    m_is_new_api = m_old_plugin->IsNewAPI();
+    m_core = m_old_plugin->GetCore();
+    m_executor_manager = m_old_plugin->executorManager();
+}
+
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::compile_model(
+    const std::shared_ptr<const ov::Model>& model,
+    const ov::AnyMap& properties) const {
+    auto exec_network =
+        m_old_plugin->LoadNetwork(ov::legacy_convert::convert_model(model, is_new_api()), ov::any_copy(properties));
+    return exec_network;
+}
+
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::compile_model(
+    const std::string& model_path,
+    const ov::AnyMap& properties) const {
+    auto exec_network = m_old_plugin->LoadNetwork(model_path, any_copy(properties));
+    return exec_network._ptr;
+}
+
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::compile_model(
+    const std::shared_ptr<const ov::Model>& model,
+    const ov::AnyMap& properties,
+    const ov::RemoteContext& context) const {
+    auto compiled_model = m_old_plugin->LoadNetwork(ov::legacy_convert::convert_model(model, is_new_api()),
+                                                    any_copy(properties),
+                                                    context._impl);
+    return compiled_model;
+}
+
+void IPluginWrapper::set_property(const ov::AnyMap& properties) {
+    m_old_plugin->SetProperties(properties);
+}
+
+ov::Any IPluginWrapper::get_property(const std::string& name, const ov::AnyMap& arguments) const {
+    try {
+        return m_old_plugin->GetConfig(name, arguments);
+    } catch (...) {
+        return m_old_plugin->GetMetric(name, arguments);
+    }
+}
+
+ov::RemoteContext IPluginWrapper::create_context(const ov::AnyMap& remote_properties) const {
+    return ov::RemoteContext{m_old_plugin->CreateContext(remote_properties), {nullptr}};
+}
+
+ov::RemoteContext IPluginWrapper::get_default_context(const ov::AnyMap& remote_properties) const {
+    return ov::RemoteContext{m_old_plugin->GetDefaultContext(remote_properties), {nullptr}};
+}
+
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::import_model(
+    std::istream& model,
+    const ov::AnyMap& properties) const {
+    return m_old_plugin->ImportNetwork(model, any_copy(properties));
+}
+
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::import_model(
+    std::istream& model,
+    const ov::RemoteContext& context,
+    const ov::AnyMap& properties) const {
+    return m_old_plugin->ImportNetwork(model, context._impl, any_copy(properties));
+}
+
+ov::SupportedOpsMap IPluginWrapper::query_model(const std::shared_ptr<const ov::Model>& model,
+                                                const ov::AnyMap& properties) const {
+    auto res = m_old_plugin->QueryNetwork(ov::legacy_convert::convert_model(model, is_new_api()), any_copy(properties));
+    if (res.rc != InferenceEngine::OK) {
+        throw ov::Exception(res.resp.msg);
+    }
+    return res.supportedLayersMap;
+}
+
+void IPluginWrapper::add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension) {
+    m_old_plugin->AddExtension(extension);
+}
+
+const std::shared_ptr<InferenceEngine::IInferencePlugin>& IPluginWrapper::get_plugin() const {
+    return m_old_plugin;
+}
+
+void IPluginWrapper::set_core(const std::weak_ptr<ov::ICore>& core) {
+    auto locked_core = core.lock();
+    auto old_core = std::dynamic_pointer_cast<InferenceEngine::ICore>(locked_core);
+    if (old_core)
+        m_old_plugin->SetCore(old_core);
+    m_core = core;
+}
+
+void IPluginWrapper::set_device_name(const std::string& device_name) {
+    m_plugin_name = device_name;
+    m_old_plugin->SetName(device_name);
+}
+
+} // namespace InferenceEngine
diff --git a/src/inference/src/dev/iplugin_wrapper.hpp b/src/inference/src/dev/iplugin_wrapper.hpp
new file mode 100644
index 00000000000..106ea3f6a7e
--- /dev/null
+++ b/src/inference/src/dev/iplugin_wrapper.hpp
@@ -0,0 +1,166 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
+#include "openvino/runtime/iplugin.hpp"
+
+namespace InferenceEngine {
+
+/**
+ * @brief Class wraps InferenceEngine::IInferencePlugin into ov::IPlugin
+ */
+class IPluginWrapper : public ov::IPlugin {
+public:
+    /**
+     * @brief Constructs Plugin wrapper
+     *
+     * @param ptr shared pointer to InferenceEngine::IInferencePlugin
+     */
+    IPluginWrapper(const std::shared_ptr<InferenceEngine::IInferencePlugin>& ptr);
+
+    /**
+     * @brief Create compiled model based on model and properties
+     *
+     * @param model OpenVINO Model representation
+     * @param properties configurations for compiled model
+     *
+     * @return shared pointer to compiled model interface
+     */
+    std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
+        const std::shared_ptr<const ov::Model>& model,
+        const ov::AnyMap& properties) const override;
+
+    /**
+     * @brief Create compiled model based on model and properties
+     *
+     * @param model_path Path to the model
+     * @param properties configurations for compiled model
+     *
+     * @return shared pointer to compiled model interface
+     */
+    std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
+        const std::string& model_path,
+        const ov::AnyMap& properties) const override;
+
+    /**
+     * @brief Create compiled model based on model and properties
+     *
+     * @param model OpenVINO Model representation
+     * @param properties configurations for compiled model
+     * @param context remote context
+     *
+     * @return shared pointer to compiled model interface
+     */
+    std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
+        const std::shared_ptr<const ov::Model>& model,
+        const ov::AnyMap& properties,
+        const ov::RemoteContext& context) const override;
+
+    /**
+     * @brief Specifies some plugin properties
+     *
+     * @param properties map with configuration properties
+     */
+    void set_property(const ov::AnyMap& properties) override;
+
+    /**
+     * @brief Returns the property
+     *
+     * @param name property name
+     * @param arguments configuration parameters
+     *
+     * @return ov::Any object which contains property value
+     */
+    ov::Any get_property(const std::string& name, const ov::AnyMap& arguments) const override;
+
+    /**
+     * @brief Create remote context
+     *
+     * @param remote_properties configuration parameters
+     *
+     * @return Remote context
+     */
+    ov::RemoteContext create_context(const ov::AnyMap& remote_properties) const override;
+
+    /**
+     * @brief Create default remote context
+     *
+     * @param remote_properties configuration parameters
+     *
+     * @return Remote context
+     */
+    ov::RemoteContext get_default_context(const ov::AnyMap& remote_properties) const override;
+
+    /**
+     * @brief Import model to the plugin
+     *
+     * @param model stream with the model
+     * @param properties configuration properties
+     *
+     * @return shared pointer to compiled model interface
+     */
+    std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> import_model(
+        std::istream& model,
+        const ov::AnyMap& properties) const override;
+
+    /**
+     * @brief Import model to the plugin
+     *
+     * @param model stream with the model
+     * @param context remote context
+     * @param properties configuration properties
+     *
+     * @return shared pointer to compiled model interface
+     */
+    std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>
+    import_model(std::istream& model, const ov::RemoteContext& context, const ov::AnyMap& properties) const override;
+
+    /**
+     * @brief Query model
+     *
+     * @param model OpenVINO Model
+     * @param properties configuration properties
+     *
+     * @return Map of supported operations
+     */
+    ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model,
+                                    const ov::AnyMap& properties) const override;
+
+    /**
+     * @brief Register legacy Inference Engine Extension for the plugin
+     *
+     * @param extension legacy Inference Engine Extension
+     */
+    void add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension) override;
+
+    /**
+     * @brief Returns the instance of the legacy plugin
+     *
+     * @return Legacy InferenceEngine::InferencePlugin object
+     */
+    const std::shared_ptr<InferenceEngine::IInferencePlugin>& get_plugin() const;
+
+    /**
+     * @brief Set core interface to the plugin
+     * This method works under the non-virtual method of IPlugin class
+     *
+     * @param core OpenVINO Core interface
+     */
+    void set_core(const std::weak_ptr<ov::ICore>& core);
+
+    /**
+     * @brief Set plugin name for the wrapper and legacy plugin
+     * This method works under the non-virtual method of IPlugin class
+     *
+     * @param device_name The name of the plugin
+     */
+    void set_device_name(const std::string& device_name);
+
+private:
+    std::shared_ptr<InferenceEngine::IInferencePlugin> m_old_plugin;
+};
+
+} // namespace InferenceEngine
diff --git a/src/inference/src/dev/plugin.cpp b/src/inference/src/dev/plugin.cpp
new file mode 100644
index 00000000000..e4b8d41214b
--- /dev/null
+++
b/src/inference/src/dev/plugin.cpp @@ -0,0 +1,146 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "plugin.hpp" + +#include + +#include "cpp_interfaces/interface/ie_iplugin_internal.hpp" +#include "ie_plugin_config.hpp" +#include "iplugin_wrapper.hpp" + +#define OV_PLUGIN_CALL_STATEMENT(...) \ + OPENVINO_ASSERT(m_ptr != nullptr, "OpenVINO Runtime Plugin was not initialized."); \ + try { \ + __VA_ARGS__; \ + } catch (...) { \ + ::InferenceEngine::details::Rethrow(); \ + } +ov::Plugin::~Plugin() { + m_ptr = {}; +} + +ov::Plugin::Plugin(const std::shared_ptr& ptr, const std::shared_ptr& so) : m_ptr{ptr}, m_so{so} { + OV_PLUGIN_CALL_STATEMENT(); +} + +void ov::Plugin::set_name(const std::string& deviceName) { + OV_PLUGIN_CALL_STATEMENT({ + m_ptr->set_device_name(deviceName); + if (auto wrapper = std::dynamic_pointer_cast(m_ptr)) + wrapper->set_device_name(deviceName); + }); +} + +void ov::Plugin::set_core(std::weak_ptr core) { + OV_PLUGIN_CALL_STATEMENT({ + m_ptr->set_core(core); + if (auto wrapper = std::dynamic_pointer_cast(m_ptr)) + wrapper->set_core(core); + }); +} + +const ov::Version ov::Plugin::get_version() const { + OV_PLUGIN_CALL_STATEMENT(return m_ptr->get_version()); +} + +void ov::Plugin::add_extension(const ie::IExtensionPtr& extension) { + OPENVINO_SUPPRESS_DEPRECATED_START + OV_PLUGIN_CALL_STATEMENT(m_ptr->add_extension(extension)); + OPENVINO_SUPPRESS_DEPRECATED_END +} + +void ov::Plugin::set_property(const ov::AnyMap& config) { + OV_PLUGIN_CALL_STATEMENT(m_ptr->set_property(config)); +} + +ov::SoPtr ov::Plugin::compile_model( + const std::shared_ptr& model, + const ov::AnyMap& properties) const { + OV_PLUGIN_CALL_STATEMENT(return {m_ptr->compile_model(model, properties), m_so}); +} + +ov::SoPtr ov::Plugin::compile_model(const std::string& model_path, + const ov::AnyMap& properties) const { + OV_PLUGIN_CALL_STATEMENT(return {m_ptr->compile_model(model_path, properties), m_so}); +} + +ov::SoPtr ov::Plugin::compile_model( + const std::shared_ptr& model, + const ov::RemoteContext& context, + const ov::AnyMap& properties) const { + OV_PLUGIN_CALL_STATEMENT(return {m_ptr->compile_model(model, properties, context), m_so}); +} + +ov::SupportedOpsMap ov::Plugin::query_model(const std::shared_ptr& model, + const ov::AnyMap& properties) const { + OV_PLUGIN_CALL_STATEMENT(return m_ptr->query_model(model, properties)); +} + +ov::SoPtr ov::Plugin::import_model(std::istream& model, + const ov::AnyMap& properties) const { + OV_PLUGIN_CALL_STATEMENT(return {m_ptr->import_model(model, properties), m_so}); +} + +ov::SoPtr ov::Plugin::import_model(std::istream& networkModel, + const ov::RemoteContext& context, + const ov::AnyMap& config) const { + OV_PLUGIN_CALL_STATEMENT(return {m_ptr->import_model(networkModel, context, config), m_so}); +} + +ov::RemoteContext ov::Plugin::create_context(const AnyMap& params) const { + OV_PLUGIN_CALL_STATEMENT({ + auto remote = m_ptr->create_context(params); + auto so = remote._so; + if (m_so) + so.emplace_back(m_so); + return {remote._impl, so}; + }); +} + +ov::RemoteContext ov::Plugin::get_default_context(const AnyMap& params) const { + OV_PLUGIN_CALL_STATEMENT({ + auto remote = m_ptr->get_default_context(params); + auto so = remote._so; + if (m_so) + so.emplace_back(m_so); + return {remote._impl, so}; + }); +} + +ov::Any ov::Plugin::get_property(const std::string& name, const AnyMap& arguments) const { + OV_PLUGIN_CALL_STATEMENT({ + if (ov::supported_properties == name) { + try { + return 
{m_ptr->get_property(name, arguments), {m_so}}; + } catch (const ie::Exception&) { + std::vector supported_properties; + try { + auto ro_properties = + m_ptr->get_property(METRIC_KEY(SUPPORTED_METRICS), arguments).as>(); + for (auto&& ro_property : ro_properties) { + if (ro_property != METRIC_KEY(SUPPORTED_METRICS) && + ro_property != METRIC_KEY(SUPPORTED_CONFIG_KEYS)) { + supported_properties.emplace_back(ro_property, PropertyMutability::RO); + } + } + } catch (const ov::Exception&) { + } catch (const ie::Exception&) { + } + try { + auto rw_properties = m_ptr->get_property(METRIC_KEY(SUPPORTED_CONFIG_KEYS), arguments) + .as>(); + for (auto&& rw_property : rw_properties) { + supported_properties.emplace_back(rw_property, PropertyMutability::RW); + } + } catch (const ov::Exception&) { + } catch (const ie::Exception&) { + } + supported_properties.emplace_back(ov::supported_properties.name(), PropertyMutability::RO); + return supported_properties; + } + } + return {m_ptr->get_property(name, arguments), {m_so}}; + }); +} diff --git a/src/inference/src/dev/plugin.hpp b/src/inference/src/dev/plugin.hpp new file mode 100644 index 00000000000..ed3f9f00faa --- /dev/null +++ b/src/inference/src/dev/plugin.hpp @@ -0,0 +1,82 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +/** + * @brief This is a header file for the Inference Engine plugin C++ API + * + * @file plugin.hpp + */ +#pragma once + +#include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp" +#include "ie_iextension.h" +#include "openvino/runtime/iplugin.hpp" + +namespace ov { + +class CoreImpl; + +/** + * @brief Plugin wrapper under the plugin interface which is used inside the core interface + */ +class Plugin { + std::shared_ptr m_ptr; + std::shared_ptr m_so; + friend ::ov::CoreImpl; + +public: + Plugin() = default; + + ~Plugin(); + + Plugin(const std::shared_ptr& ptr, const std::shared_ptr& so); + + void set_name(const std::string& deviceName); + + void set_core(std::weak_ptr core); + + const ov::Version get_version() const; + + void add_extension(const ie::IExtensionPtr& extension); + + void set_property(const ov::AnyMap& config); + + SoPtr compile_model(const std::shared_ptr& model, + const ov::AnyMap& properties) const; + + SoPtr compile_model(const std::string& model_path, + const ov::AnyMap& properties) const; + + SoPtr compile_model(const std::shared_ptr& model, + const ov::RemoteContext& context, + const ov::AnyMap& properties) const; + + ov::SupportedOpsMap query_model(const std::shared_ptr& model, const ov::AnyMap& properties) const; + + SoPtr import_model(std::istream& model, + const ov::AnyMap& properties) const; + + SoPtr import_model(std::istream& networkModel, + const ov::RemoteContext& context, + const ov::AnyMap& config) const; + + ov::RemoteContext create_context(const AnyMap& params) const; + + ov::RemoteContext get_default_context(const AnyMap& params) const; + + Any get_property(const std::string& name, const AnyMap& arguments) const; + + template + T get_property(const ov::Property& property) const { + return get_property(property.name(), {}).template as(); + } + + template + T get_property(const ov::Property& property, const AnyMap& arguments) const { + return get_property(property.name(), arguments).template as(); + } +}; + +} // namespace ov + diff --git a/src/inference/src/ie_core.cpp b/src/inference/src/ie_core.cpp index 96e42f2387f..c4f728ee89e 100644 --- a/src/inference/src/ie_core.cpp +++ b/src/inference/src/ie_core.cpp @@ -65,7 +65,7 @@ 
Core::Core(const std::string& xmlConfigFile) { _impl = std::make_shared<Impl>(); #ifdef OPENVINO_STATIC_LIBRARY - _impl->RegisterPluginsInRegistry(::getStaticPluginsRegistry()); + _impl->register_plugins_in_registry(::getStaticPluginsRegistry()); #else RegisterPlugins(ov::findPluginXML(xmlConfigFile)); #endif @@ -176,7 +176,10 @@ ExecutableNetwork Core::ImportNetwork(const std::string& modelFileName, const std::map<std::string, std::string>& config) { OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::ImportNetwork"); auto parsed = ov::parseDeviceNameIntoConfig(deviceName, config); - auto exec = _impl->GetCPPPluginByName(parsed._deviceName).import_model(modelFileName, parsed._config); + std::ifstream modelStream(modelFileName, std::ios::binary); + if (!modelStream.is_open()) + IE_THROW(NetworkNotRead) << "Model file " << modelFileName << " cannot be opened!"; + auto exec = _impl->get_plugin(parsed._deviceName).import_model(modelStream, ov::any_copy(parsed._config)); return {exec._ptr, exec._so}; } @@ -206,7 +209,7 @@ ExecutableNetwork Core::ImportNetwork(std::istream& networkModel) { } networkModel.seekg(currentPos, networkModel.beg); - auto exec = _impl->GetCPPPluginByName(deviceName).import_model(networkModel, {}); + auto exec = _impl->get_plugin(deviceName).import_model(networkModel, {}); return {exec._ptr, exec._so}; } @@ -224,8 +227,10 @@ ExecutableNetwork Core::ImportNetwork(std::istream& networkModel, std::string deviceName = device.getDeviceName(); auto parsed = ov::parseDeviceNameIntoConfig(deviceName, config); - auto exec = _impl->GetCPPPluginByName(deviceName) - .import_model(networkModel, std::dynamic_pointer_cast(context), parsed._config); + auto exec = _impl->get_plugin(deviceName) + .import_model(networkModel, + ov::RemoteContext{std::dynamic_pointer_cast(context), {}}, + ov::any_copy(parsed._config)); return {exec._ptr, exec._so}; } @@ -262,9 +267,9 @@ void Core::SetConfig(const std::map<std::string, std::string>& config, const std ov::AnyMap conf = ov::any_copy(config); if (deviceName.empty()) { - _impl->SetConfigForPlugins(conf, std::string()); + _impl->set_property_for_devivce(conf, std::string()); } else { - _impl->SetConfigForPlugins(conf, deviceName); + _impl->set_property_for_devivce(conf, deviceName); } } @@ -297,7 +302,7 @@ Parameter Core::GetConfig(const std::string& deviceName, const std::string& name } auto parsed = ov::parseDeviceNameIntoConfig(deviceName); - return _impl->GetCPPPluginByName(parsed._deviceName).get_config(name, parsed._config); + return _impl->get_plugin(parsed._deviceName).get_property(name, parsed._config); } Parameter Core::GetMetric(const std::string& deviceName, const std::string& name, const ParamMap& options) const { @@ -309,18 +314,18 @@ std::vector<std::string> Core::GetAvailableDevices() const { } void Core::RegisterPlugin(const std::string& pluginName, const std::string& deviceName) { - _impl->RegisterPluginByName(pluginName, deviceName); + _impl->register_plugin(pluginName, deviceName); } void Core::RegisterPlugins(const std::string& xmlConfigFile) { - _impl->RegisterPluginsInRegistry(xmlConfigFile); + _impl->register_plugins_in_registry(xmlConfigFile); } void Core::UnregisterPlugin(const std::string& deviceName_) { DeviceIDParser parser(deviceName_); std::string deviceName = parser.getDeviceName(); - _impl->UnloadPluginByName(deviceName); + _impl->unload_plugin(deviceName); } } // namespace InferenceEngine diff --git a/src/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp b/src/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp index 
cb77200b055..c5b68f037de 100644 --- a/src/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp +++ b/src/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp @@ -5,22 +5,29 @@ #pragma once #include <gmock/gmock.h> + #include "ie_icore.hpp" class MockICore : public InferenceEngine::ICore { public: - MOCK_CONST_METHOD3(ReadNetwork, InferenceEngine::CNNNetwork(const std::string&, const InferenceEngine::Blob::CPtr&, bool)); + MOCK_CONST_METHOD3(ReadNetwork, + InferenceEngine::CNNNetwork(const std::string&, const InferenceEngine::Blob::CPtr&, bool)); MOCK_CONST_METHOD2(ReadNetwork, InferenceEngine::CNNNetwork(const std::string&, const std::string&)); - MOCK_METHOD3(LoadNetwork, InferenceEngine::SoExecutableNetworkInternal( - const InferenceEngine::CNNNetwork&, const std::string&, const std::map<std::string, std::string>&)); - MOCK_METHOD3(LoadNetwork, InferenceEngine::SoExecutableNetworkInternal( - const InferenceEngine::CNNNetwork&, const std::shared_ptr<InferenceEngine::RemoteContext> &, const std::map<std::string, std::string>&)); - MOCK_METHOD4(LoadNetwork, InferenceEngine::SoExecutableNetworkInternal( - const std::string &, - const std::string &, - const std::map<std::string, std::string> &, - const std::function<void(const InferenceEngine::CNNNetwork&)> &)); + MOCK_METHOD3(LoadNetwork, + InferenceEngine::SoExecutableNetworkInternal(const InferenceEngine::CNNNetwork&, + const std::string&, + const std::map<std::string, std::string>&)); + MOCK_METHOD3(LoadNetwork, + InferenceEngine::SoExecutableNetworkInternal(const InferenceEngine::CNNNetwork&, + const std::shared_ptr<InferenceEngine::RemoteContext>&, + const std::map<std::string, std::string>&)); + MOCK_METHOD4( + LoadNetwork, + InferenceEngine::SoExecutableNetworkInternal(const std::string&, + const std::string&, + const std::map<std::string, std::string>&, + const std::function<void(const InferenceEngine::CNNNetwork&)>&)); MOCK_METHOD5( LoadNetwork, InferenceEngine::SoExecutableNetworkInternal(const std::string&, @@ -29,26 +36,65 @@ public: const std::map<std::string, std::string>&, const std::function<void(const InferenceEngine::CNNNetwork&)>&)); - MOCK_METHOD3(ImportNetwork, InferenceEngine::SoExecutableNetworkInternal( - std::istream&, const std::string&, const std::map<std::string, std::string>&)); - MOCK_METHOD3(ImportNetwork, InferenceEngine::SoExecutableNetworkInternal( - std::istream&, const std::shared_ptr<InferenceEngine::RemoteContext>&, const std::map<std::string, std::string>&)); + MOCK_METHOD3(ImportNetwork, + InferenceEngine::SoExecutableNetworkInternal(std::istream&, + const std::string&, + const std::map<std::string, std::string>&)); + MOCK_METHOD3(ImportNetwork, + InferenceEngine::SoExecutableNetworkInternal(std::istream&, + const std::shared_ptr<InferenceEngine::RemoteContext>&, + const std::map<std::string, std::string>&)); - MOCK_METHOD2(CreateContext, InferenceEngine::RemoteContext::Ptr(const std::string& deviceName, - const InferenceEngine::ParamMap& params)); + MOCK_METHOD2(CreateContext, + InferenceEngine::RemoteContext::Ptr(const std::string& deviceName, + const InferenceEngine::ParamMap& params)); - MOCK_CONST_METHOD3(QueryNetwork, InferenceEngine::QueryNetworkResult( - const InferenceEngine::CNNNetwork&, const std::string&, const std::map<std::string, std::string>&)); + MOCK_CONST_METHOD3(QueryNetwork, + InferenceEngine::QueryNetworkResult(const InferenceEngine::CNNNetwork&, + const std::string&, + const std::map<std::string, std::string>&)); MOCK_CONST_METHOD3(GetMetric, ov::Any(const std::string&, const std::string&, const ov::AnyMap&)); MOCK_CONST_METHOD2(GetConfig, ov::Any(const std::string&, const std::string&)); MOCK_CONST_METHOD3(get_property, ov::Any(const std::string&, const std::string&, const ov::AnyMap&)); - MOCK_METHOD2(set_property, void(const std::string&, const ov::AnyMap&)); MOCK_CONST_METHOD0(GetAvailableDevices, std::vector<std::string>()); - MOCK_CONST_METHOD1(DeviceSupportsImportExport, bool(const std::string&)); // NOLINT not a cast to bool - MOCK_METHOD2(GetSupportedConfig, std::map<std::string, std::string>(const std::string&, const std::map<std::string, std::string>&)); + 
MOCK_CONST_METHOD1(DeviceSupportsImportExport, bool(const std::string&)); // NOLINT not a cast to bool + MOCK_METHOD2(GetSupportedConfig, + std::map<std::string, std::string>(const std::string&, const std::map<std::string, std::string>&)); MOCK_CONST_METHOD0(isNewAPI, bool()); MOCK_METHOD1(GetDefaultContext, InferenceEngine::RemoteContext::Ptr(const std::string&)); + MOCK_CONST_METHOD0(is_new_api, bool()); + MOCK_CONST_METHOD2(create_context, ov::RemoteContext(const std::string& deviceName, const ov::AnyMap& params)); + MOCK_CONST_METHOD0(get_available_devices, std::vector<std::string>()); + MOCK_CONST_METHOD3(query_model, + ov::SupportedOpsMap(const std::shared_ptr<const ov::Model>&, + const std::string&, + const ov::AnyMap&)); + MOCK_CONST_METHOD3(import_model, + ov::SoPtr<ov::ICompiledModel>(std::istream&, + const std::string&, + const ov::AnyMap&)); + MOCK_CONST_METHOD3(compile_model, + ov::SoPtr<ov::ICompiledModel>(const std::shared_ptr<const ov::Model>&, + const std::string&, + const ov::AnyMap&)); + MOCK_CONST_METHOD3(compile_model, + ov::SoPtr<ov::ICompiledModel>(const std::shared_ptr<const ov::Model>&, + const ov::RemoteContext&, + const ov::AnyMap&)); + MOCK_CONST_METHOD3(compile_model, + ov::SoPtr<ov::ICompiledModel>(const std::string&, + const std::string&, + const ov::AnyMap&)); + MOCK_CONST_METHOD4(compile_model, + InferenceEngine::SoExecutableNetworkInternal(const std::string&, + const ov::Tensor&, + const std::string&, + const ov::AnyMap&)); + MOCK_CONST_METHOD3(read_model, std::shared_ptr<ov::Model>(const std::string&, const ov::Tensor&, bool)); + MOCK_CONST_METHOD2(read_model, std::shared_ptr<ov::Model>(const std::string&, const std::string&)); + MOCK_CONST_METHOD1(get_default_context, ov::RemoteContext(const std::string&)); + ~MockICore() = default; }; diff --git a/src/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp b/src/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp index c110f7d14b1..d9729dc6770 100644 --- a/src/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp +++ b/src/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp @@ -2,19 +2,20 @@ // SPDX-License-Identifier: Apache-2.0 // +#include "mock_plugin.hpp" + #include -#include #include #include +#include - -#include "openvino/runtime/common.hpp" -#include "mock_plugin.hpp" #include "description_buffer.hpp" +#include "openvino/runtime/common.hpp" using namespace std; using namespace InferenceEngine; -MockPlugin::MockPlugin(InferenceEngine::IInferencePlugin *target) { +MockPlugin::MockPlugin(InferenceEngine::IInferencePlugin* target) { _target = target; } @@ -25,7 +26,8 @@ void MockPlugin::SetConfig(const std::map<std::string, std::string>& _config) { if (_target) { _target->SetConfig(_config); } } -Parameter MockPlugin::GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const { +Parameter MockPlugin::GetMetric(const std::string& name, + const std::map<std::string, Parameter>& options) const { if (_target) { return _target->GetMetric(name, options); } else { @@ -33,9 +35,9 @@ Parameter MockPlugin::GetMetric(const std::map -MockPlugin::LoadNetwork(const CNNNetwork &network, - const std::map<std::string, std::string> &config) { +std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::LoadNetwork( + const CNNNetwork& network, + const std::map<std::string, std::string>& config) { if (_target) { return _target->LoadNetwork(network, config); } else { @@ -43,10 +45,10 @@ MockPlugin::LoadNetwork(const CNNNetwork &network, } } -std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> -MockPlugin::LoadNetwork(const CNNNetwork& network, - const std::map<std::string, std::string>& config, - const std::shared_ptr<RemoteContext>& context) { +std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::LoadNetwork( + const CNNNetwork& network, + const std::map<std::string, std::string>& config, + const std::shared_ptr<RemoteContext>& context) { if (_target) { return _target->LoadNetwork(network, config, context); } else { @@ -54,9 +56,9 @@ 
MockPlugin::LoadNetwork(const CNNNetwork& network, } } -ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> -MockPlugin::LoadNetwork(const std::string &modelPath, - const std::map<std::string, std::string> &config) { +ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::LoadNetwork( + const std::string& modelPath, + const std::map<std::string, std::string>& config) { if (_target) { return _target->LoadNetwork(modelPath, config); } else { @@ -64,15 +66,15 @@ MockPlugin::LoadNetwork(const std::string &modelPath, } } -std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> -MockPlugin::LoadExeNetworkImpl(const CNNNetwork& network, - const std::map<std::string, std::string>& config) { +std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::LoadExeNetworkImpl( + const CNNNetwork& network, + const std::map<std::string, std::string>& config) { return {}; } -std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> -MockPlugin::ImportNetwork(std::istream& networkModel, - const std::map<std::string, std::string>& config) { +std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::ImportNetwork( + std::istream& networkModel, + const std::map<std::string, std::string>& config) { if (_target) { return _target->ImportNetwork(networkModel, config); } else { @@ -80,10 +82,10 @@ MockPlugin::ImportNetwork(std::istream& networkModel, } } -std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> -MockPlugin::ImportNetwork(std::istream& networkModel, - const std::shared_ptr<RemoteContext>& context, - const std::map<std::string, std::string>& config) { +std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::ImportNetwork( + std::istream& networkModel, + const std::shared_ptr<RemoteContext>& context, + const std::map<std::string, std::string>& config) { if (_target) { return _target->ImportNetwork(networkModel, context, config); } else { @@ -99,9 +101,8 @@ std::shared_ptr<InferenceEngine::RemoteContext> MockPlugin::GetDefaultContext(co } } -InferenceEngine::QueryNetworkResult -MockPlugin::QueryNetwork(const InferenceEngine::CNNNetwork& network, - const std::map<std::string, std::string>& config) const { +InferenceEngine::QueryNetworkResult MockPlugin::QueryNetwork(const InferenceEngine::CNNNetwork& network, - const std::map<std::string, std::string>& config) const { + const std::map<std::string, std::string>& config) const { if (_target) { return _target->QueryNetwork(network, config); } else { @@ -130,20 +131,14 @@ std::string MockPlugin::GetName() const noexcept { return InferenceEngine::IInferencePlugin::GetName(); } +InferenceEngine::IInferencePlugin* __target = nullptr; -InferenceEngine::IInferencePlugin *__target = nullptr; - -OPENVINO_PLUGIN_API void CreatePluginEngine(std::shared_ptr<InferenceEngine::IInferencePlugin>& plugin) { - IInferencePlugin *p = nullptr; +OPENVINO_PLUGIN_API void CreatePluginEngine(std::shared_ptr<ov::IPlugin>& plugin) { + IInferencePlugin* p = nullptr; std::swap(__target, p); - plugin = std::make_shared<MockPlugin>(p); + plugin = convert_plugin(std::make_shared<MockPlugin>(p)); } -OPENVINO_PLUGIN_API InferenceEngine::IInferencePlugin* -CreatePluginEngineProxy(InferenceEngine::IInferencePlugin *target) { - return new MockPlugin(target); -} - -OPENVINO_PLUGIN_API void InjectProxyEngine(InferenceEngine::IInferencePlugin *target) { +OPENVINO_PLUGIN_API void InjectProxyEngine(InferenceEngine::IInferencePlugin* target) { __target = target; } diff --git a/tests/stress_tests/common/ie_pipelines/pipelines.cpp b/tests/stress_tests/common/ie_pipelines/pipelines.cpp index caeab490b42..63b278e332b 100644 --- a/tests/stress_tests/common/ie_pipelines/pipelines.cpp +++ b/tests/stress_tests/common/ie_pipelines/pipelines.cpp @@ -16,7 +16,7 @@ std::function<void()> load_unload_plugin(const std::string &target_device, const int &api_version) { return [&] { auto ie_api_wrapper = create_infer_api_wrapper(api_version); - // get_versions silently register plugin in `plugins` through `GetCPPPluginByName` + // get_versions silently registers the plugin in `plugins` through `get_plugin` ie_api_wrapper->load_plugin(target_device); // Remove plugin for target_device from `plugins` ie_api_wrapper->unload_plugin(target_device); diff --git a/tests/stress_tests/unittests/tests_pipelines/tests_pipelines_full_pipeline.cpp 
b/tests/stress_tests/unittests/tests_pipelines/tests_pipelines_full_pipeline.cpp index dc3d37ecbe1..a0396acef77 100644 --- a/tests/stress_tests/unittests/tests_pipelines/tests_pipelines_full_pipeline.cpp +++ b/tests/stress_tests/unittests/tests_pipelines/tests_pipelines_full_pipeline.cpp @@ -39,7 +39,7 @@ void test_load_unload_plugin_full_pipeline(const std::string &model, const std:: if (i == n / 2) { log_info("Half of the test have already passed"); } - // get_versions silently register plugin in `plugins` through `GetCPPPluginByName` + // get_versions silently register plugin in `plugins` through `get_plugin` ie_api_wrapper->load_plugin(target_device); // Remove plugin for target_device from `plugins` ie_api_wrapper->unload_plugin(target_device);
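
Reviewer note (not part of the patch): to make the intent of the new ov::Plugin wrapper easier to see, here is a minimal usage sketch of the get_property() path implemented in src/inference/src/dev/plugin.cpp above. It is illustrative only: the describe_device() helper is hypothetical, error handling is omitted, and it assumes the wrapper is obtained from the core (e.g. via CoreImpl::get_plugin) exactly as the changed ie_core.cpp code does.

// Illustrative sketch only - not part of the patch.
// Shows the intended call pattern for the ov::Plugin wrapper: the typed
// get_property() overload returns ov::supported_properties directly for new
// plugins and, for legacy plugins wrapped by IPluginWrapper, synthesizes the
// list from the old SUPPORTED_METRICS / SUPPORTED_CONFIG_KEYS metrics.
#include <iostream>

#include "openvino/runtime/properties.hpp"
#include "plugin.hpp"  // src/inference/src/dev/plugin.hpp

void describe_device(const ov::Plugin& plugin) {  // hypothetical helper
    std::cout << "Plugin build: " << plugin.get_version().buildNumber << std::endl;

    // Returns std::vector<ov::PropertyName>; mutability reflects whether the
    // name came from the RO metric list or the RW config-key list.
    for (const auto& property : plugin.get_property(ov::supported_properties)) {
        std::cout << property << (property.is_mutable() ? " (RW)" : " (RO)") << std::endl;
    }
}

The design choice worth noting is that the fallback lives in the wrapper, so callers such as CoreImpl never need to know whether a device still ships a legacy InferenceEngine plugin.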
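Similarly, a hedged sketch of how the extended MockICore could be exercised in a gmock-based test, assuming the new ov::ICore mock methods added above and the usual unit_test_utils include path; the test name, the "MOCK" device string, and the expectations are illustrative and not taken from the patch.

// Illustrative sketch only - not part of the patch.
#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp"  // assumed include path

using ::testing::Return;

TEST(MockICoreUsage, NewApiMethodsCanBeStubbed) {
    auto core = std::make_shared<MockICore>();

    // The new ov::ICore entry points are ordinary gmock methods now.
    EXPECT_CALL(*core, is_new_api()).WillRepeatedly(Return(true));
    EXPECT_CALL(*core, get_available_devices())
        .WillOnce(Return(std::vector<std::string>{"MOCK"}));

    // A plugin under test would receive this core through ov::Plugin::set_core().
    ASSERT_TRUE(core->is_new_api());
    ASSERT_EQ(core->get_available_devices().size(), 1u);
}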