Introduce ov::ICore (#15096)

* Added new ov::ICore

* Fixed style and removed unused functions

* Fixed mock core

* Fixed linker issue

* Fixed inference functional tests

* Update copyrights

* Use new plugin API inside the new API

* Fixed CPU tests

* Removed old plugin from ov::IPlugin

* Fixed Inference tests

* Added comments and some other minor changes

* Split core interfaces

* Added logic to load v7 IRs without a crash

* Fixed thread tests

* Fixed comments:

 * Add const keyword for core methods
 * Rename core methods

* Fixed mock core

* Added more const methods

* Fixed tests

* Remove unnecessary exception catch

* Fixed some comments

* Remove default implementations of get/set property

* Fix additional comments

* Removed set property

* Try to fix Linux build

* Revert function move
Ilya Churaev 2023-01-20 13:43:48 +04:00 committed by GitHub
parent ffdf31fba8
commit e2635a0053
29 changed files with 2742 additions and 1182 deletions


@ -26,6 +26,7 @@ class ExecutableNetwork;
} // namespace InferenceEngine } // namespace InferenceEngine
namespace ov { namespace ov {
class Plugin;
/** @cond INTERNAL */ /** @cond INTERNAL */
class Any; class Any;
namespace util { namespace util {
@ -339,7 +340,6 @@ class RuntimeAttribute;
class CompiledModel; class CompiledModel;
class RemoteContext; class RemoteContext;
class RemoteTensor; class RemoteTensor;
class InferencePlugin;
/** /**
* @brief This class represents an object to work with different types * @brief This class represents an object to work with different types
@ -632,7 +632,7 @@ class OPENVINO_API Any {
friend class ::ov::CompiledModel; friend class ::ov::CompiledModel;
friend class ::ov::RemoteContext; friend class ::ov::RemoteContext;
friend class ::ov::RemoteTensor; friend class ::ov::RemoteTensor;
friend class ::ov::InferencePlugin; friend class ::ov::Plugin;
Any(const Any& other, const std::vector<std::shared_ptr<void>>& so); Any(const Any& other, const std::vector<std::shared_ptr<void>>& so);


@ -24,6 +24,7 @@ class Blob;
namespace ov { namespace ov {
class Core; class Core;
class CoreImpl;
class InferRequest; class InferRequest;
class RemoteContext; class RemoteContext;
class VariableState; class VariableState;
@ -47,6 +48,7 @@ protected:
Tensor(const std::shared_ptr<InferenceEngine::Blob>& impl, const std::vector<std::shared_ptr<void>>& so); Tensor(const std::shared_ptr<InferenceEngine::Blob>& impl, const std::vector<std::shared_ptr<void>>& so);
friend class ov::Core; friend class ov::Core;
friend class ov::CoreImpl;
friend class ov::InferRequest; friend class ov::InferRequest;
friend class ov::RemoteContext; friend class ov::RemoteContext;
friend class ov::VariableState; friend class ov::VariableState;


@ -19,19 +19,17 @@
#include "ie_iextension.h" #include "ie_iextension.h"
#include "ie_input_info.hpp" #include "ie_input_info.hpp"
#include "ie_parameter.hpp" #include "ie_parameter.hpp"
#include "openvino/runtime/iplugin.hpp"
#include "openvino/util/pp.hpp" #include "openvino/util/pp.hpp"
#include "so_ptr.hpp" #include "so_ptr.hpp"
namespace ov {
class Function;
class ICore;
} // namespace ov
namespace InferenceEngine { namespace InferenceEngine {
class ExecutorManager; class ExecutorManager;
class IExecutableNetworkInternal; class IExecutableNetworkInternal;
class RemoteContext; class RemoteContext;
class IExtension; class IExtension;
class ICore;
/** /**
* @brief Copies preprocess info * @brief Copies preprocess info
@ -287,13 +285,13 @@ public:
* @brief Sets pointer to ICore interface * @brief Sets pointer to ICore interface
* @param core Pointer to Core interface * @param core Pointer to Core interface
*/ */
virtual void SetCore(std::weak_ptr<ov::ICore> core); virtual void SetCore(std::weak_ptr<InferenceEngine::ICore> core);
/** /**
* @brief Gets reference to ICore interface * @brief Gets reference to ICore interface
* @return Reference to ICore interface * @return Reference to ICore interface
*/ */
virtual std::shared_ptr<ov::ICore> GetCore() const noexcept; virtual std::shared_ptr<InferenceEngine::ICore> GetCore() const noexcept;
/** /**
* @brief Provides an information about used API * @brief Provides an information about used API
@ -374,7 +372,7 @@ protected:
std::string _pluginName; //!< A device name that plugins enables std::string _pluginName; //!< A device name that plugins enables
std::map<std::string, std::string> _config; //!< A map config keys -> values std::map<std::string, std::string> _config; //!< A map config keys -> values
std::weak_ptr<ov::ICore> _core; //!< A pointer to ICore interface std::weak_ptr<InferenceEngine::ICore> _core; //!< A pointer to ICore interface
std::shared_ptr<ExecutorManager> _executorManager; //!< A tasks execution manager std::shared_ptr<ExecutorManager> _executorManager; //!< A tasks execution manager
bool _isNewAPI; //!< A flag which shows used API bool _isNewAPI; //!< A flag which shows used API
}; };
@ -382,7 +380,7 @@ protected:
/** /**
* @private * @private
*/ */
using CreatePluginEngineFunc = void(std::shared_ptr<IInferencePlugin>&); using CreatePluginEngineFunc = void(std::shared_ptr<::ov::IPlugin>&);
/** /**
* @private * @private
@ -403,6 +401,9 @@ using CreateExtensionFunc = void(std::shared_ptr<IExtension>&);
*/ */
constexpr static const auto create_plugin_function = OV_PP_TOSTRING(IE_CREATE_PLUGIN); constexpr static const auto create_plugin_function = OV_PP_TOSTRING(IE_CREATE_PLUGIN);
INFERENCE_ENGINE_API_CPP(std::shared_ptr<::ov::IPlugin>)
convert_plugin(const std::shared_ptr<InferenceEngine::IInferencePlugin>& from);
} // namespace InferenceEngine } // namespace InferenceEngine
/** /**
@ -410,20 +411,22 @@ constexpr static const auto create_plugin_function = OV_PP_TOSTRING(IE_CREATE_PL
* @brief Defines the exported `IE_CREATE_PLUGIN` function which is used to create a plugin instance * @brief Defines the exported `IE_CREATE_PLUGIN` function which is used to create a plugin instance
* @ingroup ie_dev_api_plugin_api * @ingroup ie_dev_api_plugin_api
*/ */
#define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \ #define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \
INFERENCE_PLUGIN_API(void) \ INFERENCE_PLUGIN_API(void) \
IE_CREATE_PLUGIN(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) noexcept(false); \ IE_CREATE_PLUGIN(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false); \
void IE_CREATE_PLUGIN(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) noexcept(false) { \ void IE_CREATE_PLUGIN(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false) { \
try { \ std::shared_ptr<::InferenceEngine::IInferencePlugin> ie_plugin; \
plugin = ::std::make_shared<PluginType>(__VA_ARGS__); \ try { \
} catch (const InferenceEngine::Exception&) { \ ie_plugin = ::std::make_shared<PluginType>(__VA_ARGS__); \
throw; \ } catch (const InferenceEngine::Exception&) { \
} catch (const std::exception& ex) { \ throw; \
IE_THROW() << ex.what(); \ } catch (const std::exception& ex) { \
} catch (...) { \ IE_THROW() << ex.what(); \
IE_THROW(Unexpected); \ } catch (...) { \
} \ IE_THROW(Unexpected); \
plugin->SetVersion(version); \ } \
ie_plugin->SetVersion(version); \
plugin = convert_plugin(ie_plugin); \
} }
/** /**
@ -431,7 +434,7 @@ constexpr static const auto create_plugin_function = OV_PP_TOSTRING(IE_CREATE_PL
*/ */
#define IE_DEFINE_PLUGIN_CREATE_FUNCTION_DECLARATION(_IE_CREATE_PLUGIN_FUNC) \ #define IE_DEFINE_PLUGIN_CREATE_FUNCTION_DECLARATION(_IE_CREATE_PLUGIN_FUNC) \
INFERENCE_PLUGIN_API(void) \ INFERENCE_PLUGIN_API(void) \
_IE_CREATE_PLUGIN_FUNC(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) noexcept(false) _IE_CREATE_PLUGIN_FUNC(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false)
/** /**
* @private * @private
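With this change the exported IE_CREATE_PLUGIN entry point (typically CreatePluginEngine) returns an ov::IPlugin even for plugins still written against InferenceEngine::IInferencePlugin: the macro first constructs the legacy plugin and then adapts it through the convert_plugin() helper declared above. A rough sketch of how an existing plugin keeps using the macro and what the rewritten body boils down to; MyLegacyPlugin and the version values are placeholders, and exception handling is omitted:

// Legacy plugin implementation, unchanged by this commit.
class MyLegacyPlugin : public InferenceEngine::IInferencePlugin {
    // ... LoadExeNetworkImpl, QueryNetwork, SetConfig, etc. ...
};

static const InferenceEngine::Version version = {{2, 1}, "custom_build", "my_legacy_plugin"};
IE_DEFINE_PLUGIN_CREATE_FUNCTION(MyLegacyPlugin, version)

// Simplified expansion of the macro after this commit:
// void CreatePluginEngine(std::shared_ptr<ov::IPlugin>& plugin) {
//     auto ie_plugin = std::make_shared<MyLegacyPlugin>();   // the legacy plugin is still created
//     ie_plugin->SetVersion(version);
//     plugin = InferenceEngine::convert_plugin(ie_plugin);   // wrapped into the new ov::IPlugin
// }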


@ -17,16 +17,12 @@
#include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp" #include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp"
#include "ie_parameter.hpp" #include "ie_parameter.hpp"
#include "ie_remote_context.hpp" #include "ie_remote_context.hpp"
#include "openvino/runtime/icore.hpp"
#include "openvino/runtime/properties.hpp" #include "openvino/runtime/properties.hpp"
namespace ov { namespace InferenceEngine {
/** class ICore : public ov::ICore {
* @interface ICore
* @brief Minimal ICore interface to allow plugin to get information from Core Inference Engine class.
* @ingroup ie_dev_api_plugin_api
*/
class ICore {
public: public:
/** /**
* @brief Reads IR xml and bin (with the same name) files * @brief Reads IR xml and bin (with the same name) files
@ -35,9 +31,9 @@ public:
* @param frontendMode read network without post-processing or other transformations * @param frontendMode read network without post-processing or other transformations
* @return CNNNetwork * @return CNNNetwork
*/ */
virtual ie::CNNNetwork ReadNetwork(const std::string& model, virtual CNNNetwork ReadNetwork(const std::string& model,
const ie::Blob::CPtr& weights, const Blob::CPtr& weights,
bool frontendMode = false) const = 0; bool frontendMode = false) const = 0;
/** /**
* @brief Reads IR xml and bin files * @brief Reads IR xml and bin files
@ -46,7 +42,7 @@ public:
* if bin file with the same name was not found, will load IR without weights. * if bin file with the same name was not found, will load IR without weights.
* @return CNNNetwork * @return CNNNetwork
*/ */
virtual ie::CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath) const = 0; virtual CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath) const = 0;
/** /**
* @brief Creates an executable network from a network object. * @brief Creates an executable network from a network object.
@ -60,9 +56,9 @@ public:
* operation * operation
* @return An executable network reference * @return An executable network reference
*/ */
virtual ie::SoExecutableNetworkInternal LoadNetwork(const ie::CNNNetwork& network, virtual SoExecutableNetworkInternal LoadNetwork(const CNNNetwork& network,
const std::string& deviceName, const std::string& deviceName,
const std::map<std::string, std::string>& config = {}) = 0; const std::map<std::string, std::string>& config = {}) = 0;
/** /**
* @brief Creates an executable network from a network object. * @brief Creates an executable network from a network object.
@ -76,28 +72,9 @@ public:
* operation * operation
* @return An executable network reference * @return An executable network reference
*/ */
virtual ie::SoExecutableNetworkInternal LoadNetwork(const ie::CNNNetwork& network, virtual SoExecutableNetworkInternal LoadNetwork(const CNNNetwork& network,
const ie::RemoteContext::Ptr& remoteCtx, const RemoteContext::Ptr& remoteCtx,
const std::map<std::string, std::string>& config = {}) = 0; const std::map<std::string, std::string>& config = {}) = 0;
/**
* @brief Creates an executable network from a model file.
*
* Users can create as many networks as they need and use
* them simultaneously (up to the limitation of the hardware resources)
*
* @param modelPath Path to model
* @param deviceName Name of device to load network to
* @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
* operation
* @param val Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered
* @return An executable network reference
*/
virtual ie::SoExecutableNetworkInternal LoadNetwork(
const std::string& modelPath,
const std::string& deviceName,
const std::map<std::string, std::string>& config,
const std::function<void(const ie::CNNNetwork&)>& val = nullptr) = 0;
/** /**
* @brief Creates an executable network from a model memory. * @brief Creates an executable network from a model memory.
@ -113,12 +90,30 @@ public:
* @param val Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered * @param val Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered
* @return An executable network reference * @return An executable network reference
*/ */
virtual ie::SoExecutableNetworkInternal LoadNetwork( virtual SoExecutableNetworkInternal LoadNetwork(
const std::string& modelStr, const std::string& modelStr,
const ie::Blob::CPtr& weights, const InferenceEngine::Blob::CPtr& weights,
const std::string& deviceName, const std::string& deviceName,
const std::map<std::string, std::string>& config, const std::map<std::string, std::string>& config,
const std::function<void(const ie::CNNNetwork&)>& val = nullptr) = 0; const std::function<void(const InferenceEngine::CNNNetwork&)>& val = nullptr) = 0;
/**
* @brief Creates an executable network from a model file.
*
* Users can create as many networks as they need and use
* them simultaneously (up to the limitation of the hardware resources)
*
* @param modelPath Path to model
* @param deviceName Name of device to load network to
* @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
* operation
* @param val Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered
* @return An executable network reference
*/
virtual SoExecutableNetworkInternal LoadNetwork(const std::string& modelPath,
const std::string& deviceName,
const std::map<std::string, std::string>& config,
const std::function<void(const CNNNetwork&)>& val = nullptr) = 0;
/** /**
* @brief Creates an executable network from a previously exported network * @brief Creates an executable network from a previously exported network
@ -128,9 +123,9 @@ public:
* operation* * operation*
* @return An executable network reference * @return An executable network reference
*/ */
virtual ie::SoExecutableNetworkInternal ImportNetwork(std::istream& networkModel, virtual SoExecutableNetworkInternal ImportNetwork(std::istream& networkModel,
const std::string& deviceName = {}, const std::string& deviceName = {},
const std::map<std::string, std::string>& config = {}) = 0; const std::map<std::string, std::string>& config = {}) = 0;
/** /**
* @brief Query device if it supports specified network with specified configuration * @brief Query device if it supports specified network with specified configuration
@ -140,9 +135,9 @@ public:
* @param config Optional map of pairs: (config parameter name, config parameter value) * @param config Optional map of pairs: (config parameter name, config parameter value)
* @return An object containing a map of pairs a layer name -> a device name supporting this layer. * @return An object containing a map of pairs a layer name -> a device name supporting this layer.
*/ */
virtual ie::QueryNetworkResult QueryNetwork(const ie::CNNNetwork& network, virtual QueryNetworkResult QueryNetwork(const CNNNetwork& network,
const std::string& deviceName, const std::string& deviceName,
const std::map<std::string, std::string>& config) const = 0; const std::map<std::string, std::string>& config) const = 0;
/** /**
* @brief Gets general runtime metric for dedicated hardware. * @brief Gets general runtime metric for dedicated hardware.
@ -154,7 +149,9 @@ public:
* @param name - metric name to request. * @param name - metric name to request.
* @return Metric value corresponding to metric key. * @return Metric value corresponding to metric key.
*/ */
virtual Any GetMetric(const std::string& deviceName, const std::string& name, const AnyMap& options = {}) const = 0; virtual ov::Any GetMetric(const std::string& deviceName,
const std::string& name,
const ov::AnyMap& options = {}) const = 0;
/** /**
* @brief Gets configuration dedicated to device behaviour. * @brief Gets configuration dedicated to device behaviour.
@ -165,7 +162,7 @@ public:
* @param name - config key. * @param name - config key.
* @return Value of config corresponding to config key. * @return Value of config corresponding to config key.
*/ */
virtual Any GetConfig(const std::string& deviceName, const std::string& name) const = 0; virtual ov::Any GetConfig(const std::string& deviceName, const std::string& name) const = 0;
/** /**
* @brief Returns devices available for neural networks inference * @brief Returns devices available for neural networks inference
@ -191,7 +188,7 @@ public:
* @param params Map of device-specific shared context parameters. * @param params Map of device-specific shared context parameters.
* @return A shared pointer to a created remote context. * @return A shared pointer to a created remote context.
*/ */
virtual InferenceEngine::RemoteContext::Ptr CreateContext(const std::string& deviceName, const AnyMap&) = 0; virtual InferenceEngine::RemoteContext::Ptr CreateContext(const std::string& deviceName, const ov::AnyMap&) = 0;
/** /**
* @brief Get only configs that are suppored by device * @brief Get only configs that are suppored by device
@ -209,81 +206,9 @@ public:
* @param deviceName - A name of a device to get create shared context from. * @param deviceName - A name of a device to get create shared context from.
* @return A shared pointer to a default remote context. * @return A shared pointer to a default remote context.
*/ */
virtual ie::RemoteContext::Ptr GetDefaultContext(const std::string& deviceName) = 0; virtual RemoteContext::Ptr GetDefaultContext(const std::string& deviceName) = 0;
/**
* @brief Sets properties for a device, acceptable keys can be found in openvino/runtime/properties.hpp.
*
* @param device_name Name of a device.
*
* @param properties Map of pairs: (property name, property value).
*/
virtual void set_property(const std::string& device_name, const AnyMap& properties) = 0;
/**
* @brief Sets properties for a device, acceptable keys can be found in openvino/runtime/properties.hpp.
*
* @tparam Properties Should be the pack of `std::pair<std::string, Any>` types.
* @param device_name Name of a device.
* @param properties Optional pack of pairs: (property name, property value).
*/
template <typename... Properties>
util::EnableIfAllStringAny<void, Properties...> set_property(const std::string& device_name,
Properties&&... properties) {
set_property(device_name, AnyMap{std::forward<Properties>(properties)...});
}
/**
* @brief Gets properties related to device behaviour.
*
*
* @param device_name Name of a device to get a property value.
* @param name Property name.
* @param arguments Additional arguments to get a property.
* @return Value of a property corresponding to the property name.
*/
virtual Any get_property(const std::string& device_name,
const std::string& name,
const AnyMap& arguments) const = 0;
/**
* @brief Gets properties related to device behaviour.
*
* @tparam T Type of a returned value.
* @tparam M Property mutability.
* @param deviceName Name of a device to get a property value.
* @param property Property object.
* @return Property value.
*/
template <typename T, PropertyMutability M>
T get_property(const std::string& device_name, const Property<T, M>& property) const {
return get_property(device_name, property.name(), {}).template as<T>();
}
/**
* @brief Gets properties related to device behaviour.
*
* @tparam T Type of a returned value.
* @tparam M Property mutability.
* @param deviceName Name of a device to get a property value.
* @param property Property object.
* @param arguments Additional arguments to get a property.
* @return Property value.
*/
template <typename T, PropertyMutability M>
T get_property(const std::string& device_name, const Property<T, M>& property, const AnyMap& arguments) const {
return get_property(device_name, property.name(), arguments).template as<T>();
}
/**
* @brief Default virtual destructor
*/
virtual ~ICore() = default;
}; };
} // namespace ov
namespace InferenceEngine {
using ICore = ov::ICore;
/** /**
* @private * @private
*/ */
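Because InferenceEngine::ICore now derives from ov::ICore, the core object handed to a plugin exposes both generations of the API at once. A minimal sketch, assuming core is the std::shared_ptr<InferenceEngine::ICore> obtained from IInferencePlugin::GetCore() and the file names are placeholders:

void read_both_ways(const std::shared_ptr<InferenceEngine::ICore>& core) {
    // Legacy path: CNNNetwork-based reading, unchanged for existing plugins.
    InferenceEngine::CNNNetwork net = core->ReadNetwork("model.xml", "model.bin");
    // New path: the same object also satisfies ov::ICore and returns ov::Model directly.
    std::shared_ptr<ov::Model> model = core->read_model("model.xml", "model.bin");
    (void)net;
    (void)model;
}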


@ -0,0 +1,212 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief A file provides API for Core object
* @file openvino/runtime/icore.hpp
*/
#pragma once
#include <memory>
#include <openvino/runtime/compiled_model.hpp>
#include "openvino/runtime/tensor.hpp"
#include "so_ptr.hpp"
namespace ov {
/**
* @interface ICore
* @brief Minimal ICore interface to allow plugin to get information from Core Inference Engine class.
* @ingroup ie_dev_api_plugin_api
*/
class ICore {
public:
/**
* @brief Reads IR xml and bin (with the same name) files
* @param model string with IR
* @param weights shared pointer to constant blob with weights
* @param frontend_mode read network without post-processing or other transformations
* @return shared pointer to ov::Model
*/
virtual std::shared_ptr<ov::Model> read_model(const std::string& model,
const ov::Tensor& weights,
bool frontend_mode = false) const = 0;
/**
* @brief Reads IR xml and bin files
* @param model_path path to IR file
* @param bin_path path to bin file, if path is empty, will try to read bin file with the same name as xml and
* if bin file with the same name was not found, will load IR without weights.
* @return shared pointer to ov::Model
*/
virtual std::shared_ptr<ov::Model> read_model(const std::string& model_path, const std::string& bin_path) const = 0;
/**
* @brief Creates an executable network from a network object.
*
* Users can create as many networks as they need and use
* them simultaneously (up to the limitation of the hardware resources)
*
* @param model OpenVINO Model
* @param device_name Name of device to load network to
* @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
* operation
* @return An executable network reference
*/
virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(
const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config = {}) const = 0;
/**
* @brief Creates an executable network from a network object.
*
* Users can create as many networks as they need and use
* them simultaneously (up to the limitation of the hardware resources)
*
* @param model OpenVINO Model
* @param context "Remote" (non-CPU) accelerator device-specific execution context to use
* @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
* operation
* @return An executable network reference
*/
virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::RemoteContext& context,
const ov::AnyMap& config = {}) const = 0;
/**
* @brief Creates an executable network from a model file.
*
* Users can create as many networks as they need and use
* them simultaneously (up to the limitation of the hardware resources)
*
* @param model_path Path to model
* @param device_name Name of device to load network to
* @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
* operation
* @return An executable network reference
*/
virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::string& model_path,
const std::string& device_name,
const ov::AnyMap& config) const = 0;
/**
* @brief Creates an executable network from a model memory.
*
* Users can create as many networks as they need and use
* them simultaneously (up to the limitation of the hardware resources)
*
* @param model_str String data of model
* @param weights Model's weights
* @param device_name Name of device to load network to
* @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
* operation
* @return An executable network reference
*/
virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::string& model_str,
const ov::Tensor& weights,
const std::string& device_name,
const ov::AnyMap& config) const = 0;
/**
* @brief Creates an executable network from a previously exported network
* @param model model stream
* @param device_name Name of device load executable network on
* @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
* operation*
* @return An executable network reference
*/
virtual ov::SoPtr<InferenceEngine::IExecutableNetworkInternal>
import_model(std::istream& model, const std::string& device_name, const ov::AnyMap& config = {}) const = 0;
/**
* @brief Query device if it supports specified network with specified configuration
*
* @param model OpenVINO Model
* @param device_name A name of a device to query
* @param config Optional map of pairs: (config parameter name, config parameter value)
* @return An object containing a map of pairs a layer name -> a device name supporting this layer.
*/
virtual ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config) const = 0;
/**
* @brief Returns devices available for neural networks inference
*
* @return A vector of devices. The devices are returned as { CPU, GPU.0, GPU.1, MYRIAD }
* If there more than one device of specific type, they are enumerated with .# suffix.
*/
virtual std::vector<std::string> get_available_devices() const = 0;
/**
* @brief Create a new shared context object on specified accelerator device
* using specified plugin-specific low level device API parameters (device handle, pointer, etc.)
* @param device_name Name of a device to create new shared context on.
* @param params Map of device-specific shared context parameters.
* @return A shared pointer to a created remote context.
*/
virtual ov::RemoteContext create_context(const std::string& device_name, const AnyMap& args) const = 0;
virtual bool is_new_api() const = 0;
/**
* @brief Get a pointer to default shared context object for the specified device.
* @param device_name - A name of a device to get create shared context from.
* @return A shared pointer to a default remote context.
*/
virtual ov::RemoteContext get_default_context(const std::string& device_name) const = 0;
/**
* @brief Gets properties related to device behaviour.
*
*
* @param device_name Name of a device to get a property value.
* @param name Property name.
* @param arguments Additional arguments to get a property.
* @return Value of a property corresponding to the property name.
*/
virtual Any get_property(const std::string& device_name,
const std::string& name,
const AnyMap& arguments) const = 0;
/**
* @brief Gets properties related to device behaviour.
*
* @tparam T Type of a returned value.
* @tparam M Property mutability.
* @param deviceName Name of a device to get a property value.
* @param property Property object.
* @return Property value.
*/
template <typename T, PropertyMutability M>
T get_property(const std::string& device_name, const Property<T, M>& property) const {
return get_property(device_name, property.name(), {}).template as<T>();
}
/**
* @brief Gets properties related to device behaviour.
*
* @tparam T Type of a returned value.
* @tparam M Property mutability.
* @param deviceName Name of a device to get a property value.
* @param property Property object.
* @param arguments Additional arguments to get a property.
* @return Property value.
*/
template <typename T, PropertyMutability M>
T get_property(const std::string& device_name, const Property<T, M>& property, const AnyMap& arguments) const {
return get_property(device_name, property.name(), arguments).template as<T>();
}
/**
* @brief Default virtual destructor
*/
virtual ~ICore();
};
} // namespace ov
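Plugins are expected to consume this interface through the pointer stored in the plugin base class; the typed get_property() helpers spare them the manual ov::Any conversion. A short usage sketch, assuming core is a valid std::shared_ptr<ov::ICore> and "CPU" is just an example device:

#include "openvino/runtime/properties.hpp"

void query_core(const std::shared_ptr<ov::ICore>& core) {
    // Enumerate devices registered in the core.
    const std::vector<std::string> devices = core->get_available_devices();
    // Typed property helper: the ov::Any result is converted to std::string internally.
    const std::string full_name = core->get_property("CPU", ov::device::full_name);
    // Untyped form: returns ov::Any for the caller to convert.
    const ov::Any value = core->get_property("CPU", ov::device::full_name.name(), {});
    (void)devices;
    (void)full_name;
    (void)value;
}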


@ -0,0 +1,237 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief OpenVINO Runtime plugin API wrapper
* @file openvino/runtime/iplugin.hpp
*/
#pragma once
#include <memory>
#include "openvino/core/any.hpp"
#include "openvino/core/deprecated.hpp"
#include "openvino/core/model.hpp"
#include "openvino/core/version.hpp"
#include "openvino/runtime/common.hpp"
#include "openvino/runtime/icore.hpp"
#include "openvino/runtime/remote_context.hpp"
#include "threading/ie_executor_manager.hpp"
namespace InferenceEngine {
class IExecutableNetworkInternal;
class IPluginWrapper;
class IExtension;
} // namespace InferenceEngine
namespace ov {
/**
* @brief OpenVINO Plugin Interface 2.0
*/
class OPENVINO_RUNTIME_API IPlugin : public std::enable_shared_from_this<IPlugin> {
public:
/**
* @brief Sets a plugin version
*
* @param version A version to set
*/
void set_version(const Version& version);
/**
* @brief Returns a plugin version
*
* @return A constant ov::Version object
*/
const Version& get_version() const;
/**
* @brief Sets a name for the plugin
*
* @param name Plugin name
*/
void set_device_name(const std::string& name);
/**
* @brief Provides a plugin name
*
* @return Plugin name
*/
const std::string& get_device_name() const;
/**
* @brief Compiles model from ov::Model object
* @param model A model object acquired from ov::Core::read_model or source construction
* @param properties A ov::AnyMap of properties relevant only for this load operation
* @return Created Compiled Model object
*/
virtual std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const = 0;
/**
* @brief Compiles model from ov::Model object
* @param model_path A path to model (path can be converted from unicode representation)
* @param properties A ov::AnyMap of properties relevant only for this load operation
* @return Created Compiled Model object
*/
virtual std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
const std::string& model_path,
const ov::AnyMap& properties) const = 0;
/**
* @brief Compiles model from ov::Model object, on specified remote context
* @param model A model object acquired from ov::Core::read_model or source construction
* @param properties A ov::AnyMap of properties relevant only for this load operation
* @param context A pointer to plugin context derived from RemoteContext class used to
* execute the model
* @return Created Compiled Model object
*/
virtual std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties,
const ov::RemoteContext& context) const = 0;
/**
* @brief Sets properties for plugin, acceptable keys can be found in openvino/runtime/properties.hpp
* @param properties ov::AnyMap of properties
*/
virtual void set_property(const ov::AnyMap& properties) = 0;
/**
* @brief Gets properties related to plugin behaviour.
*
* @param name Property name.
* @param arguments Additional arguments to get a property.
*
* @return Value of a property corresponding to the property name.
*/
virtual ov::Any get_property(const std::string& name, const ov::AnyMap& arguments) const = 0;
/**
* @brief Creates a remote context instance based on a map of properties
* @param remote_properties Map of device-specific shared context remote properties.
*
* @return A remote context object
*/
virtual RemoteContext create_context(const ov::AnyMap& remote_properties) const = 0;
/**
* @brief Provides a default remote context instance if supported by a plugin
* @param remote_properties Map of device-specific shared context remote properties.
*
* @return The default context.
*/
virtual RemoteContext get_default_context(const ov::AnyMap& remote_properties) const = 0;
/**
* @brief Creates an compiled model from an previously exported model using plugin implementation
* and removes OpenVINO Runtime magic and plugin name
* @param model Reference to model output stream
* @param properties A ov::AnyMap of properties
* @return An Compiled model
*/
virtual std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> import_model(
std::istream& model,
const ov::AnyMap& properties) const = 0;
/**
* @brief Creates an compiled model from an previously exported model using plugin implementation
* and removes OpenVINO Runtime magic and plugin name
* @param model Reference to model output stream
* @param context A pointer to plugin context derived from RemoteContext class used to
* execute the network
* @param properties A ov::AnyMap of properties
* @return An Compiled model
*/
virtual std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>
import_model(std::istream& model, const ov::RemoteContext& context, const ov::AnyMap& properties) const = 0;
/**
* @brief Queries a plugin about supported layers in model
* @param model Model object to query.
* @param properties Optional map of pairs: (property name, property value).
* @return An object containing a map of pairs an operation name -> a device name supporting this operation.
*/
virtual ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const = 0;
/**
* @deprecated This method allows to load legacy Inference Engine Extensions and will be removed in 2024.0 release
* @brief Registers legacy extension within plugin
* @param extension - pointer to already loaded legacy extension
*/
OPENVINO_DEPRECATED(
"This method allows to load legacy Inference Engine Extensions and will be removed in 2024.0 release")
virtual void add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension);
/**
* @brief Sets pointer to ICore interface
* @param core Pointer to Core interface
*/
void set_core(const std::weak_ptr<ov::ICore>& core);
/**
* @brief Gets reference to ICore interface
* @return Reference to ICore interface
*/
std::shared_ptr<ov::ICore> get_core() const;
/**
* @brief Provides an information about used API
* @return true if new API is used
*/
bool is_new_api() const;
/**
* @brief Gets reference to tasks execution manager
* @return Reference to ExecutorManager interface
*/
const std::shared_ptr<InferenceEngine::ExecutorManager>& get_executor_manager() const;
~IPlugin() = default;
protected:
IPlugin();
private:
friend ::InferenceEngine::IPluginWrapper;
std::string m_plugin_name; //!< A device name that plugins enables
std::weak_ptr<ov::ICore> m_core; //!< A pointer to ICore interface
std::shared_ptr<InferenceEngine::ExecutorManager> m_executor_manager; //!< A tasks execution manager
ov::Version m_version; //!< Member contains plugin version
bool m_is_new_api; //!< A flag which shows used API
};
} // namespace ov
/**
* @def OV_CREATE_PLUGIN
* @brief Defines a name of a function creating plugin instance
* @ingroup ie_dev_api_plugin_api
*/
#ifndef OV_CREATE_PLUGIN
# define OV_CREATE_PLUGIN CreatePluginEngine
#endif
/**
* @def OV_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version)
* @brief Defines the exported `OV_CREATE_PLUGIN` function which is used to create a plugin instance
* @ingroup ov_dev_api_plugin_api
*/
#define OV_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \
OPENVINO_PLUGIN_API void OV_CREATE_PLUGIN(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false); \
void OV_CREATE_PLUGIN(::std::shared_ptr<::ov::IPlugin>& plugin) noexcept(false) { \
try { \
plugin = ::std::make_shared<PluginType>(__VA_ARGS__); \
plugin->set_version(version); \
} catch (const InferenceEngine::Exception& ex) { \
throw ov::Exception(ex.what()); \
} catch (const std::exception& ex) { \
throw ov::Exception(ex.what()); \
} \
}
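A plugin targeting the 2.0 interface derives from ov::IPlugin, overrides its pure virtual methods, and exports itself with OV_DEFINE_PLUGIN_CREATE_FUNCTION. A bare skeleton under those assumptions; MyPlugin and the version strings are placeholders, and the remaining compile_model/import_model/query_model/create_context overrides still need real bodies:

class MyPlugin : public ov::IPlugin {
public:
    void set_property(const ov::AnyMap& properties) override {
        // Merge user-supplied properties into the plugin configuration.
        for (const auto& item : properties)
            m_config[item.first] = item.second;
    }
    ov::Any get_property(const std::string& name, const ov::AnyMap& /*arguments*/) const override {
        auto it = m_config.find(name);
        OPENVINO_ASSERT(it != m_config.end(), "Unsupported property: ", name);
        return it->second;
    }
    // ... compile_model, import_model, query_model, create_context, get_default_context ...
private:
    ov::AnyMap m_config;
};

static const ov::Version version = {"custom_build", "my_ov_plugin"};
OV_DEFINE_PLUGIN_CREATE_FUNCTION(MyPlugin, version)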


@ -20,11 +20,17 @@
namespace InferenceEngine { namespace InferenceEngine {
class RemoteContext; class RemoteContext;
class IPluginWrapper;
class Core;
} // namespace InferenceEngine } // namespace InferenceEngine
namespace ov { namespace ov {
class Core; class Core;
class CoreImpl;
class Plugin;
class IPlugin;
class IInferencePluginWrapper;
class CompiledModel; class CompiledModel;
/** /**
@ -47,7 +53,13 @@ protected:
*/ */
RemoteContext(const std::shared_ptr<InferenceEngine::RemoteContext>& impl, RemoteContext(const std::shared_ptr<InferenceEngine::RemoteContext>& impl,
const std::vector<std::shared_ptr<void>>& so); const std::vector<std::shared_ptr<void>>& so);
friend class InferenceEngine::Core;
friend class InferenceEngine::IPluginWrapper;
friend class ov::Core; friend class ov::Core;
friend class ov::CoreImpl;
friend class ov::Plugin;
friend class ov::IPlugin;
friend class ov::IInferencePluginWrapper;
friend class ov::CompiledModel; friend class ov::CompiledModel;
public: public:


@ -9,14 +9,13 @@
#include "transformations/common_optimizations/dimension_tracking.hpp" #include "transformations/common_optimizations/dimension_tracking.hpp"
#include "transformations/init_node_info.hpp" #include "transformations/init_node_info.hpp"
namespace InferenceEngine { namespace ov {
namespace details { namespace details {
NetworkBatchAbility isNetworkBatchable(const CNNNetwork& orig_network, NetworkBatchAbility is_model_batchable(const std::shared_ptr<const ov::Model>& model,
const std::string& deviceNameWithoutBatch, const std::string& deviceNameWithoutBatch,
bool strictly_track_dims) { bool strictly_track_dims) {
CNNNetwork clonedNetwork(cloneNetwork(orig_network)); auto function = model->clone();
auto function = clonedNetwork.getFunction();
// find the batch dim // find the batch dim
ov::pass::Manager m; ov::pass::Manager m;
m.register_pass<ngraph::pass::InitNodeInfo>(); m.register_pass<ngraph::pass::InitNodeInfo>();
@ -49,12 +48,12 @@ NetworkBatchAbility isNetworkBatchable(const CNNNetwork& orig_network,
if (!any_batched_inputs) if (!any_batched_inputs)
return NetworkBatchAbility::NO; return NetworkBatchAbility::NO;
for (auto&& node : orig_network.getFunction()->get_ops()) for (auto&& node : model->get_ops())
node->get_rt_info()["affinity"] = "BATCH"; // default affinity (ignored if HETERO is not triggered) node->get_rt_info()["affinity"] = "BATCH"; // default affinity (ignored if HETERO is not triggered)
// have to execute the DetectionOutput separately (without batching) // have to execute the DetectionOutput separately (without batching)
// as this layer does mix-in the values from the different inputs (batch id) // as this layer does mix-in the values from the different inputs (batch id)
bool bDetectionOutput = false; bool bDetectionOutput = false;
for (auto& result_node : orig_network.getFunction()->get_results()) { for (auto& result_node : model->get_results()) {
auto do_node = result_node->input_value(0).get_node_shared_ptr(); auto do_node = result_node->input_value(0).get_node_shared_ptr();
std::shared_ptr<ov::Node> convert_node; std::shared_ptr<ov::Node> convert_node;
if (ov::is_type<ov::opset1::Convert>(do_node)) { // cases with do->convert->result if (ov::is_type<ov::opset1::Convert>(do_node)) { // cases with do->convert->result
@ -76,4 +75,4 @@ NetworkBatchAbility isNetworkBatchable(const CNNNetwork& orig_network,
} }
} // namespace details } // namespace details
} // namespace InferenceEngine } // namespace ov


@ -7,17 +7,17 @@
#include "cnn_network_ngraph_impl.hpp" #include "cnn_network_ngraph_impl.hpp"
namespace InferenceEngine { namespace ov {
namespace details { namespace details {
/** /**
* @brief Checks if the input network is batch-able (e.g. no dynamic inputs, inputs has the batch dimension, etc) * @brief Checks if the input network is batch-able (e.g. no dynamic inputs, inputs has the batch dimension, etc)
* @param function A ngraph function to check for automatic-batching applicability * @param function A ngraph function to check for automatic-batching applicability
* @return An enum value indicating whether the network can be safely batched (with HETERO or as is) or not * @return An enum value indicating whether the network can be safely batched (with HETERO or as is) or not
*/ */
enum NetworkBatchAbility : uint32_t { NO = 0, AS_IS, WITH_HETERO }; enum class NetworkBatchAbility : uint32_t { NO = 0, AS_IS, WITH_HETERO };
NetworkBatchAbility isNetworkBatchable(const CNNNetwork& network, NetworkBatchAbility is_model_batchable(const std::shared_ptr<const ov::Model>& model,
const std::string& deviceNoBatch, const std::string& deviceNoBatch,
bool strictly_track_dims); bool strictly_track_dims);
} // namespace details } // namespace details
} // namespace InferenceEngine } // namespace ov
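Call sites move from CNNNetwork to ov::Model accordingly, and the switch to enum class means the ability values must now be qualified. A hedged sketch of the new usage; the device name and flag value are illustrative:

void maybe_batch(const std::shared_ptr<const ov::Model>& model) {
    const auto ability = ov::details::is_model_batchable(model, "GPU", /*strictly_track_dims=*/false);
    // enum class: scoped names such as NetworkBatchAbility::AS_IS are now required.
    if (ability != ov::details::NetworkBatchAbility::NO) {
        // the model can be executed through the BATCH device, directly or via HETERO
    }
}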


@ -65,7 +65,7 @@ std::string NetworkCompilationContext::calculateFileInfo(const std::string& file
return std::to_string(seed); return std::to_string(seed);
} }
std::string NetworkCompilationContext::computeHash(const CNNNetwork& network, std::string NetworkCompilationContext::computeHash(CNNNetwork& network,
const std::map<std::string, std::string>& compileOptions) { const std::map<std::string, std::string>& compileOptions) {
OV_ITT_SCOPE(FIRST_INFERENCE, itt::domains::IE_LT, "NetworkCompilationContext::computeHash - CNN"); OV_ITT_SCOPE(FIRST_INFERENCE, itt::domains::IE_LT, "NetworkCompilationContext::computeHash - CNN");
@ -73,11 +73,10 @@ std::string NetworkCompilationContext::computeHash(const CNNNetwork& network,
uint64_t seed = 0; uint64_t seed = 0;
// 1. Calculate hash on function // 1. Calculate hash on function
CNNNetwork net(network);
ov::pass::Manager m; ov::pass::Manager m;
m.register_pass<ngraph::pass::FixRtInfo>(); m.register_pass<ngraph::pass::FixRtInfo>();
m.register_pass<ov::pass::Hash>(seed); m.register_pass<ov::pass::Hash>(seed);
m.run_passes(net.getFunction()); m.run_passes(network.getFunction());
// 2. Compute hash on serialized data and options // 2. Compute hash on serialized data and options
for (const auto& kvp : compileOptions) { for (const auto& kvp : compileOptions) {


@ -20,7 +20,7 @@ class CNNNetwork;
struct NetworkCompilationContext final { struct NetworkCompilationContext final {
static std::string calculateFileInfo(const std::string& filePath); static std::string calculateFileInfo(const std::string& filePath);
static std::string computeHash(const CNNNetwork& network, const std::map<std::string, std::string>& compileOptions); static std::string computeHash(CNNNetwork& network, const std::map<std::string, std::string>& compileOptions);
static std::string computeHash(const std::string& modelName, static std::string computeHash(const std::string& modelName,
const std::map<std::string, std::string>& compileOptions); const std::map<std::string, std::string>& compileOptions);


@ -7,6 +7,7 @@
#include "any_copy.hpp" #include "any_copy.hpp"
#include "cnn_network_ngraph_impl.hpp" #include "cnn_network_ngraph_impl.hpp"
#include "cpp/ie_plugin.hpp" #include "cpp/ie_plugin.hpp"
#include "dev/converter_utils.hpp"
#include "dev/core_impl.hpp" #include "dev/core_impl.hpp"
#include "ie_itt.hpp" #include "ie_itt.hpp"
#include "so_extension.hpp" #include "so_extension.hpp"
@ -49,7 +50,7 @@ Core::Core(const std::string& xmlConfigFile) {
_impl = std::make_shared<Impl>(); _impl = std::make_shared<Impl>();
#ifdef OPENVINO_STATIC_LIBRARY #ifdef OPENVINO_STATIC_LIBRARY
_impl->RegisterPluginsInRegistry(::getStaticPluginsRegistry()); _impl->register_plugins_in_registry(::getStaticPluginsRegistry());
#else #else
register_plugins(findPluginXML(xmlConfigFile)); register_plugins(findPluginXML(xmlConfigFile));
#endif #endif
@ -67,34 +68,18 @@ std::map<std::string, Version> Core::get_versions(const std::string& deviceName)
#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT #ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
std::shared_ptr<ov::Model> Core::read_model(const std::wstring& modelPath, const std::wstring& binPath) const { std::shared_ptr<ov::Model> Core::read_model(const std::wstring& modelPath, const std::wstring& binPath) const {
OV_CORE_CALL_STATEMENT( OV_CORE_CALL_STATEMENT(
return _impl->ReadNetwork(ov::util::wstring_to_string(modelPath), ov::util::wstring_to_string(binPath)) return _impl->read_model(ov::util::wstring_to_string(modelPath), ov::util::wstring_to_string(binPath)););
.getFunction(););
} }
#endif #endif
std::shared_ptr<ov::Model> Core::read_model(const std::string& modelPath, const std::string& binPath) const { std::shared_ptr<ov::Model> Core::read_model(const std::string& modelPath, const std::string& binPath) const {
OV_CORE_CALL_STATEMENT(return _impl->ReadNetwork(modelPath, binPath).getFunction();); OV_CORE_CALL_STATEMENT(return _impl->read_model(modelPath, binPath););
} }
std::shared_ptr<ov::Model> Core::read_model(const std::string& model, const ov::Tensor& weights) const { std::shared_ptr<ov::Model> Core::read_model(const std::string& model, const ov::Tensor& weights) const {
InferenceEngine::Blob::Ptr blob; OV_CORE_CALL_STATEMENT(return _impl->read_model(model, weights););
if (weights) {
blob = weights._impl;
}
OV_CORE_CALL_STATEMENT(return _impl->ReadNetwork(model, blob).getFunction(););
} }
namespace {
ie::CNNNetwork toCNN(const std::shared_ptr<const ngraph::Function>& model) {
return ie::CNNNetwork(
std::make_shared<ie::details::CNNNetworkNGraphImpl>(std::const_pointer_cast<ngraph::Function>(model),
std::vector<ie::IExtensionPtr>{},
true));
}
} // namespace
CompiledModel Core::compile_model(const std::shared_ptr<const ov::Model>& model, const AnyMap& config) { CompiledModel Core::compile_model(const std::shared_ptr<const ov::Model>& model, const AnyMap& config) {
return compile_model(model, ov::DEFAULT_DEVICE_NAME, config); return compile_model(model, ov::DEFAULT_DEVICE_NAME, config);
} }
@ -103,7 +88,7 @@ CompiledModel Core::compile_model(const std::shared_ptr<const ov::Model>& model,
const std::string& deviceName, const std::string& deviceName,
const AnyMap& config) { const AnyMap& config) {
OV_CORE_CALL_STATEMENT({ OV_CORE_CALL_STATEMENT({
auto exec = _impl->LoadNetwork(toCNN(model), deviceName, any_copy(flatten_sub_properties(deviceName, config))); auto exec = _impl->compile_model(model, deviceName, flatten_sub_properties(deviceName, config));
return {exec._ptr, exec._so}; return {exec._ptr, exec._so};
}); });
} }
@ -114,7 +99,7 @@ CompiledModel Core::compile_model(const std::string& modelPath, const AnyMap& co
CompiledModel Core::compile_model(const std::string& modelPath, const std::string& deviceName, const AnyMap& config) { CompiledModel Core::compile_model(const std::string& modelPath, const std::string& deviceName, const AnyMap& config) {
OV_CORE_CALL_STATEMENT({ OV_CORE_CALL_STATEMENT({
auto exec = _impl->LoadNetwork(modelPath, deviceName, any_copy(flatten_sub_properties(deviceName, config))); auto exec = _impl->compile_model(modelPath, deviceName, flatten_sub_properties(deviceName, config));
return {exec._ptr, exec._so}; return {exec._ptr, exec._so};
}); });
} }
@ -123,12 +108,8 @@ CompiledModel Core::compile_model(const std::string& model,
const ov::Tensor& weights, const ov::Tensor& weights,
const std::string& deviceName, const std::string& deviceName,
const AnyMap& config) { const AnyMap& config) {
InferenceEngine::Blob::Ptr blob;
if (weights) {
blob = weights._impl;
}
OV_CORE_CALL_STATEMENT({ OV_CORE_CALL_STATEMENT({
auto exec = _impl->LoadNetwork(model, blob, deviceName, any_copy(flatten_sub_properties(deviceName, config))); auto exec = _impl->compile_model(model, weights, deviceName, flatten_sub_properties(deviceName, config));
return {exec._ptr, exec._so}; return {exec._ptr, exec._so};
}); });
} }
@ -137,9 +118,7 @@ CompiledModel Core::compile_model(const std::shared_ptr<const ov::Model>& model,
const RemoteContext& context, const RemoteContext& context,
const AnyMap& config) { const AnyMap& config) {
OV_CORE_CALL_STATEMENT({ OV_CORE_CALL_STATEMENT({
auto exec = _impl->LoadNetwork(toCNN(model), auto exec = _impl->compile_model(model, context, flatten_sub_properties(context.get_device_name(), config));
context._impl,
any_copy(flatten_sub_properties(context.get_device_name(), config)));
return {exec._ptr, exec._so}; return {exec._ptr, exec._so};
}); });
} }
@ -187,13 +166,13 @@ void Core::add_extension(const std::shared_ptr<ov::Extension>& extension) {
add_extension(std::vector<std::shared_ptr<ov::Extension>>{extension}); add_extension(std::vector<std::shared_ptr<ov::Extension>>{extension});
} }
void Core::add_extension(const std::vector<std::shared_ptr<ov::Extension>>& extensions) { void Core::add_extension(const std::vector<std::shared_ptr<ov::Extension>>& extensions) {
OV_CORE_CALL_STATEMENT({ _impl->AddOVExtensions(extensions); }); OV_CORE_CALL_STATEMENT({ _impl->add_extension(extensions); });
} }
CompiledModel Core::import_model(std::istream& modelStream, const std::string& deviceName, const AnyMap& config) { CompiledModel Core::import_model(std::istream& modelStream, const std::string& deviceName, const AnyMap& config) {
OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model"); OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
OV_CORE_CALL_STATEMENT({ OV_CORE_CALL_STATEMENT({
auto exec = _impl->ImportNetwork(modelStream, deviceName, any_copy(flatten_sub_properties(deviceName, config))); auto exec = _impl->import_model(modelStream, deviceName, flatten_sub_properties(deviceName, config));
return {exec._ptr, exec._so}; return {exec._ptr, exec._so};
}); });
} }
@ -218,7 +197,7 @@ CompiledModel Core::import_model(std::istream& modelStream, const RemoteContext&
modelStream.seekg(currentPos, modelStream.beg); modelStream.seekg(currentPos, modelStream.beg);
OV_CORE_CALL_STATEMENT({ OV_CORE_CALL_STATEMENT({
auto exec = _impl->GetCPPPluginByName(deviceName).import_model(modelStream, {}); auto exec = _impl->get_plugin(deviceName).import_model(modelStream, {});
return {exec._ptr, exec._so}; return {exec._ptr, exec._so};
}); });
} }
@ -226,11 +205,7 @@ CompiledModel Core::import_model(std::istream& modelStream, const RemoteContext&
SupportedOpsMap Core::query_model(const std::shared_ptr<const ov::Model>& model, SupportedOpsMap Core::query_model(const std::shared_ptr<const ov::Model>& model,
const std::string& deviceName, const std::string& deviceName,
const AnyMap& config) const { const AnyMap& config) const {
OV_CORE_CALL_STATEMENT({ OV_CORE_CALL_STATEMENT(return _impl->query_model(model, deviceName, flatten_sub_properties(deviceName, config)););
auto qnResult =
_impl->QueryNetwork(toCNN(model), deviceName, any_copy(flatten_sub_properties(deviceName, config)));
return qnResult.supportedLayersMap;
});
} }
void Core::set_property(const AnyMap& properties) { void Core::set_property(const AnyMap& properties) {
@ -254,7 +229,7 @@ std::vector<std::string> Core::get_available_devices() const {
} }
void Core::register_plugin(const std::string& pluginName, const std::string& deviceName) { void Core::register_plugin(const std::string& pluginName, const std::string& deviceName) {
OV_CORE_CALL_STATEMENT(_impl->RegisterPluginByName(pluginName, deviceName);); OV_CORE_CALL_STATEMENT(_impl->register_plugin(pluginName, deviceName););
} }
void Core::unload_plugin(const std::string& deviceName) { void Core::unload_plugin(const std::string& deviceName) {
@ -262,12 +237,12 @@ void Core::unload_plugin(const std::string& deviceName) {
ie::DeviceIDParser parser(deviceName); ie::DeviceIDParser parser(deviceName);
std::string devName = parser.getDeviceName(); std::string devName = parser.getDeviceName();
_impl->UnloadPluginByName(devName); _impl->unload_plugin(devName);
}); });
} }
void Core::register_plugins(const std::string& xmlConfigFile) { void Core::register_plugins(const std::string& xmlConfigFile) {
OV_CORE_CALL_STATEMENT(_impl->RegisterPluginsInRegistry(xmlConfigFile);); OV_CORE_CALL_STATEMENT(_impl->register_plugins_in_registry(xmlConfigFile););
} }
RemoteContext Core::create_context(const std::string& deviceName, const AnyMap& params) { RemoteContext Core::create_context(const std::string& deviceName, const AnyMap& params) {
@ -278,8 +253,8 @@ RemoteContext Core::create_context(const std::string& deviceName, const AnyMap&
OV_CORE_CALL_STATEMENT({ OV_CORE_CALL_STATEMENT({
auto parsed = parseDeviceNameIntoConfig(deviceName, flatten_sub_properties(deviceName, params)); auto parsed = parseDeviceNameIntoConfig(deviceName, flatten_sub_properties(deviceName, params));
auto remoteContext = _impl->GetCPPPluginByName(parsed._deviceName).create_context(parsed._config); auto remoteContext = _impl->get_plugin(parsed._deviceName).create_context(parsed._config);
return {remoteContext._ptr, {remoteContext._so}}; return {remoteContext._impl, {remoteContext._so}};
}); });
} }
@ -291,8 +266,8 @@ RemoteContext Core::get_default_context(const std::string& deviceName) {
OV_CORE_CALL_STATEMENT({ OV_CORE_CALL_STATEMENT({
auto parsed = parseDeviceNameIntoConfig(deviceName, AnyMap{}); auto parsed = parseDeviceNameIntoConfig(deviceName, AnyMap{});
auto remoteContext = _impl->GetCPPPluginByName(parsed._deviceName).get_default_context(parsed._config); auto remoteContext = _impl->get_plugin(parsed._deviceName).get_default_context(parsed._config);
return {remoteContext._ptr, {remoteContext._so}}; return {remoteContext._impl, {remoteContext._so}};
}); });
} }
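None of this changes the public ov::Core surface; the calls above are simply re-routed from the legacy Impl methods to the new snake_case CoreImpl entry points (read_model, compile_model, import_model, query_model, get_plugin). A minimal end-to-end sketch of the user-facing path exercised by this refactor, with placeholder file and device names:

#include <openvino/runtime/core.hpp>

int main() {
    ov::Core core;
    std::shared_ptr<ov::Model> model = core.read_model("model.xml");   // -> CoreImpl::read_model
    ov::CompiledModel compiled = core.compile_model(model, "CPU");     // -> CoreImpl::compile_model
    ov::InferRequest request = compiled.create_infer_request();
    (void)request;
    return 0;
}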


@ -13,25 +13,28 @@
#include <memory> #include <memory>
#include <string> #include <string>
#include "file_utils.h"
#include "cpp/ie_cnn_network.h"
#include "cpp/exception2status.hpp"
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "so_ptr.hpp"
#include "openvino/runtime/common.hpp"
#include "any_copy.hpp" #include "any_copy.hpp"
#include "cpp/exception2status.hpp"
#include "cpp/ie_cnn_network.h"
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "file_utils.h"
#include "ie_plugin_config.hpp" #include "ie_plugin_config.hpp"
#include "openvino/runtime/common.hpp"
#include "so_ptr.hpp"
#if defined __GNUC__ #if defined __GNUC__
# pragma GCC diagnostic push # pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wreturn-type" # pragma GCC diagnostic ignored "-Wreturn-type"
#endif #endif
#define PLUGIN_CALL_STATEMENT(...) \
if (!_ptr) \
IE_THROW() << "Wrapper used in the PLUGIN_CALL_STATEMENT was not initialized."; \
try { \
__VA_ARGS__; \
} catch (...) { \
::InferenceEngine::details::Rethrow(); \
}
namespace InferenceEngine {
/**
@ -47,11 +50,11 @@ struct InferencePlugin {
_ptr = {};
}
void SetName(const std::string& deviceName) {
PLUGIN_CALL_STATEMENT(_ptr->SetName(deviceName));
}
void SetCore(std::weak_ptr<InferenceEngine::ICore> core) {
PLUGIN_CALL_STATEMENT(_ptr->SetCore(core));
}
@ -67,17 +70,19 @@ struct InferencePlugin {
PLUGIN_CALL_STATEMENT(_ptr->SetConfig(config));
}
ov::SoPtr<IExecutableNetworkInternal> LoadNetwork(const CNNNetwork& network,
const std::map<std::string, std::string>& config) {
PLUGIN_CALL_STATEMENT(return {_ptr->LoadNetwork(network, config), _so});
}
ov::SoPtr<IExecutableNetworkInternal> LoadNetwork(const CNNNetwork& network,
const std::shared_ptr<RemoteContext>& context,
const std::map<std::string, std::string>& config) {
PLUGIN_CALL_STATEMENT(return {_ptr->LoadNetwork(network, config, context), _so});
}
ov::SoPtr<IExecutableNetworkInternal> LoadNetwork(const std::string& modelPath,
const std::map<std::string, std::string>& config) {
ov::SoPtr<IExecutableNetworkInternal> res;
PLUGIN_CALL_STATEMENT(res = _ptr->LoadNetwork(modelPath, config));
if (!res._so)
@ -85,27 +90,27 @@ struct InferencePlugin {
return res;
}
QueryNetworkResult QueryNetwork(const CNNNetwork& network, const std::map<std::string, std::string>& config) const {
QueryNetworkResult res;
PLUGIN_CALL_STATEMENT(res = _ptr->QueryNetwork(network, config));
if (res.rc != OK)
IE_THROW() << res.resp.msg;
return res;
}
ov::SoPtr<IExecutableNetworkInternal> ImportNetwork(const std::string& modelFileName,
const std::map<std::string, std::string>& config) {
PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(modelFileName, config), _so});
}
ov::SoPtr<IExecutableNetworkInternal> ImportNetwork(std::istream& networkModel,
const std::map<std::string, std::string>& config) {
PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(networkModel, config), _so});
}
ov::SoPtr<IExecutableNetworkInternal> ImportNetwork(std::istream& networkModel,
const std::shared_ptr<RemoteContext>& context,
const std::map<std::string, std::string>& config) {
PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(networkModel, context, config), _so});
}
@ -127,174 +132,8 @@ struct InferencePlugin {
};
} // namespace InferenceEngine
#if defined __GNUC__
# pragma GCC diagnostic pop
#endif
namespace ov {
#define OV_PLUGIN_CALL_STATEMENT(...) \
OPENVINO_ASSERT(_ptr != nullptr, "InferencePlugin was not initialized."); \
try { \
__VA_ARGS__; \
} catch (...) { \
::InferenceEngine::details::Rethrow(); \
}
/**
* @brief This class is a C++ API wrapper for IInferencePlugin.
*
* It can throw exceptions safely for the application, where it is properly handled.
*/
class InferencePlugin {
std::shared_ptr<ie::IInferencePlugin> _ptr;
std::shared_ptr<void> _so;
public:
InferencePlugin() = default;
~InferencePlugin() {
_ptr = {};
}
InferencePlugin(const std::shared_ptr<ie::IInferencePlugin>& ptr, const std::shared_ptr<void>& so) :
_ptr{ptr},
_so{so} {
OPENVINO_ASSERT(_ptr != nullptr, "InferencePlugin was not initialized.");
}
void set_name(const std::string& deviceName) {
OV_PLUGIN_CALL_STATEMENT(_ptr->SetName(deviceName));
}
void set_core(std::weak_ptr<ICore> core) {
OV_PLUGIN_CALL_STATEMENT(_ptr->SetCore(core));
}
const ie::Version get_version() const {
OV_PLUGIN_CALL_STATEMENT(return _ptr->GetVersion());
}
void add_extension(const ie::IExtensionPtr& extension) {
OV_PLUGIN_CALL_STATEMENT(_ptr->AddExtension(extension));
}
void set_config(const std::map<std::string, std::string>& config) {
OV_PLUGIN_CALL_STATEMENT(_ptr->SetConfig(config));
}
void set_properties(const ov::AnyMap& config) {
OV_PLUGIN_CALL_STATEMENT(_ptr->SetProperties(config));
}
SoPtr<ie::IExecutableNetworkInternal> compile_model(const ie::CNNNetwork& network,
const std::map<std::string, std::string>& config) {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->LoadNetwork(network, config), _so});
}
SoPtr<ie::IExecutableNetworkInternal> compile_model(const ie::CNNNetwork& network,
const std::shared_ptr<ie::RemoteContext>& context,
const std::map<std::string, std::string>& config) {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->LoadNetwork(network, config, context), _so});
}
SoPtr<ie::IExecutableNetworkInternal> compile_model(const std::string& modelPath, const std::map<std::string, std::string>& config) {
SoPtr<ie::IExecutableNetworkInternal> res;
OV_PLUGIN_CALL_STATEMENT(res = _ptr->LoadNetwork(modelPath, config));
if (!res._so)
res._so = _so;
return res;
}
ie::QueryNetworkResult query_model(const ie::CNNNetwork& network,
const std::map<std::string, std::string>& config) const {
ie::QueryNetworkResult res;
OV_PLUGIN_CALL_STATEMENT(res = _ptr->QueryNetwork(network, config));
OPENVINO_ASSERT(res.rc == ie::OK, res.resp.msg);
return res;
}
SoPtr<ie::IExecutableNetworkInternal> import_model(const std::string& modelFileName,
const std::map<std::string, std::string>& config) {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(modelFileName, config), _so});
}
SoPtr<ie::IExecutableNetworkInternal> import_model(std::istream& networkModel,
const std::map<std::string, std::string>& config) {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(networkModel, config), _so});
}
SoPtr<ie::IExecutableNetworkInternal> import_model(std::istream& networkModel,
const std::shared_ptr<ie::RemoteContext>& context,
const std::map<std::string, std::string>& config) {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->ImportNetwork(networkModel, context, config), _so});
}
Any get_metric(const std::string& name, const AnyMap& options) const {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetMetric(name, options), {_so}});
}
SoPtr<ie::RemoteContext> create_context(const AnyMap& params) {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->CreateContext(params), _so});
}
SoPtr<ie::RemoteContext> get_default_context(const AnyMap& params) {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetDefaultContext(params), _so});
}
Any get_config(const std::string& name, const AnyMap& options) const {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetConfig(name, options), {_so}});
}
Any get_property(const std::string& name, const AnyMap& arguments) const {
OV_PLUGIN_CALL_STATEMENT({
if (ov::supported_properties == name) {
try {
return {_ptr->GetMetric(name, arguments), {_so}};
} catch (ie::Exception&) {
std::vector<ov::PropertyName> supported_properties;
try {
auto ro_properties = _ptr->GetMetric(METRIC_KEY(SUPPORTED_METRICS), arguments)
.as<std::vector<std::string>>();
for (auto&& ro_property : ro_properties) {
if (ro_property != METRIC_KEY(SUPPORTED_METRICS) &&
ro_property != METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
supported_properties.emplace_back(ro_property, PropertyMutability::RO);
}
}
} catch (ie::Exception&) {}
try {
auto rw_properties = _ptr->GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), arguments)
.as<std::vector<std::string>>();
for (auto&& rw_property : rw_properties) {
supported_properties.emplace_back(rw_property, PropertyMutability::RW);
}
} catch (ie::Exception&) {}
supported_properties.emplace_back(ov::supported_properties.name(), PropertyMutability::RO);
return supported_properties;
}
}
try {
return {_ptr->GetMetric(name, arguments), {_so}};
} catch (ie::Exception&) {
return {_ptr->GetConfig(name, arguments), {_so}};
}
});
}
template <typename T, PropertyMutability M>
T get_property(const ov::Property<T, M>& property) const {
return get_property(property.name(), {}).template as<T>();
}
template <typename T, PropertyMutability M>
T get_property(const ov::Property<T, M>& property, const AnyMap& arguments) const {
return get_property(property.name(), arguments).template as<T>();
}
};
} // namespace ov
#undef PLUGIN_CALL_STATEMENT
#undef OV_PLUGIN_CALL_STATEMENT

View File

@ -13,6 +13,7 @@
#include <istream>
#include <map>
#include <memory>
#include <openvino/runtime/remote_context.hpp>
#include <string>
#include <transformations/common_optimizations/fused_names_cleanup.hpp>
#include <unordered_set>
@ -21,6 +22,7 @@
#include "blob_factory.hpp"
#include "cnn_network_ngraph_impl.hpp"
#include "cpp/ie_cnn_network.h"
#include "dev/converter_utils.hpp"
#include "exec_graph_info.hpp"
#include "ie_algorithm.hpp"
#include "ie_api.h"
@ -529,4 +531,8 @@ void SetExeNetworkInfo(const std::shared_ptr<IExecutableNetworkInternal>& exeNet
exeNetwork->setOutputs(const_results);
}
std::shared_ptr<::ov::IPlugin> convert_plugin(const std::shared_ptr<InferenceEngine::IInferencePlugin>& from) {
return ov::legacy_convert::convert_plugin(from);
}
} // namespace InferenceEngine

View File

@ -0,0 +1,329 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "converter_utils.hpp"
#include <ie_blob.h>
#include <ie_common.h>
#include <ie_compound_blob.h>
#include <ie_layouts.h>
#include <fstream>
#include <ie_input_info.hpp>
#include <ie_version.hpp>
#include <memory>
#include <openvino/core/except.hpp>
#include <openvino/op/parameter.hpp>
#include <openvino/runtime/exception.hpp>
#include <openvino/runtime/remote_context.hpp>
#include <openvino/runtime/tensor.hpp>
#include "any_copy.hpp"
#include "cnn_network_ngraph_impl.hpp"
#include "cpp/ie_plugin.hpp"
#include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp"
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "ie_icore.hpp"
#include "ie_ngraph_utils.hpp"
#include "iplugin_wrapper.hpp"
#include "openvino/runtime/iplugin.hpp"
#include "so_ptr.hpp"
#include "transformations/utils/utils.hpp"
namespace {
void fill_input_info(ov::Output<ov::Node>& input, InferenceEngine::InputInfo::Ptr& input_info) {
const ov::Output<const ov::Node> const_input(input.get_node(), input.get_index());
ov::legacy_convert::fill_input_info(const_input, input_info);
auto& rt_info = input.get_rt_info();
auto it = rt_info.find("ie_legacy_preproc");
if (it != rt_info.end()) {
rt_info.erase(it);
}
it = rt_info.find("ie_legacy_td");
if (it != rt_info.end()) {
rt_info.erase(it);
}
}
void fill_output_info(ov::Output<ov::Node>& input, InferenceEngine::DataPtr& output_info) {
const ov::Output<const ov::Node> const_input(input.get_node(), input.get_index());
ov::legacy_convert::fill_output_info(const_input, output_info);
auto& rt_info = input.get_rt_info();
auto it = rt_info.find("ie_legacy_td");
if (it != rt_info.end()) {
rt_info.erase(it);
}
}
InferenceEngine::SizeVector get_dims(const ov::Output<const ov::Node>& port,
const std::function<bool(InferenceEngine::SizeVector& dims)>& callback = {}) {
InferenceEngine::SizeVector dims = {};
const auto& p_shape = port.get_partial_shape();
if (p_shape.is_static())
dims = p_shape.get_shape();
else {
if (!callback || !callback(dims)) {
if (p_shape.rank().is_static()) {
for (size_t i = 0; i < static_cast<size_t>(p_shape.rank().get_length()); i++) {
dims.emplace_back(0);
}
}
}
}
return dims;
}
} // namespace
void ov::legacy_convert::fill_input_info(const ov::Output<const ov::Node>& input,
InferenceEngine::InputInfo::Ptr& input_info) {
if (!input_info) {
// Create input info
auto param_name = input.get_node()->get_friendly_name();
auto dims = get_dims(input, [&](InferenceEngine::SizeVector& dims) -> bool {
auto param = std::dynamic_pointer_cast<const ov::op::v0::Parameter>(input.get_node_shared_ptr());
if (param && param->get_partial_shape().is_static()) {
dims = param->get_partial_shape().get_shape();
return true;
}
return false;
});
InferenceEngine::TensorDesc desc(InferenceEngine::details::convertPrecision(input.get_element_type()),
dims,
InferenceEngine::TensorDesc::getLayoutByDims(dims));
auto data = std::make_shared<InferenceEngine::Data>(param_name, desc);
input_info = std::make_shared<InferenceEngine::InputInfo>();
input_info->setInputData(data);
}
auto& rt_info = input.get_rt_info();
auto it = rt_info.find("ie_legacy_preproc");
if (it != rt_info.end()) {
input_info->getPreProcess() = it->second.as<InferenceEngine::PreProcessInfo>();
}
it = rt_info.find("ie_legacy_td");
if (it != rt_info.end()) {
auto td = it->second.as<InferenceEngine::TensorDesc>();
input_info->getInputData()->reshape(td.getDims(), td.getLayout());
input_info->setPrecision(td.getPrecision());
}
}
void ov::legacy_convert::fill_output_info(const ov::Output<const ov::Node>& output,
InferenceEngine::DataPtr& output_info) {
if (!output_info) {
// Create output info
const auto& res_name = ov::op::util::create_ie_output_name(output);
auto dims = get_dims(output);
InferenceEngine::TensorDesc desc(InferenceEngine::details::convertPrecision(output.get_element_type()),
dims,
InferenceEngine::TensorDesc::getLayoutByDims(dims));
output_info = std::make_shared<InferenceEngine::Data>(res_name, desc);
}
auto& rt_info = output.get_rt_info();
auto it = rt_info.find("ie_legacy_td");
if (it != rt_info.end()) {
auto td = it->second.as<InferenceEngine::TensorDesc>();
output_info->reshape(td.getDims(), td.getLayout());
output_info->setPrecision(td.getPrecision());
}
}
InferenceEngine::CNNNetwork ov::legacy_convert::convert_model(const std::shared_ptr<const ov::Model>& model,
bool is_new_api) {
auto network = InferenceEngine::CNNNetwork(std::shared_ptr<InferenceEngine::ICNNNetwork>(
new InferenceEngine::details::CNNNetworkNGraphImpl(model->clone(), {}, is_new_api)));
std::shared_ptr<ov::Model> cloned_model = network.getFunction();
for (auto&& input : cloned_model->inputs()) {
auto param_name = input.get_node()->get_friendly_name();
OPENVINO_ASSERT(network.getInputsInfo().find(param_name) != network.getInputsInfo().end());
auto input_info = network.getInputsInfo()[param_name];
::fill_input_info(input, input_info);
}
for (auto&& result : cloned_model->get_results()) {
auto output = result->input_value(0);
const auto& res_name = ov::op::util::create_ie_output_name(output);
OPENVINO_ASSERT(network.getOutputsInfo().find(res_name) != network.getOutputsInfo().end());
auto output_info = network.getOutputsInfo()[res_name];
::fill_output_info(output, output_info);
}
return network;
}
std::shared_ptr<const ov::Model> ov::legacy_convert::convert_model(const InferenceEngine::CNNNetwork& network,
bool is_new_api) {
OPENVINO_ASSERT(network.getFunction(),
"CNNNetwork can be converted to OpenVINO Model only in case if it contains ngraph::Function");
if (is_new_api)
return network.getFunction();
auto cloned_model = network.getFunction()->clone();
for (auto&& input : cloned_model->inputs()) {
auto param_name = input.get_node()->get_friendly_name();
OPENVINO_ASSERT(network.getInputsInfo().find(param_name) != network.getInputsInfo().end());
auto input_info = network.getInputsInfo().at(param_name);
auto& rt_info = input.get_rt_info();
rt_info["ie_legacy_preproc"] = input_info->getPreProcess();
rt_info["ie_legacy_td"] = input_info->getTensorDesc();
}
for (auto&& result : cloned_model->get_results()) {
auto output = result->input_value(0);
const auto& res_name = ov::op::util::create_ie_output_name(output);
OPENVINO_ASSERT(network.getOutputsInfo().find(res_name) != network.getOutputsInfo().end());
auto output_info = network.getOutputsInfo().at(res_name);
auto& rt_info = output.get_rt_info();
rt_info["ie_legacy_td"] = output_info->getTensorDesc();
}
return cloned_model;
}
namespace ov {
class IInferencePluginWrapper : public InferenceEngine::IInferencePlugin {
public:
IInferencePluginWrapper(const std::shared_ptr<ov::IPlugin>& plugin) : m_plugin(plugin) {
auto& ver = plugin->get_version();
InferenceEngine::Version version;
version.buildNumber = ver.buildNumber;
version.description = ver.description;
SetVersion(version);
_isNewAPI = plugin->is_new_api();
_executorManager = plugin->get_executor_manager();
}
std::string GetName() const noexcept override {
return m_plugin->get_device_name();
}
void SetName(const std::string& name) noexcept override {
m_plugin->set_device_name(name);
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> LoadNetwork(
const InferenceEngine::CNNNetwork& network,
const std::map<std::string, std::string>& config) override {
return m_plugin->compile_model(ov::legacy_convert::convert_model(network, m_plugin->is_new_api()),
ov::any_copy(config));
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> LoadNetwork(
const InferenceEngine::CNNNetwork& network,
const std::map<std::string, std::string>& config,
const std::shared_ptr<InferenceEngine::RemoteContext>& context) override {
return m_plugin->compile_model(ov::legacy_convert::convert_model(network, m_plugin->is_new_api()),
ov::any_copy(config),
ov::RemoteContext{context, {}});
}
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> LoadNetwork(
const std::string& modelPath,
const std::map<std::string, std::string>& config) override {
return ov::SoPtr<InferenceEngine::IExecutableNetworkInternal>(
m_plugin->compile_model(modelPath, ov::any_copy(config)),
{});
}
void AddExtension(const std::shared_ptr<InferenceEngine::IExtension>& extension) override {
m_plugin->add_extension(extension);
}
void SetConfig(const std::map<std::string, std::string>& config) override {
m_plugin->set_property(ov::any_copy(config));
}
void SetProperties(const ov::AnyMap& config) override {
m_plugin->set_property(config);
}
InferenceEngine::Parameter GetConfig(
const std::string& name,
const std::map<std::string, InferenceEngine::Parameter>& options) const override {
return m_plugin->get_property(name, options);
}
InferenceEngine::Parameter GetMetric(
const std::string& name,
const std::map<std::string, InferenceEngine::Parameter>& options) const override {
return m_plugin->get_property(name, options);
}
std::shared_ptr<InferenceEngine::RemoteContext> CreateContext(const InferenceEngine::ParamMap& params) override {
return m_plugin->create_context(params)._impl;
}
std::shared_ptr<InferenceEngine::RemoteContext> GetDefaultContext(
const InferenceEngine::ParamMap& params) override {
return m_plugin->get_default_context(params)._impl;
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> ImportNetwork(
const std::string& modelFileName,
const std::map<std::string, std::string>& config) override {
std::ifstream model(modelFileName, std::ios::binary);
return m_plugin->import_model(model, ov::any_copy(config));
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> ImportNetwork(
std::istream& networkModel,
const std::map<std::string, std::string>& config) override {
return m_plugin->import_model(networkModel, ov::any_copy(config));
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> ImportNetwork(
std::istream& networkModel,
const std::shared_ptr<InferenceEngine::RemoteContext>& context,
const std::map<std::string, std::string>& config) override {
return m_plugin->import_model(networkModel, ov::RemoteContext{context, {}}, ov::any_copy(config));
}
void SetCore(std::weak_ptr<InferenceEngine::ICore> core) override {
return m_plugin->set_core(std::dynamic_pointer_cast<ov::ICore>(core));
}
std::shared_ptr<InferenceEngine::ICore> GetCore() const noexcept override {
auto core = m_plugin->get_core();
return std::dynamic_pointer_cast<InferenceEngine::ICore>(core);
}
InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::CNNNetwork& network,
const std::map<std::string, std::string>& config) const override {
auto res = m_plugin->query_model(ov::legacy_convert::convert_model(network, m_plugin->is_new_api()),
ov::any_copy(config));
ie::QueryNetworkResult ret;
if (!network.getFunction() || res.empty()) {
ret.rc = InferenceEngine::GENERAL_ERROR;
return ret;
}
ret.supportedLayersMap = res;
return ret;
}
std::shared_ptr<ov::IPlugin> get_plugin() {
return m_plugin;
}
private:
std::shared_ptr<ov::IPlugin> m_plugin;
};
} // namespace ov
std::shared_ptr<::InferenceEngine::IInferencePlugin> ov::legacy_convert::convert_plugin(
const std::shared_ptr<::ov::IPlugin>& plugin) {
if (auto wrapper = std::dynamic_pointer_cast<InferenceEngine::IPluginWrapper>(plugin))
return wrapper->get_plugin();
return std::make_shared<ov::IInferencePluginWrapper>(plugin);
}
std::shared_ptr<::ov::IPlugin> ov::legacy_convert::convert_plugin(
const std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) {
std::shared_ptr<::ov::IPlugin> ov_plugin(new ::InferenceEngine::IPluginWrapper(plugin));
return ov_plugin;
}
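A minimal usage sketch (not part of the patch; `legacy_plugin` is a hypothetical InferenceEngine::IInferencePlugin instance used only for illustration) of how the two convert_plugin overloads defined above are expected to round-trip a plugin handle:
#include "dev/converter_utils.hpp"
#include "openvino/core/except.hpp"
void convert_plugin_round_trip(const std::shared_ptr<InferenceEngine::IInferencePlugin>& legacy_plugin) {
    // Wrap the legacy plugin into the new interface (an InferenceEngine::IPluginWrapper underneath).
    std::shared_ptr<ov::IPlugin> new_plugin = ov::legacy_convert::convert_plugin(legacy_plugin);
    // Converting back detects the wrapper and unwraps it, so the original object is returned.
    std::shared_ptr<InferenceEngine::IInferencePlugin> restored = ov::legacy_convert::convert_plugin(new_plugin);
    OPENVINO_ASSERT(restored == legacy_plugin);
}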

View File

@ -0,0 +1,26 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "cpp/ie_cnn_network.h"
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "openvino/core/model.hpp"
#include "openvino/runtime/iplugin.hpp"
namespace ov {
namespace legacy_convert {
void fill_input_info(const ov::Output<const ov::Node>& input, InferenceEngine::InputInfo::Ptr& inputInfo);
void fill_output_info(const ov::Output<const ov::Node>& output, InferenceEngine::DataPtr& outputInfo);
InferenceEngine::CNNNetwork convert_model(const std::shared_ptr<const ov::Model>& model, bool is_new_api);
std::shared_ptr<const ov::Model> convert_model(const InferenceEngine::CNNNetwork& model, bool is_new_api);
std::shared_ptr<::InferenceEngine::IInferencePlugin> convert_plugin(const std::shared_ptr<::ov::IPlugin>& plugin);
std::shared_ptr<::ov::IPlugin> convert_plugin(const std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin);
} // namespace legacy_convert
} // namespace ov

File diff suppressed because it is too large

View File

@ -4,14 +4,19 @@
#pragma once
#include <cpp/ie_cnn_network.h>
#include <ie_remote_context.hpp>
#include "any_copy.hpp"
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "dev/plugin.hpp"
#include "ie_cache_guard.hpp"
#include "ie_cache_manager.hpp"
#include "ie_extension.h"
#include "ie_icore.hpp"
#include "multi-device/multi_device_config.hpp"
#include "openvino/core/any.hpp"
#include "openvino/core/extension.hpp"
#include "openvino/core/version.hpp"
#include "openvino/runtime/common.hpp"
@ -72,7 +77,7 @@ ov::AnyMap flatten_sub_properties(const std::string& device, const ov::AnyMap& p
class CoreImpl : public InferenceEngine::ICore, public std::enable_shared_from_this<InferenceEngine::ICore> {
private:
mutable std::map<std::string, ov::Plugin> plugins;
// Mutex is needed to prevent changes of dev mutexes map from different threads
mutable std::mutex global_mutex;
// Global mutex "" locks parallel access to pluginRegistry and plugins
@ -91,22 +96,22 @@ private:
bool flag_allow_auto_batching = true;
void set_and_update(ov::AnyMap& config);
void set_cache_dir_for_device(const std::string& dir, const std::string& name);
std::string get_cache_dir() const;
// Creating thread-safe copy of config including shared_ptr to ICacheManager
// Passing empty or not-existing name will return global cache config
CacheConfig get_cache_config_for_device(const std::string& device_name,
bool device_supports_cache_dir,
ov::AnyMap& parsedConfig) const;
CacheConfig get_cache_config_for_device(const std::string& device_name) const;
private:
static void fill_config(CacheConfig& config, const std::string& dir);
mutable std::mutex _cacheConfigMutex;
CacheConfig _cacheConfig;
@ -126,7 +131,9 @@ private:
// Core settings (cache config, etc)
CoreConfig coreConfig;
Any get_property_for_core(const std::string& name) const;
mutable InferenceEngine::CacheGuard cacheGuard;
struct PluginDescriptor {
ov::util::FilePath libraryLocation;
@ -162,9 +169,51 @@ private:
std::map<std::string, PluginDescriptor> pluginRegistry;
const bool m_new_api;
void AddExtensionUnsafe(const InferenceEngine::IExtensionPtr& extension) const;
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model_impl(
const std::shared_ptr<const ov::Model>& model,
ov::Plugin& plugin,
const ov::AnyMap& parsedConfig,
const ov::RemoteContext& context,
const CacheContent& cacheContent,
bool forceDisableCache = false) const;
static ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> load_model_from_cache(
const CacheContent& cacheContent,
ov::Plugin& plugin,
const ov::AnyMap& config,
const ov::RemoteContext& context,
bool& networkIsImported);
bool device_supports_import_export(const ov::Plugin& plugin) const;
bool device_supports_property(const ov::Plugin& plugin, const std::string& key) const;
bool device_supports_cache_dir(const ov::Plugin& plugin) const;
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(ov::Plugin& plugin,
const std::shared_ptr<const ov::Model>& model,
const ov::RemoteContext& context,
const ov::AnyMap& config) const;
std::map<std::string, std::string> create_compile_config(const ov::Plugin& plugin,
const std::string& deviceFamily,
const ov::AnyMap& origConfig) const;
std::string calculate_file_hash(const std::string& modelName,
const std::string& deviceFamily,
const ov::Plugin& plugin,
const ov::AnyMap& config) const;
std::string calculate_memory_hash(const std::string& modelStr,
const ov::Tensor& weights,
const std::string& deviceFamily,
const ov::Plugin& plugin,
const ov::AnyMap& config) const;
// Legacy API
void AddExtensionUnsafe(const InferenceEngine::IExtensionPtr& extension) const;
template <typename C, typename = FileUtils::enableIfSupportedChar<C>>
void TryToRegisterLibraryAsExtensionUnsafe(const std::basic_string<C>& path) const {
try {
@ -174,6 +223,23 @@ private:
// in case of shared library is not opened
}
}
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> LoadNetworkImpl(
const InferenceEngine::CNNNetwork& model,
ov::Plugin& plugin,
const std::map<std::string, std::string>& parsedConfig,
const InferenceEngine::RemoteContext::Ptr& context,
const CacheContent& cacheContent,
bool forceDisableCache = false);
std::string CalculateNetworkHash(const InferenceEngine::CNNNetwork& network,
const std::string& deviceFamily,
const ov::Plugin& plugin,
const ov::AnyMap& config) const;
std::string CalculateNetworkHash(InferenceEngine::CNNNetwork& network,
const std::string& deviceFamily,
const ov::Plugin& plugin,
const ov::AnyMap& config) const;
public:
CoreImpl(bool _newAPI);
@ -185,7 +251,13 @@ public:
* @note The function supports UNICODE path
* @param xmlConfigFile An .xml configuration with device / plugin information
*/
void register_plugins_in_registry(const std::string& xmlConfigFile);
void apply_auto_batching(const std::shared_ptr<const ov::Model>& model,
std::string& deviceName,
ov::AnyMap& config) const;
void clean_properties(std::string& deviceName, ov::AnyMap& config, ov::Any property) const;
#ifdef OPENVINO_STATIC_LIBRARY
@ -194,7 +266,7 @@ public:
* @note The function supports UNICODE path
* @param static_registry a statically defined configuration with device / plugin information
*/
void register_plugins_in_registry(const decltype(::getStaticPluginsRegistry())& static_registry) {
std::lock_guard<std::mutex> lock(get_mutex());
for (const auto& plugin : static_registry) {
@ -233,12 +305,6 @@ public:
const std::shared_ptr<InferenceEngine::RemoteContext>& context,
const std::map<std::string, std::string>& config) override;
void ApplyAutoBatching(const InferenceEngine::CNNNetwork& network,
std::string& deviceName,
std::map<std::string, std::string>& config);
void CleanUpProperties(std::string& deviceName, std::map<std::string, std::string>& config, ov::Any property);
InferenceEngine::SoExecutableNetworkInternal LoadNetwork(const InferenceEngine::CNNNetwork& network,
const std::string& deviceNameOrig,
const std::map<std::string, std::string>& config) override;
@ -267,12 +333,6 @@ public:
Any GetMetric(const std::string& deviceName, const std::string& name, const AnyMap& options = {}) const override;
void set_property(const std::string& device_name, const AnyMap& properties) override;
Any get_property_for_core(const std::string& name) const;
Any get_property(const std::string& device_name, const std::string& name, const AnyMap& arguments) const override;
Any GetConfig(const std::string& deviceName, const std::string& name) const override;
/**
@ -293,48 +353,6 @@ public:
InferenceEngine::RemoteContext::Ptr CreateContext(const std::string& deviceName,
const InferenceEngine::ParamMap& params) override;
/**
* @brief Returns reference to CPP plugin wrapper by a device name
* @param deviceName A name of device
* @return Reference to a CPP plugin wrapper
*/
ov::InferencePlugin GetCPPPluginByName(const std::string& pluginName) const;
/**
* @brief Unload plugin for specified device, but plugin meta-data is still in plugin registry
* @param deviceName A name of device
*/
void UnloadPluginByName(const std::string& deviceName);
/**
* @brief Registers plugin meta-data in registry for specified device
* @param deviceName A name of device
*/
void RegisterPluginByName(const std::string& pluginName, const std::string& deviceName);
/**
* @brief Provides a list of plugin names in registry; physically such plugins may not be created
* @return A list of plugin names
*/
std::vector<std::string> GetListOfDevicesInRegistry() const;
/**
* @brief Sets config values for a plugin or set of plugins
* @param deviceName A device name to set config to
* If empty, config is set for all the plugins / plugin's meta-data
* @note `deviceName` is not allowed in form of MULTI:CPU, HETERO:GPU,CPU, AUTO:CPU
* just simple forms like CPU, GPU, MULTI, GPU.0, etc
*/
void SetConfigForPlugins(const ov::AnyMap& configMap, const std::string& deviceName);
/**
* @brief Get device config it is passed as pair of device_name and `AnyMap`
* @param configs All set of configs
* @note `device_name` is not allowed in form of MULTI:CPU, HETERO:GPU,CPU, AUTO:CPU
* just simple forms like CPU, GPU, MULTI, GPU.0, etc
*/
void ExtractAndSetDeviceConfig(const ov::AnyMap& configs);
std::map<std::string, std::string> GetSupportedConfig(const std::string& deviceName,
const std::map<std::string, std::string>& configs) override;
@ -344,60 +362,121 @@ public:
*/
void AddExtension(const InferenceEngine::IExtensionPtr& extension);
void AddOVExtensions(const std::vector<ov::Extension::Ptr>& extensions);
/**
* @brief Provides a list of extensions
* @return A list of registered extensions
*/
const std::vector<InferenceEngine::IExtensionPtr>& GetExtensions() const;
bool DeviceSupportsImportExport(const std::string& deviceName) const override;
std::map<std::string, InferenceEngine::Version> GetVersions(const std::string& deviceName) const;
// Common API
/**
* @brief Returns reference to CPP plugin wrapper by a device name
* @param deviceName A name of device
* @return Reference to a CPP plugin wrapper
*/
ov::Plugin get_plugin(const std::string& pluginName) const;
/**
* @brief Unload plugin for specified device, but plugin meta-data is still in plugin registry
* @param deviceName A name of device
*/
void unload_plugin(const std::string& deviceName);
/**
* @brief Registers plugin meta-data in registry for specified device
* @param deviceName A name of device
*/
void register_plugin(const std::string& pluginName, const std::string& deviceName);
/**
* @brief Provides a list of plugin names in registry; physically such plugins may not be created
* @return A list of plugin names
*/
std::vector<std::string> get_registered_devices() const;
/**
* @brief Sets config values for a plugin or set of plugins
* @param deviceName A device name to set config to
* If empty, config is set for all the plugins / plugin's meta-data
* @note `deviceName` is not allowed in form of MULTI:CPU, HETERO:GPU,CPU, AUTO:CPU
* just simple forms like CPU, GPU, MULTI, GPU.0, etc
*/
void set_property_for_devivce(const ov::AnyMap& configMap, const std::string& deviceName);
void add_extension(const std::vector<ov::Extension::Ptr>& extensions);
bool device_supports_import_export(const std::string& deviceName) const;
// ov::ICore
std::shared_ptr<ov::Model> read_model(const std::string& model,
const ov::Tensor& weights,
bool frontend_mode = false) const override;
std::shared_ptr<ov::Model> read_model(const std::string& model_path, const std::string& bin_path) const override;
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config = {}) const override;
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::shared_ptr<const ov::Model>& model,
const ov::RemoteContext& context,
const ov::AnyMap& config = {}) const override;
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::string& model_path,
const std::string& device_name,
const ov::AnyMap& config) const override;
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::string& model_str,
const ov::Tensor& weights,
const std::string& device_name,
const ov::AnyMap& config) const override;
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> import_model(std::istream& model,
const std::string& device_name = {},
const ov::AnyMap& config = {}) const override;
ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config) const override;
std::vector<std::string> get_available_devices() const override;
ov::RemoteContext create_context(const std::string& device_name, const AnyMap& args) const override;
ov::AnyMap get_supported_property(const std::string& device_name, const ov::AnyMap& config) const;
bool is_new_api() const override;
ov::RemoteContext get_default_context(const std::string& device_name) const override;
/**
* @brief Sets properties for a device, acceptable keys can be found in openvino/runtime/properties.hpp.
*
* @param device_name Name of a device.
*
* @param properties Map of pairs: (property name, property value).
*/
void set_property(const std::string& device_name, const AnyMap& properties);
/**
* @brief Sets properties for a device, acceptable keys can be found in openvino/runtime/properties.hpp.
*
* @tparam Properties Should be the pack of `std::pair<std::string, Any>` types.
* @param device_name Name of a device.
* @param properties Optional pack of pairs: (property name, property value).
*/
template <typename... Properties>
util::EnableIfAllStringAny<void, Properties...> set_property(const std::string& device_name,
Properties&&... properties) {
set_property(device_name, AnyMap{std::forward<Properties>(properties)...});
}
Any get_property(const std::string& device_name, const std::string& name, const AnyMap& arguments) const override;
};
} // namespace ov
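As a hedged sketch (assuming a CoreImpl instance named `core` and the standard property helpers from openvino/runtime/properties.hpp), the variadic set_property overload above only packs its arguments into an AnyMap and forwards to the AnyMap overload, so these two calls are equivalent:
core->set_property("CPU", ov::enable_profiling(true), ov::cache_dir("/tmp/ov_cache"));
core->set_property("CPU", ov::AnyMap{ov::enable_profiling(true), ov::cache_dir("/tmp/ov_cache")});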

View File

@ -0,0 +1,443 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <memory>
#include "any_copy.hpp"
#include "compilation_context.hpp"
#include "core_impl.hpp"
#include "cpp_interfaces/interface/ie_internal_plugin_config.hpp"
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "dev/converter_utils.hpp"
#include "ie_network_reader.hpp"
#include "iplugin_wrapper.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/pass/constant_folding.hpp"
#include "openvino/itt.hpp"
#include "openvino/util/common_util.hpp"
bool ov::CoreImpl::isNewAPI() const {
return is_new_api();
}
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> ov::CoreImpl::LoadNetworkImpl(
const InferenceEngine::CNNNetwork& network,
ov::Plugin& plugin,
const std::map<std::string, std::string>& parsedConfig,
const InferenceEngine::RemoteContext::Ptr& context,
const CacheContent& cacheContent,
bool forceDisableCache) {
OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "CoreImpl::compile_model_impl");
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> execNetwork;
auto wrapper = std::dynamic_pointer_cast<InferenceEngine::IPluginWrapper>(plugin.m_ptr);
OPENVINO_ASSERT(wrapper);
auto old_plugin = wrapper->get_plugin();
execNetwork = {context ? old_plugin->LoadNetwork(network, parsedConfig, context)
: old_plugin->LoadNetwork(network, parsedConfig),
plugin.m_so};
if (!forceDisableCache && cacheContent.cacheManager && device_supports_import_export(plugin)) {
try {
// need to export network for further import from "cache"
OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::Export");
cacheContent.cacheManager->writeCacheEntry(cacheContent.blobId, [&](std::ostream& networkStream) {
networkStream << InferenceEngine::CompiledBlobHeader(
InferenceEngine::GetInferenceEngineVersion()->buildNumber,
InferenceEngine::NetworkCompilationContext::calculateFileInfo(cacheContent.modelPath));
execNetwork->Export(networkStream);
});
} catch (...) {
cacheContent.cacheManager->removeCacheEntry(cacheContent.blobId);
throw;
}
}
return execNetwork;
}
InferenceEngine::RemoteContext::Ptr ov::CoreImpl::GetDefaultContext(const std::string& deviceName) {
return get_default_context(deviceName)._impl;
}
InferenceEngine::CNNNetwork ov::CoreImpl::ReadNetwork(const std::string& modelPath, const std::string& binPath) const {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from file");
return InferenceEngine::details::ReadNetwork(modelPath, binPath, extensions, ov_extensions, is_new_api());
}
InferenceEngine::CNNNetwork ov::CoreImpl::ReadNetwork(const std::string& model,
const InferenceEngine::Blob::CPtr& weights,
bool frontendMode) const {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "CoreImpl::ReadNetwork from memory");
return InferenceEngine::details::ReadNetwork(model, weights, extensions, ov_extensions, is_new_api(), frontendMode);
}
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> ov::CoreImpl::LoadNetwork(
const InferenceEngine::CNNNetwork& network,
const std::shared_ptr<InferenceEngine::RemoteContext>& context,
const std::map<std::string, std::string>& config) {
OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::RemoteContext");
if (network.getFunction()) {
ov::RemoteContext ctx{context, {nullptr}};
auto compiled_model =
compile_model(ov::legacy_convert::convert_model(network, isNewAPI()), ctx, any_copy(config));
return {compiled_model._ptr, compiled_model._so};
}
if (context == nullptr) {
IE_THROW() << "Remote context is null";
}
// have to deduce the device name/config from the context first
auto parsed = parseDeviceNameIntoConfig(context->getDeviceName(), config);
auto plugin = get_plugin(parsed._deviceName);
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> res;
auto conf = ov::any_copy(parsed._config);
auto cacheManager =
coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), conf)
._cacheManager;
auto cacheContent = CacheContent{cacheManager};
if (cacheManager && device_supports_import_export(plugin)) {
cacheContent.blobId = CalculateNetworkHash(network, parsed._deviceName, plugin, ov::any_copy(parsed._config));
bool loadedFromCache = false;
auto lock = cacheGuard.getHashLock(cacheContent.blobId);
res = load_model_from_cache(cacheContent, plugin, conf, {context, {}}, loadedFromCache);
if (!loadedFromCache) {
res = LoadNetworkImpl(network, plugin, parsed._config, context, cacheContent);
} else {
// Temporary workaround until all plugins support caching of original model inputs
InferenceEngine::SetExeNetworkInfo(res._ptr, network.getFunction(), isNewAPI());
}
} else {
res = LoadNetworkImpl(network, plugin, parsed._config, context, cacheContent);
}
return res;
}
InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork(
const InferenceEngine::CNNNetwork& network,
const std::string& deviceNameOrig,
const std::map<std::string, std::string>& config) {
OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::CNN");
if (network.getFunction()) {
auto compiled_model =
compile_model(ov::legacy_convert::convert_model(network, isNewAPI()), deviceNameOrig, any_copy(config));
return {compiled_model._ptr, compiled_model._so};
}
std::string deviceName = deviceNameOrig;
std::map<std::string, std::string> config_with_batch = config;
bool forceDisableCache = config_with_batch.count(CONFIG_KEY_INTERNAL(FORCE_DISABLE_CACHE)) > 0;
auto parsed = parseDeviceNameIntoConfig(deviceName, config_with_batch);
if (forceDisableCache) {
// remove this config key from parsed as plugins can throw unsupported exception
parsed._config.erase(CONFIG_KEY_INTERNAL(FORCE_DISABLE_CACHE));
}
auto plugin = get_plugin(parsed._deviceName);
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> res;
auto conf = ov::any_copy(parsed._config);
auto cacheManager =
coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), conf)
._cacheManager;
auto cacheContent = CacheContent{cacheManager};
if (!forceDisableCache && cacheManager && device_supports_import_export(plugin)) {
cacheContent.blobId = CalculateNetworkHash(network, parsed._deviceName, plugin, ov::any_copy(parsed._config));
bool loadedFromCache = false;
auto lock = cacheGuard.getHashLock(cacheContent.blobId);
res = load_model_from_cache(cacheContent, plugin, conf, {}, loadedFromCache);
if (!loadedFromCache) {
res = LoadNetworkImpl(network, plugin, parsed._config, nullptr, cacheContent, forceDisableCache);
} else {
// Temporary workaround until all plugins support caching of original model inputs
InferenceEngine::SetExeNetworkInfo(res._ptr, network.getFunction(), isNewAPI());
}
} else {
res = LoadNetworkImpl(network, plugin, parsed._config, nullptr, cacheContent, forceDisableCache);
}
return {res._ptr, res._so};
}
InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork(
const std::string& modelPath,
const std::string& deviceName,
const std::map<std::string, std::string>& config,
const std::function<void(const InferenceEngine::CNNNetwork&)>& val) {
OV_ITT_SCOPE(FIRST_INFERENCE, ie::itt::domains::IE_LT, "Core::LoadNetwork::Path");
auto parsed = parseDeviceNameIntoConfig(deviceName, config);
auto plugin = get_plugin(parsed._deviceName);
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> res;
auto conf = any_copy(parsed._config);
auto cacheManager =
coreConfig.get_cache_config_for_device(parsed._deviceName, device_supports_cache_dir(plugin), conf)
._cacheManager;
auto cacheContent = CacheContent{cacheManager, modelPath};
if (cacheManager && device_supports_import_export(plugin)) {
bool loadedFromCache = false;
cacheContent.blobId = calculate_file_hash(modelPath, parsed._deviceName, plugin, conf);
auto lock = cacheGuard.getHashLock(cacheContent.blobId);
res = load_model_from_cache(cacheContent, plugin, conf, {}, loadedFromCache);
if (!loadedFromCache) {
auto cnnNetwork = ReadNetwork(modelPath, std::string());
if (val) {
val(cnnNetwork);
}
if (cnnNetwork.getFunction()) {
res = compile_model_impl(ov::legacy_convert::convert_model(cnnNetwork, isNewAPI()),
plugin,
conf,
{},
cacheContent);
} else {
res = LoadNetworkImpl(cnnNetwork, plugin, parsed._config, nullptr, cacheContent);
}
}
} else if (cacheManager) {
res = plugin.compile_model(modelPath, conf);
} else {
auto cnnNetwork = ReadNetwork(modelPath, std::string());
if (val) {
val(cnnNetwork);
}
if (cnnNetwork.getFunction()) {
res = compile_model_impl(ov::legacy_convert::convert_model(cnnNetwork, isNewAPI()),
plugin,
conf,
{},
cacheContent);
} else {
res = LoadNetworkImpl(cnnNetwork, plugin, parsed._config, nullptr, cacheContent);
}
}
return {res._ptr, res._so};
}
InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::LoadNetwork(
const std::string& modelStr,
const InferenceEngine::Blob::CPtr& weights,
const std::string& deviceName,
const std::map<std::string, std::string>& config,
const std::function<void(const InferenceEngine::CNNNetwork&)>& val) {
OV_ITT_SCOPE(FIRST_INFERENCE, InferenceEngine::itt::domains::IE_LT, "Core::LoadNetwork::Memory");
auto compiled_model = compile_model(modelStr,
ov::Tensor{std::const_pointer_cast<InferenceEngine::Blob>(weights), {}},
deviceName,
ov::any_copy(config));
return {compiled_model._ptr, compiled_model._so};
}
InferenceEngine::SoExecutableNetworkInternal ov::CoreImpl::ImportNetwork(
std::istream& networkModel,
const std::string& deviceName,
const std::map<std::string, std::string>& config) {
auto compiled_model = import_model(networkModel, deviceName, any_copy(config));
return {compiled_model._ptr, compiled_model._so};
}
InferenceEngine::QueryNetworkResult ov::CoreImpl::QueryNetwork(const InferenceEngine::CNNNetwork& network,
const std::string& deviceName,
const std::map<std::string, std::string>& config) const {
OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::QueryNetwork");
ie::QueryNetworkResult ret;
if (!network.getFunction()) {
ret.rc = InferenceEngine::GENERAL_ERROR;
return ret;
}
auto res = query_model(network.getFunction(), deviceName, any_copy(config));
if (!network.getFunction() || res.empty()) {
ret.rc = InferenceEngine::GENERAL_ERROR;
return ret;
}
ret.supportedLayersMap = res;
const auto& func = network.getFunction();
auto specialized_function = func->clone();
std::string defDevice = ret.supportedLayersMap.begin()->second;
ngraph::pass::ConstantFolding().run_on_model(specialized_function);
std::unordered_set<std::string> opNames;
for (const auto& op : specialized_function->get_ops())
opNames.emplace(op->get_friendly_name());
for (const auto& op : func->get_ops()) {
if (opNames.find(op->get_friendly_name()) == opNames.end()) {
ret.supportedLayersMap[op->get_friendly_name()] = defDevice;
}
}
for (const auto& op : func->get_ops()) {
if (!ret.supportedLayersMap.count(op->get_friendly_name()) &&
std::dynamic_pointer_cast<ngraph::op::Constant>(op)) {
bool are_all_users_supported = true;
for (const auto& user : op->output(0).get_target_inputs()) {
if (!ret.supportedLayersMap.count(user.get_node()->get_friendly_name())) {
are_all_users_supported = false;
break;
}
}
if (are_all_users_supported) {
ret.supportedLayersMap[op->get_friendly_name()] = defDevice;
}
}
}
return ret;
}
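In short, QueryNetwork post-processes the query_model answer so that every layer of the original model ends up mapped to a device: layers folded away by ConstantFolding (which the plugin never saw) and Constants whose consumers are all supported are attributed to the device of the first reported layer. A hedged illustration of the expected effect (layer names are hypothetical):
// Original model: Constant -> Add -> Result, queried on "CPU".
// query_model may report only {"Add": "CPU", "Result": "CPU"} because the Constant is folded.
// QueryNetwork then adds {"Constant": "CPU"} so the caller sees every original layer mapped.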
ov::Any ov::CoreImpl::GetMetric(const std::string& deviceName,
const std::string& name,
const ov::AnyMap& options) const {
// HETERO case
{
if (deviceName.find("HETERO:") == 0) {
IE_THROW()
<< "You can get specific metrics with the GetMetric only for the HETERO itself (without devices). "
"To get individual devices's metrics call GetMetric for each device separately";
}
}
// MULTI case
{
if (deviceName.find("MULTI:") == 0) {
IE_THROW()
<< "You can get specific metrics with the GetMetric only for the MULTI itself (without devices). "
"To get individual devices's metrics call GetMetric for each device separately";
}
}
// AUTO case
{
if (deviceName.find("AUTO:") == 0) {
IE_THROW() << "You can get specific metrics with the GetMetric only for the AUTO itself (without devices). "
"To get individual devices's metrics call GetMetric for each device separately";
}
}
// BATCH case
{
if (deviceName.find("BATCH:") == 0) {
IE_THROW()
<< "You can get specific metrics with the GetMetric only for the BATCH itself (without devices). "
"To get individual devices's metrics call GetMetric for each device separately";
}
}
auto parsed = parseDeviceNameIntoConfig(deviceName);
for (auto o : options) {
parsed._config.insert(o);
}
return get_plugin(parsed._deviceName).get_property(name, parsed._config);
}
ov::Any ov::CoreImpl::GetConfig(const std::string& deviceName, const std::string& name) const {
auto parsed = parseDeviceNameIntoConfig(deviceName);
return get_plugin(parsed._deviceName).get_property(name, parsed._config);
}
std::vector<std::string> ov::CoreImpl::GetAvailableDevices() const {
return get_available_devices();
}
InferenceEngine::RemoteContext::Ptr ov::CoreImpl::CreateContext(const std::string& deviceName,
const InferenceEngine::ParamMap& params) {
return create_context(deviceName, params)._impl;
}
/**
* @brief Registers the extension in a Core object
* Such extensions can be used for both CNNNetwork readers and device plugins
*/
void ov::CoreImpl::AddExtension(const InferenceEngine::IExtensionPtr& extension) {
std::lock_guard<std::mutex> lock(get_mutex());
AddExtensionUnsafe(extension);
}
bool ov::CoreImpl::DeviceSupportsImportExport(const std::string& deviceName) const {
return device_supports_import_export(deviceName);
}
std::map<std::string, std::string> ov::CoreImpl::GetSupportedConfig(const std::string& deviceName,
const std::map<std::string, std::string>& configs) {
std::vector<std::string> supportedConfigKeys;
try {
supportedConfigKeys = GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
} catch (ov::Exception&) {
}
try {
for (auto&& property : ICore::get_property(deviceName, ov::supported_properties)) {
if (property.is_mutable()) {
supportedConfigKeys.emplace_back(std::move(property));
}
}
} catch (ov::Exception&) {
}
std::map<std::string, std::string> supportedConfig;
for (auto&& key : supportedConfigKeys) {
auto itKey = configs.find(key);
if (configs.end() != itKey) {
supportedConfig[key] = itKey->second;
}
}
for (auto&& config : configs) {
auto parsed = parseDeviceNameIntoConfig(config.first);
if (deviceName.find(parsed._deviceName) != std::string::npos) {
std::stringstream strm(config.second);
std::map<std::string, std::string> device_configs;
util::Read<std::map<std::string, std::string>>{}(strm, device_configs);
for (auto&& device_config : device_configs) {
if (ov::util::contains(supportedConfigKeys, device_config.first)) {
supportedConfig[device_config.first] = device_config.second;
}
}
for (auto&& config : parsed._config) {
supportedConfig[config.first] = config.second.as<std::string>();
}
}
}
return supportedConfig;
}
std::map<std::string, InferenceEngine::Version> ov::CoreImpl::GetVersions(const std::string& deviceName) const {
std::map<std::string, InferenceEngine::Version> versions;
std::vector<std::string> deviceNames;
{
// for compatibility with samples / demo
if (deviceName.find("HETERO") == 0) {
auto pos = deviceName.find_first_of(":");
if (pos != std::string::npos) {
deviceNames = InferenceEngine::DeviceIDParser::getHeteroDevices(deviceName.substr(pos + 1));
}
deviceNames.push_back("HETERO");
} else if (deviceName.find("MULTI") == 0) {
auto pos = deviceName.find_first_of(":");
if (pos != std::string::npos) {
deviceNames = InferenceEngine::DeviceIDParser::getMultiDevices(deviceName.substr(pos + 1));
}
deviceNames.push_back("MULTI");
} else if (deviceName.find("AUTO") == 0) {
auto pos = deviceName.find_first_of(":");
if (pos != std::string::npos) {
deviceNames = InferenceEngine::DeviceIDParser::getMultiDevices(deviceName.substr(pos + 1));
}
deviceNames.emplace_back("AUTO");
} else if (deviceName.find("BATCH") == 0) {
auto pos = deviceName.find_first_of(":");
if (pos != std::string::npos) {
deviceNames = {InferenceEngine::DeviceIDParser::getBatchDevice(deviceName.substr(pos + 1))};
}
deviceNames.push_back("BATCH");
} else {
deviceNames.push_back(deviceName);
}
}
for (auto&& deviceName_ : deviceNames) {
ie::DeviceIDParser parser(deviceName_);
std::string deviceNameLocal = parser.getDeviceName();
ov::Plugin cppPlugin = get_plugin(deviceNameLocal);
versions[deviceNameLocal] = ov::legacy_convert::convert_plugin(cppPlugin.m_ptr)->GetVersion();
}
return versions;
}
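A hedged usage note for the parsing above: a composite device name is reported together with its leaf devices, e.g. (assuming CPU and GPU plugins are registered, and `core` is a CoreImpl instance):
auto versions = core->GetVersions("HETERO:GPU,CPU");
// `versions` is expected to contain entries for "GPU", "CPU" and "HETERO",
// each filled from the corresponding plugin's GetVersion().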

View File

@ -0,0 +1,47 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "openvino/runtime/iplugin.hpp"
ov::IPlugin::IPlugin() : m_executor_manager(InferenceEngine::executorManager()), m_is_new_api(true) {}
void ov::IPlugin::set_version(const ov::Version& version) {
m_version = version;
}
const ov::Version& ov::IPlugin::get_version() const {
return m_version;
}
void ov::IPlugin::set_device_name(const std::string& name) {
m_plugin_name = name;
}
const std::string& ov::IPlugin::get_device_name() const {
return m_plugin_name;
}
void ov::IPlugin::add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension) {
OPENVINO_NOT_IMPLEMENTED;
}
void ov::IPlugin::set_core(const std::weak_ptr<ov::ICore>& core) {
OPENVINO_ASSERT(!core.expired());
m_core = core;
auto locked_core = m_core.lock();
if (locked_core)
m_is_new_api = locked_core->is_new_api();
}
std::shared_ptr<ov::ICore> ov::IPlugin::get_core() const {
return m_core.lock();
}
bool ov::IPlugin::is_new_api() const {
return m_is_new_api;
}
const std::shared_ptr<InferenceEngine::ExecutorManager>& ov::IPlugin::get_executor_manager() const {
return m_executor_manager;
}
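A minimal sketch of how the base-class helpers above are meant to be used (assumptions: `MyPlugin` is a hypothetical subclass of ov::IPlugin and `core` is a std::shared_ptr<ov::ICore>):
auto plugin = std::make_shared<MyPlugin>();
plugin->set_device_name("EXAMPLE");  // stored in m_plugin_name, returned by get_device_name()
plugin->set_core(core);              // keeps a weak_ptr and caches core->is_new_api() in m_is_new_api
if (plugin->is_new_api()) {
    // the plugin can rely on the new ov::Model based flow here
}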

View File

@ -0,0 +1,114 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "iplugin_wrapper.hpp"
#include <memory>
#include "any_copy.hpp"
#include "dev/converter_utils.hpp"
#include "ie_icore.hpp"
namespace InferenceEngine {
IPluginWrapper::IPluginWrapper(const std::shared_ptr<InferenceEngine::IInferencePlugin>& ptr) : m_old_plugin(ptr) {
OPENVINO_ASSERT(m_old_plugin);
auto& ver = m_old_plugin->GetVersion();
m_version.buildNumber = ver.buildNumber;
m_version.description = ver.description;
m_plugin_name = m_old_plugin->GetName();
m_is_new_api = m_old_plugin->IsNewAPI();
m_core = m_old_plugin->GetCore();
m_executor_manager = m_old_plugin->executorManager();
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const {
auto exec_network =
m_old_plugin->LoadNetwork(ov::legacy_convert::convert_model(model, is_new_api()), ov::any_copy(properties));
return exec_network;
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::compile_model(
const std::string& model_path,
const ov::AnyMap& properties) const {
auto exec_network = m_old_plugin->LoadNetwork(model_path, any_copy(properties));
return exec_network._ptr;
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties,
const ov::RemoteContext& context) const {
auto compiled_model = m_old_plugin->LoadNetwork(ov::legacy_convert::convert_model(model, is_new_api()),
any_copy(properties),
context._impl);
return compiled_model;
}
void IPluginWrapper::set_property(const ov::AnyMap& properties) {
m_old_plugin->SetProperties(properties);
}
ov::Any IPluginWrapper::get_property(const std::string& name, const ov::AnyMap& arguments) const {
try {
return m_old_plugin->GetConfig(name, arguments);
} catch (...) {
return m_old_plugin->GetMetric(name, arguments);
}
}
ov::RemoteContext IPluginWrapper::create_context(const ov::AnyMap& remote_properties) const {
return ov::RemoteContext{m_old_plugin->CreateContext(remote_properties), {nullptr}};
}
ov::RemoteContext IPluginWrapper::get_default_context(const ov::AnyMap& remote_properties) const {
return ov::RemoteContext{m_old_plugin->GetDefaultContext(remote_properties), {nullptr}};
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::import_model(
std::istream& model,
const ov::AnyMap& properties) const {
return m_old_plugin->ImportNetwork(model, any_copy(properties));
}
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> IPluginWrapper::import_model(
std::istream& model,
const ov::RemoteContext& context,
const ov::AnyMap& properties) const {
return m_old_plugin->ImportNetwork(model, context._impl, any_copy(properties));
}
ov::SupportedOpsMap IPluginWrapper::query_model(const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const {
auto res = m_old_plugin->QueryNetwork(ov::legacy_convert::convert_model(model, is_new_api()), any_copy(properties));
if (res.rc != InferenceEngine::OK) {
throw ov::Exception(res.resp.msg);
}
return res.supportedLayersMap;
}
void IPluginWrapper::add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension) {
m_old_plugin->AddExtension(extension);
}
const std::shared_ptr<InferenceEngine::IInferencePlugin>& IPluginWrapper::get_plugin() const {
return m_old_plugin;
}
void IPluginWrapper::set_core(const std::weak_ptr<ov::ICore>& core) {
auto locked_core = core.lock();
auto old_core = std::dynamic_pointer_cast<InferenceEngine::ICore>(locked_core);
if (old_core)
m_old_plugin->SetCore(old_core);
m_core = core;
}
void IPluginWrapper::set_device_name(const std::string& device_name) {
m_plugin_name = device_name;
m_old_plugin->SetName(device_name);
}
} // namespace InferenceEngine

View File

@ -0,0 +1,166 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "openvino/runtime/iplugin.hpp"
namespace InferenceEngine {
/**
 * @brief Wrapper that adapts a legacy InferenceEngine::IInferencePlugin to the ov::IPlugin interface
*/
class IPluginWrapper : public ov::IPlugin {
public:
/**
* @brief Constructs Plugin wrapper
*
* @param ptr shared pointer to InferenceEngine::IInferencePlugin
*/
IPluginWrapper(const std::shared_ptr<InferenceEngine::IInferencePlugin>& ptr);
/**
* @brief Create compiled model based on model and properties
*
* @param model OpenVINO Model representation
* @param properties configurations for compiled model
*
* @return shared pointer to compiled model interface
*/
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const override;
/**
* @brief Create compiled model based on model and properties
*
* @param model_path Path to the model
* @param properties configurations for compiled model
*
* @return shared pointer to compiled model interface
*/
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
const std::string& model_path,
const ov::AnyMap& properties) const override;
/**
* @brief Create compiled model based on model and properties
*
* @param model OpenVINO Model representation
* @param properties configurations for compiled model
* @param context remote context
*
* @return shared pointer to compiled model interface
*/
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties,
const ov::RemoteContext& context) const override;
/**
* @brief Specifies some plugin properties
*
* @param properties map with configuration properties
*/
void set_property(const ov::AnyMap& properties) override;
/**
* @brief Returns the property
*
* @param name property name
* @param arguments configuration parameters
*
* @return ov::Any object which contains property value
*/
ov::Any get_property(const std::string& name, const ov::AnyMap& arguments) const override;
/**
* @brief Create remote context
*
* @param remote_properties configuration parameters
*
* @return Remote context
*/
ov::RemoteContext create_context(const ov::AnyMap& remote_properties) const override;
/**
* @brief Create default remote context
*
* @param remote_properties configuration parameters
*
* @return Remote context
*/
ov::RemoteContext get_default_context(const ov::AnyMap& remote_properties) const override;
/**
* @brief Import model to the plugin
*
 * @param model stream with the model
* @param properties configuration properties
*
* @return shared pointer to compiled model interface
*/
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> import_model(
std::istream& model,
const ov::AnyMap& properties) const override;
/**
* @brief Import model to the plugin
*
 * @param model stream with the model
* @param context remote context
* @param properties configuration properties
*
* @return shared pointer to compiled model interface
*/
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>
import_model(std::istream& model, const ov::RemoteContext& context, const ov::AnyMap& properties) const override;
/**
 * @brief Queries the plugin about the operations it supports in the given model
*
* @param model OpenVINO Model
* @param properties configuration properties
*
* @return Map of supported operations
*/
ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const override;
/**
* @brief Register legacy Inference Engine Extension for the plugin
*
* @param extension legacy Inference Engine Extension
*/
void add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension) override;
/**
* @brief Returns the instance of the legacy plugin
*
 * @return Legacy InferenceEngine::IInferencePlugin object
*/
const std::shared_ptr<InferenceEngine::IInferencePlugin>& get_plugin() const;
/**
* @brief Set core interface to the plugin
 * This method hides the non-virtual IPlugin::set_core and also forwards the core to the wrapped legacy plugin
*
* @param core OpenVINO Core interface
*/
void set_core(const std::weak_ptr<ov::ICore>& core);
/**
* @brief Set plugin name for the wrapper and legacy plugin
 * This method hides the non-virtual IPlugin::set_device_name and also renames the wrapped legacy plugin
 *
 * @param device_name The name of the plugin
*/
void set_device_name(const std::string& device_name);
private:
std::shared_ptr<InferenceEngine::IInferencePlugin> m_old_plugin;
};
} // namespace InferenceEngine
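Taken together, the wrapper lets the refactored core treat a legacy plugin exactly like a native ov::IPlugin. A minimal usage sketch, assuming legacy_plugin is an existing std::shared_ptr<InferenceEngine::IInferencePlugin> and model is a std::shared_ptr<const ov::Model>:

// Adapt the legacy plugin to the new plugin interface.
auto wrapped = std::make_shared<InferenceEngine::IPluginWrapper>(legacy_plugin);

// Calls are forwarded to the legacy API (SetProperties, QueryNetwork, LoadNetwork, ...).
wrapped->set_property({{"PERF_COUNT", "YES"}});
ov::SupportedOpsMap supported = wrapped->query_model(model, {});
auto compiled = wrapped->compile_model(model, {});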

View File

@ -0,0 +1,146 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "plugin.hpp"
#include <memory>
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "ie_plugin_config.hpp"
#include "iplugin_wrapper.hpp"
#define OV_PLUGIN_CALL_STATEMENT(...) \
OPENVINO_ASSERT(m_ptr != nullptr, "OpenVINO Runtime Plugin was not initialized."); \
try { \
__VA_ARGS__; \
} catch (...) { \
::InferenceEngine::details::Rethrow(); \
}
ov::Plugin::~Plugin() {
m_ptr = {};
}
ov::Plugin::Plugin(const std::shared_ptr<ov::IPlugin>& ptr, const std::shared_ptr<void>& so) : m_ptr{ptr}, m_so{so} {
OV_PLUGIN_CALL_STATEMENT();
}
void ov::Plugin::set_name(const std::string& deviceName) {
OV_PLUGIN_CALL_STATEMENT({
m_ptr->set_device_name(deviceName);
if (auto wrapper = std::dynamic_pointer_cast<InferenceEngine::IPluginWrapper>(m_ptr))
wrapper->set_device_name(deviceName);
});
}
void ov::Plugin::set_core(std::weak_ptr<ICore> core) {
OV_PLUGIN_CALL_STATEMENT({
m_ptr->set_core(core);
if (auto wrapper = std::dynamic_pointer_cast<InferenceEngine::IPluginWrapper>(m_ptr))
wrapper->set_core(core);
});
}
const ov::Version ov::Plugin::get_version() const {
OV_PLUGIN_CALL_STATEMENT(return m_ptr->get_version());
}
void ov::Plugin::add_extension(const ie::IExtensionPtr& extension) {
OPENVINO_SUPPRESS_DEPRECATED_START
OV_PLUGIN_CALL_STATEMENT(m_ptr->add_extension(extension));
OPENVINO_SUPPRESS_DEPRECATED_END
}
void ov::Plugin::set_property(const ov::AnyMap& config) {
OV_PLUGIN_CALL_STATEMENT(m_ptr->set_property(config));
}
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> ov::Plugin::compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const {
OV_PLUGIN_CALL_STATEMENT(return {m_ptr->compile_model(model, properties), m_so});
}
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> ov::Plugin::compile_model(const std::string& model_path,
const ov::AnyMap& properties) const {
OV_PLUGIN_CALL_STATEMENT(return {m_ptr->compile_model(model_path, properties), m_so});
}
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> ov::Plugin::compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::RemoteContext& context,
const ov::AnyMap& properties) const {
OV_PLUGIN_CALL_STATEMENT(return {m_ptr->compile_model(model, properties, context), m_so});
}
ov::SupportedOpsMap ov::Plugin::query_model(const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const {
OV_PLUGIN_CALL_STATEMENT(return m_ptr->query_model(model, properties));
}
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> ov::Plugin::import_model(std::istream& model,
const ov::AnyMap& properties) const {
OV_PLUGIN_CALL_STATEMENT(return {m_ptr->import_model(model, properties), m_so});
}
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> ov::Plugin::import_model(std::istream& networkModel,
const ov::RemoteContext& context,
const ov::AnyMap& config) const {
OV_PLUGIN_CALL_STATEMENT(return {m_ptr->import_model(networkModel, context, config), m_so});
}
ov::RemoteContext ov::Plugin::create_context(const AnyMap& params) const {
OV_PLUGIN_CALL_STATEMENT({
auto remote = m_ptr->create_context(params);
auto so = remote._so;
if (m_so)
so.emplace_back(m_so);
return {remote._impl, so};
});
}
ov::RemoteContext ov::Plugin::get_default_context(const AnyMap& params) const {
OV_PLUGIN_CALL_STATEMENT({
auto remote = m_ptr->get_default_context(params);
auto so = remote._so;
if (m_so)
so.emplace_back(m_so);
return {remote._impl, so};
});
}
ov::Any ov::Plugin::get_property(const std::string& name, const AnyMap& arguments) const {
OV_PLUGIN_CALL_STATEMENT({
if (ov::supported_properties == name) {
try {
return {m_ptr->get_property(name, arguments), {m_so}};
} catch (const ie::Exception&) {
std::vector<ov::PropertyName> supported_properties;
try {
auto ro_properties =
m_ptr->get_property(METRIC_KEY(SUPPORTED_METRICS), arguments).as<std::vector<std::string>>();
for (auto&& ro_property : ro_properties) {
if (ro_property != METRIC_KEY(SUPPORTED_METRICS) &&
ro_property != METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
supported_properties.emplace_back(ro_property, PropertyMutability::RO);
}
}
} catch (const ov::Exception&) {
} catch (const ie::Exception&) {
}
try {
auto rw_properties = m_ptr->get_property(METRIC_KEY(SUPPORTED_CONFIG_KEYS), arguments)
.as<std::vector<std::string>>();
for (auto&& rw_property : rw_properties) {
supported_properties.emplace_back(rw_property, PropertyMutability::RW);
}
} catch (const ov::Exception&) {
} catch (const ie::Exception&) {
}
supported_properties.emplace_back(ov::supported_properties.name(), PropertyMutability::RO);
return supported_properties;
}
}
return {m_ptr->get_property(name, arguments), {m_so}};
});
}

View File

@ -0,0 +1,82 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief This is a header file for the Inference Engine plugin C++ API
*
* @file plugin.hpp
*/
#pragma once
#include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp"
#include "ie_iextension.h"
#include "openvino/runtime/iplugin.hpp"
namespace ov {
class CoreImpl;
/**
 * @brief Thin wrapper over the ov::IPlugin interface that is used inside the core implementation
*/
class Plugin {
std::shared_ptr<ov::IPlugin> m_ptr;
std::shared_ptr<void> m_so;
friend ::ov::CoreImpl;
public:
Plugin() = default;
~Plugin();
Plugin(const std::shared_ptr<ov::IPlugin>& ptr, const std::shared_ptr<void>& so);
void set_name(const std::string& deviceName);
void set_core(std::weak_ptr<ICore> core);
const ov::Version get_version() const;
void add_extension(const ie::IExtensionPtr& extension);
void set_property(const ov::AnyMap& config);
SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const;
SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::string& model_path,
const ov::AnyMap& properties) const;
SoPtr<InferenceEngine::IExecutableNetworkInternal> compile_model(const std::shared_ptr<const ov::Model>& model,
const ov::RemoteContext& context,
const ov::AnyMap& properties) const;
ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model, const ov::AnyMap& properties) const;
SoPtr<InferenceEngine::IExecutableNetworkInternal> import_model(std::istream& model,
const ov::AnyMap& properties) const;
SoPtr<InferenceEngine::IExecutableNetworkInternal> import_model(std::istream& networkModel,
const ov::RemoteContext& context,
const ov::AnyMap& config) const;
ov::RemoteContext create_context(const AnyMap& params) const;
ov::RemoteContext get_default_context(const AnyMap& params) const;
Any get_property(const std::string& name, const AnyMap& arguments) const;
template <typename T, PropertyMutability M>
T get_property(const ov::Property<T, M>& property) const {
return get_property(property.name(), {}).template as<T>();
}
template <typename T, PropertyMutability M>
T get_property(const ov::Property<T, M>& property, const AnyMap& arguments) const {
return get_property(property.name(), arguments).template as<T>();
}
};
} // namespace ov
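Inside ov::CoreImpl the typed get_property helpers are the usual entry point; combined with the fallback implemented in plugin.cpp above, ov::supported_properties is answered even by wrapped legacy plugins that only expose SUPPORTED_METRICS and SUPPORTED_CONFIG_KEYS. A sketch (the get_plugin() call stands in for the CoreImpl accessor and is shown only for illustration):

ov::Plugin plugin = get_plugin("CPU");  // hypothetical: resolved inside ov::CoreImpl

// For legacy plugins the list is synthesized from SUPPORTED_METRICS / SUPPORTED_CONFIG_KEYS,
// as implemented in Plugin::get_property above.
auto properties = plugin.get_property(ov::supported_properties);
for (const auto& property : properties) {
    std::cout << property << (property.is_mutable() ? " (RW)" : " (RO)") << std::endl;
}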

View File

@ -65,7 +65,7 @@ Core::Core(const std::string& xmlConfigFile) {
     _impl = std::make_shared<Impl>();
 #ifdef OPENVINO_STATIC_LIBRARY
-    _impl->RegisterPluginsInRegistry(::getStaticPluginsRegistry());
+    _impl->register_plugins_in_registry(::getStaticPluginsRegistry());
 #else
     RegisterPlugins(ov::findPluginXML(xmlConfigFile));
 #endif
@ -176,7 +176,10 @@ ExecutableNetwork Core::ImportNetwork(const std::string& modelFileName,
                                       const std::map<std::string, std::string>& config) {
     OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::ImportNetwork");
     auto parsed = ov::parseDeviceNameIntoConfig(deviceName, config);
-    auto exec = _impl->GetCPPPluginByName(parsed._deviceName).import_model(modelFileName, parsed._config);
+    std::ifstream modelStream(modelFileName, std::ios::binary);
+    if (!modelStream.is_open())
+        IE_THROW(NetworkNotRead) << "Model file " << modelFileName << " cannot be opened!";
+    auto exec = _impl->get_plugin(parsed._deviceName).import_model(modelStream, ov::any_copy(parsed._config));
     return {exec._ptr, exec._so};
 }
@ -206,7 +209,7 @@ ExecutableNetwork Core::ImportNetwork(std::istream& networkModel) {
     }
     networkModel.seekg(currentPos, networkModel.beg);
-    auto exec = _impl->GetCPPPluginByName(deviceName).import_model(networkModel, {});
+    auto exec = _impl->get_plugin(deviceName).import_model(networkModel, {});
     return {exec._ptr, exec._so};
 }
@ -224,8 +227,10 @@ ExecutableNetwork Core::ImportNetwork(std::istream& networkModel,
     std::string deviceName = device.getDeviceName();
     auto parsed = ov::parseDeviceNameIntoConfig(deviceName, config);
-    auto exec = _impl->GetCPPPluginByName(deviceName)
-                    .import_model(networkModel, std::dynamic_pointer_cast<RemoteContext>(context), parsed._config);
+    auto exec = _impl->get_plugin(deviceName)
+                    .import_model(networkModel,
+                                  ov::RemoteContext{std::dynamic_pointer_cast<RemoteContext>(context), {}},
+                                  ov::any_copy(parsed._config));
     return {exec._ptr, exec._so};
 }
@ -262,9 +267,9 @@ void Core::SetConfig(const std::map<std::string, std::string>& config, const std
     ov::AnyMap conf = ov::any_copy(config);
     if (deviceName.empty()) {
-        _impl->SetConfigForPlugins(conf, std::string());
+        _impl->set_property_for_devivce(conf, std::string());
     } else {
-        _impl->SetConfigForPlugins(conf, deviceName);
+        _impl->set_property_for_devivce(conf, deviceName);
     }
 }
@ -297,7 +302,7 @@ Parameter Core::GetConfig(const std::string& deviceName, const std::string& name
     }
     auto parsed = ov::parseDeviceNameIntoConfig(deviceName);
-    return _impl->GetCPPPluginByName(parsed._deviceName).get_config(name, parsed._config);
+    return _impl->get_plugin(parsed._deviceName).get_property(name, parsed._config);
 }
 Parameter Core::GetMetric(const std::string& deviceName, const std::string& name, const ParamMap& options) const {
@ -309,18 +314,18 @@ std::vector<std::string> Core::GetAvailableDevices() const {
 }
 void Core::RegisterPlugin(const std::string& pluginName, const std::string& deviceName) {
-    _impl->RegisterPluginByName(pluginName, deviceName);
+    _impl->register_plugin(pluginName, deviceName);
 }
 void Core::RegisterPlugins(const std::string& xmlConfigFile) {
-    _impl->RegisterPluginsInRegistry(xmlConfigFile);
+    _impl->register_plugins_in_registry(xmlConfigFile);
 }
 void Core::UnregisterPlugin(const std::string& deviceName_) {
     DeviceIDParser parser(deviceName_);
     std::string deviceName = parser.getDeviceName();
-    _impl->UnloadPluginByName(deviceName);
+    _impl->unload_plugin(deviceName);
 }
 } // namespace InferenceEngine
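One user-visible effect of the ImportNetwork change above is that a missing blob file now fails with NetworkNotRead before any plugin is invoked. A short sketch of the calling side (file and device names are illustrative):

InferenceEngine::Core core;
try {
    auto exec = core.ImportNetwork("model.blob", "CPU", {});
} catch (const InferenceEngine::NetworkNotRead& ex) {
    // Thrown by the new ifstream check when "model.blob" cannot be opened.
    std::cerr << ex.what() << std::endl;
}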

View File

@ -5,22 +5,29 @@
 #pragma once
 #include <gmock/gmock.h>
 #include "ie_icore.hpp"
 class MockICore : public InferenceEngine::ICore {
 public:
-    MOCK_CONST_METHOD3(ReadNetwork, InferenceEngine::CNNNetwork(const std::string&, const InferenceEngine::Blob::CPtr&, bool));
+    MOCK_CONST_METHOD3(ReadNetwork,
+                       InferenceEngine::CNNNetwork(const std::string&, const InferenceEngine::Blob::CPtr&, bool));
     MOCK_CONST_METHOD2(ReadNetwork, InferenceEngine::CNNNetwork(const std::string&, const std::string&));
-    MOCK_METHOD3(LoadNetwork, InferenceEngine::SoExecutableNetworkInternal(
-        const InferenceEngine::CNNNetwork&, const std::string&, const std::map<std::string, std::string>&));
-    MOCK_METHOD3(LoadNetwork, InferenceEngine::SoExecutableNetworkInternal(
-        const InferenceEngine::CNNNetwork&, const std::shared_ptr<InferenceEngine::RemoteContext> &, const std::map<std::string, std::string>&));
-    MOCK_METHOD4(LoadNetwork, InferenceEngine::SoExecutableNetworkInternal(
-        const std::string &,
-        const std::string &,
-        const std::map<std::string, std::string> &,
-        const std::function<void(const InferenceEngine::CNNNetwork&)> &));
+    MOCK_METHOD3(LoadNetwork,
+                 InferenceEngine::SoExecutableNetworkInternal(const InferenceEngine::CNNNetwork&,
+                                                              const std::string&,
+                                                              const std::map<std::string, std::string>&));
+    MOCK_METHOD3(LoadNetwork,
+                 InferenceEngine::SoExecutableNetworkInternal(const InferenceEngine::CNNNetwork&,
+                                                              const std::shared_ptr<InferenceEngine::RemoteContext>&,
+                                                              const std::map<std::string, std::string>&));
+    MOCK_METHOD4(
+        LoadNetwork,
+        InferenceEngine::SoExecutableNetworkInternal(const std::string&,
+                                                     const std::string&,
+                                                     const std::map<std::string, std::string>&,
+                                                     const std::function<void(const InferenceEngine::CNNNetwork&)>&));
     MOCK_METHOD5(
         LoadNetwork,
         InferenceEngine::SoExecutableNetworkInternal(const std::string&,
@ -29,26 +36,65 @@ public:
                                                      const std::map<std::string, std::string>&,
                                                      const std::function<void(const InferenceEngine::CNNNetwork&)>&));
-    MOCK_METHOD3(ImportNetwork, InferenceEngine::SoExecutableNetworkInternal(
-        std::istream&, const std::string&, const std::map<std::string, std::string>&));
-    MOCK_METHOD3(ImportNetwork, InferenceEngine::SoExecutableNetworkInternal(
-        std::istream&, const std::shared_ptr<InferenceEngine::RemoteContext>&, const std::map<std::string, std::string>&));
+    MOCK_METHOD3(ImportNetwork,
+                 InferenceEngine::SoExecutableNetworkInternal(std::istream&,
+                                                              const std::string&,
+                                                              const std::map<std::string, std::string>&));
+    MOCK_METHOD3(ImportNetwork,
+                 InferenceEngine::SoExecutableNetworkInternal(std::istream&,
+                                                              const std::shared_ptr<InferenceEngine::RemoteContext>&,
+                                                              const std::map<std::string, std::string>&));
-    MOCK_METHOD2(CreateContext, InferenceEngine::RemoteContext::Ptr(const std::string& deviceName,
-        const InferenceEngine::ParamMap& params));
+    MOCK_METHOD2(CreateContext,
+                 InferenceEngine::RemoteContext::Ptr(const std::string& deviceName,
+                                                     const InferenceEngine::ParamMap& params));
-    MOCK_CONST_METHOD3(QueryNetwork, InferenceEngine::QueryNetworkResult(
-        const InferenceEngine::CNNNetwork&, const std::string&, const std::map<std::string, std::string>&));
+    MOCK_CONST_METHOD3(QueryNetwork,
+                       InferenceEngine::QueryNetworkResult(const InferenceEngine::CNNNetwork&,
+                                                           const std::string&,
+                                                           const std::map<std::string, std::string>&));
     MOCK_CONST_METHOD3(GetMetric, ov::Any(const std::string&, const std::string&, const ov::AnyMap&));
     MOCK_CONST_METHOD2(GetConfig, ov::Any(const std::string&, const std::string&));
     MOCK_CONST_METHOD3(get_property, ov::Any(const std::string&, const std::string&, const ov::AnyMap&));
+    MOCK_METHOD2(set_property, void(const std::string&, const ov::AnyMap&));
     MOCK_CONST_METHOD0(GetAvailableDevices, std::vector<std::string>());
     MOCK_CONST_METHOD1(DeviceSupportsImportExport, bool(const std::string&));  // NOLINT not a cast to bool
-    MOCK_METHOD2(GetSupportedConfig, std::map<std::string, std::string>(const std::string&, const std::map<std::string, std::string>&));
+    MOCK_METHOD2(GetSupportedConfig,
+                 std::map<std::string, std::string>(const std::string&, const std::map<std::string, std::string>&));
     MOCK_CONST_METHOD0(isNewAPI, bool());
     MOCK_METHOD1(GetDefaultContext, InferenceEngine::RemoteContext::Ptr(const std::string&));
+    MOCK_CONST_METHOD0(is_new_api, bool());
+    MOCK_CONST_METHOD2(create_context, ov::RemoteContext(const std::string& deviceName, const ov::AnyMap& params));
+    MOCK_CONST_METHOD0(get_available_devices, std::vector<std::string>());
+    MOCK_CONST_METHOD3(query_model,
+                       ov::SupportedOpsMap(const std::shared_ptr<const ov::Model>&,
+                                           const std::string&,
+                                           const ov::AnyMap&));
+    MOCK_CONST_METHOD3(import_model,
+                       ov::SoPtr<InferenceEngine::IExecutableNetworkInternal>(std::istream&,
+                                                                              const std::string&,
+                                                                              const ov::AnyMap&));
+    MOCK_CONST_METHOD3(compile_model,
+                       ov::SoPtr<InferenceEngine::IExecutableNetworkInternal>(const std::shared_ptr<const ov::Model>&,
+                                                                              const std::string&,
+                                                                              const ov::AnyMap&));
+    MOCK_CONST_METHOD3(compile_model,
+                       ov::SoPtr<InferenceEngine::IExecutableNetworkInternal>(const std::shared_ptr<const ov::Model>&,
+                                                                              const ov::RemoteContext&,
+                                                                              const ov::AnyMap&));
+    MOCK_CONST_METHOD3(compile_model,
+                       ov::SoPtr<InferenceEngine::IExecutableNetworkInternal>(const std::string&,
+                                                                              const std::string&,
+                                                                              const ov::AnyMap&));
+    MOCK_CONST_METHOD4(compile_model,
+                       InferenceEngine::SoExecutableNetworkInternal(const std::string&,
+                                                                    const ov::Tensor&,
+                                                                    const std::string&,
+                                                                    const ov::AnyMap&));
+    MOCK_CONST_METHOD3(read_model, std::shared_ptr<ov::Model>(const std::string&, const ov::Tensor&, bool));
+    MOCK_CONST_METHOD2(read_model, std::shared_ptr<ov::Model>(const std::string&, const std::string&));
+    MOCK_CONST_METHOD1(get_default_context, ov::RemoteContext(const std::string&));
     ~MockICore() = default;
 };
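Because both API generations are now mocked on one object, a test can stub legacy and new calls side by side; a brief gmock sketch (plugin_under_test is a hypothetical legacy plugin consuming the core):

auto mock_core = std::make_shared<MockICore>();

// Legacy-API expectation ...
EXPECT_CALL(*mock_core, isNewAPI()).WillRepeatedly(testing::Return(false));
// ... and its new-API counterpart added in this change.
EXPECT_CALL(*mock_core, is_new_api()).WillRepeatedly(testing::Return(true));

plugin_under_test->SetCore(mock_core);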

View File

@ -2,19 +2,20 @@
 // SPDX-License-Identifier: Apache-2.0
 //
+#include "mock_plugin.hpp"
 #include <iostream>
-#include <utility>
 #include <map>
 #include <string>
+#include <utility>
-#include "openvino/runtime/common.hpp"
-#include "mock_plugin.hpp"
 #include "description_buffer.hpp"
+#include "openvino/runtime/common.hpp"
 using namespace std;
 using namespace InferenceEngine;
-MockPlugin::MockPlugin(InferenceEngine::IInferencePlugin *target) {
+MockPlugin::MockPlugin(InferenceEngine::IInferencePlugin* target) {
     _target = target;
 }
@ -25,7 +26,8 @@ void MockPlugin::SetConfig(const std::map<std::string, std::string>& _config) {
     }
 }
-Parameter MockPlugin::GetMetric(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const {
+Parameter MockPlugin::GetMetric(const std::string& name,
+                                const std::map<std::string, InferenceEngine::Parameter>& options) const {
     if (_target) {
         return _target->GetMetric(name, options);
     } else {
@ -33,9 +35,9 @@ Parameter MockPlugin::GetMetric(const std::string& name, const std::map<std::str
     }
 }
-std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>
-MockPlugin::LoadNetwork(const CNNNetwork &network,
-                        const std::map<std::string, std::string> &config) {
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::LoadNetwork(
+    const CNNNetwork& network,
+    const std::map<std::string, std::string>& config) {
     if (_target) {
         return _target->LoadNetwork(network, config);
     } else {
@ -43,10 +45,10 @@ MockPlugin::LoadNetwork(const CNNNetwork &network,
     }
 }
-std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>
-MockPlugin::LoadNetwork(const CNNNetwork& network,
-                        const std::map<std::string, std::string>& config,
-                        const std::shared_ptr<RemoteContext>& context) {
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::LoadNetwork(
+    const CNNNetwork& network,
+    const std::map<std::string, std::string>& config,
+    const std::shared_ptr<RemoteContext>& context) {
     if (_target) {
         return _target->LoadNetwork(network, config, context);
     } else {
@ -54,9 +56,9 @@ MockPlugin::LoadNetwork(const CNNNetwork& network,
     }
 }
-ov::SoPtr<InferenceEngine::IExecutableNetworkInternal>
-MockPlugin::LoadNetwork(const std::string &modelPath,
-                        const std::map<std::string, std::string> &config) {
+ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::LoadNetwork(
+    const std::string& modelPath,
+    const std::map<std::string, std::string>& config) {
     if (_target) {
         return _target->LoadNetwork(modelPath, config);
     } else {
@ -64,15 +66,15 @@ MockPlugin::LoadNetwork(const std::string &modelPath,
     }
 }
-std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>
-MockPlugin::LoadExeNetworkImpl(const CNNNetwork& network,
-                               const std::map<std::string, std::string>& config) {
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::LoadExeNetworkImpl(
+    const CNNNetwork& network,
+    const std::map<std::string, std::string>& config) {
     return {};
 }
-std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>
-MockPlugin::ImportNetwork(std::istream& networkModel,
-                          const std::map<std::string, std::string>& config) {
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::ImportNetwork(
+    std::istream& networkModel,
+    const std::map<std::string, std::string>& config) {
     if (_target) {
         return _target->ImportNetwork(networkModel, config);
     } else {
@ -80,10 +82,10 @@ MockPlugin::ImportNetwork(std::istream& networkModel,
     }
 }
-std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>
-MockPlugin::ImportNetwork(std::istream& networkModel,
-                          const std::shared_ptr<InferenceEngine::RemoteContext>& context,
-                          const std::map<std::string, std::string>& config) {
+std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> MockPlugin::ImportNetwork(
+    std::istream& networkModel,
+    const std::shared_ptr<InferenceEngine::RemoteContext>& context,
+    const std::map<std::string, std::string>& config) {
     if (_target) {
         return _target->ImportNetwork(networkModel, context, config);
     } else {
@ -99,9 +101,8 @@ std::shared_ptr<InferenceEngine::RemoteContext> MockPlugin::GetDefaultContext(co
     }
 }
-InferenceEngine::QueryNetworkResult
-MockPlugin::QueryNetwork(const InferenceEngine::CNNNetwork& network,
-                         const std::map<std::string, std::string>& config) const {
+InferenceEngine::QueryNetworkResult MockPlugin::QueryNetwork(const InferenceEngine::CNNNetwork& network,
+                                                             const std::map<std::string, std::string>& config) const {
     if (_target) {
         return _target->QueryNetwork(network, config);
     } else {
@ -130,20 +131,14 @@ std::string MockPlugin::GetName() const noexcept {
     return InferenceEngine::IInferencePlugin::GetName();
 }
-InferenceEngine::IInferencePlugin *__target = nullptr;
+InferenceEngine::IInferencePlugin* __target = nullptr;
-OPENVINO_PLUGIN_API void CreatePluginEngine(std::shared_ptr<InferenceEngine::IInferencePlugin>& plugin) {
-    IInferencePlugin *p = nullptr;
+OPENVINO_PLUGIN_API void CreatePluginEngine(std::shared_ptr<ov::IPlugin>& plugin) {
+    IInferencePlugin* p = nullptr;
     std::swap(__target, p);
-    plugin = std::make_shared<MockPlugin>(p);
+    plugin = convert_plugin(std::make_shared<MockPlugin>(p));
 }
-OPENVINO_PLUGIN_API InferenceEngine::IInferencePlugin*
-CreatePluginEngineProxy(InferenceEngine::IInferencePlugin *target) {
-    return new MockPlugin(target);
-}
-OPENVINO_PLUGIN_API void InjectProxyEngine(InferenceEngine::IInferencePlugin *target) {
+OPENVINO_PLUGIN_API void InjectProxyEngine(InferenceEngine::IInferencePlugin* target) {
     __target = target;
 }

View File

@ -16,7 +16,7 @@
 std::function<void()> load_unload_plugin(const std::string &target_device, const int &api_version) {
     return [&] {
         auto ie_api_wrapper = create_infer_api_wrapper(api_version);
-        // get_versions silently register plugin in `plugins` through `GetCPPPluginByName`
+        // get_versions silently register plugin in `plugins` through `get_plugin`
         ie_api_wrapper->load_plugin(target_device);
         // Remove plugin for target_device from `plugins`
         ie_api_wrapper->unload_plugin(target_device);

View File

@ -39,7 +39,7 @@ void test_load_unload_plugin_full_pipeline(const std::string &model, const std::
         if (i == n / 2) {
             log_info("Half of the test have already passed");
         }
-        // get_versions silently register plugin in `plugins` through `GetCPPPluginByName`
+        // get_versions silently register plugin in `plugins` through `get_plugin`
         ie_api_wrapper->load_plugin(target_device);
         // Remove plugin for target_device from `plugins`
         ie_api_wrapper->unload_plugin(target_device);