From c8e331003ff2c498c0957b857673ce05114ba6ca Mon Sep 17 00:00:00 2001
From: Ilya Churaev
Date: Fri, 9 Jun 2023 18:08:53 +0400
Subject: [PATCH] Port some changes from proxy branch (#17961)

* Port some changes from proxy branch

* Port test changes

* Rewrite approach for compile model and tensor

* Fixed review
---
 src/core/include/openvino/runtime/tensor.hpp  |  8 ++++
 src/core/src/runtime/ov_tensor.cpp            |  5 ++
 .../openvino/runtime/icompiled_model.hpp      |  6 ++-
 .../dev_api/openvino/runtime/icore.hpp        | 48 ++++++++++++-------
 .../dev_api/openvino/runtime/iplugin.hpp      |  4 +-
 .../openvino/runtime/iremote_context.hpp      |  2 +
 src/inference/src/core.cpp                    |  3 +-
 src/inference/src/dev/core_impl.cpp           | 16 ++++++-
 src/inference/src/dev/core_impl.hpp           |  8 +++-
 .../cpp_interfaces/interface/mock_icore.hpp   |  5 +-
 .../mocks/mock_engine/mock_plugin.cpp         |  4 +-
 11 files changed, 77 insertions(+), 32 deletions(-)

diff --git a/src/core/include/openvino/runtime/tensor.hpp b/src/core/include/openvino/runtime/tensor.hpp
index a43f99fe3a6..29ce1a0ceb3 100644
--- a/src/core/include/openvino/runtime/tensor.hpp
+++ b/src/core/include/openvino/runtime/tensor.hpp
@@ -76,6 +76,14 @@ public:
     /// @brief Default constructor
     Tensor() = default;
 
+    /**
+     * @brief Copy constructor with adding new shared object
+     *
+     * @param other Original tensor
+     * @param so Shared object
+     */
+    Tensor(const Tensor& other, const std::shared_ptr<void>& so);
+
     /// @brief Default copy constructor
     /// @param other other Tensor object
     Tensor(const Tensor& other) = default;
diff --git a/src/core/src/runtime/ov_tensor.cpp b/src/core/src/runtime/ov_tensor.cpp
index 10c069b3715..8b64c0322b7 100644
--- a/src/core/src/runtime/ov_tensor.cpp
+++ b/src/core/src/runtime/ov_tensor.cpp
@@ -32,6 +32,11 @@ Tensor::~Tensor() {
     _impl = {};
 }
 
+Tensor::Tensor(const Tensor& tensor, const std::shared_ptr<void>& so) : _impl{tensor._impl}, _so{tensor._so} {
+    OPENVINO_ASSERT(_impl != nullptr, "Tensor was not initialized.");
+    _so.emplace_back(so);
+}
+
 Tensor::Tensor(const std::shared_ptr<ITensor>& impl, const std::vector<std::shared_ptr<void>>& so)
     : _impl{impl},
       _so{so} {
diff --git a/src/inference/dev_api/openvino/runtime/icompiled_model.hpp b/src/inference/dev_api/openvino/runtime/icompiled_model.hpp
index 50cc6f0ac79..84e188ca9a0 100644
--- a/src/inference/dev_api/openvino/runtime/icompiled_model.hpp
+++ b/src/inference/dev_api/openvino/runtime/icompiled_model.hpp
@@ -83,14 +83,14 @@ public:
      *
      * @return model outputs
      */
-    const std::vector<ov::Output<const ov::Node>>& outputs() const;
+    virtual const std::vector<ov::Output<const ov::Node>>& outputs() const;
 
     /**
      * @brief Gets all inputs from compiled model
      *
      * @return model inputs
      */
-    const std::vector<ov::Output<const ov::Node>>& inputs() const;
+    virtual const std::vector<ov::Output<const ov::Node>>& inputs() const;
 
     /**
      * @brief Create infer request
@@ -136,6 +136,8 @@ public:
      */
    std::shared_ptr<ov::IRemoteContext> get_context() const;
 
+    virtual ~ICompiledModel() = default;
+
 private:
     std::shared_ptr<const ov::IPlugin> m_plugin;
     std::vector<ov::Output<const ov::Node>> m_inputs;
diff --git a/src/inference/dev_api/openvino/runtime/icore.hpp b/src/inference/dev_api/openvino/runtime/icore.hpp
index c19b73b16ae..00f94c2dfb7 100644
--- a/src/inference/dev_api/openvino/runtime/icore.hpp
+++ b/src/inference/dev_api/openvino/runtime/icore.hpp
@@ -46,65 +46,65 @@ public:
     virtual std::shared_ptr<ov::Model> read_model(const std::string& model_path, const std::string& bin_path) const = 0;
 
     /**
-     * @brief Creates an executable network from a network object.
+     * @brief Creates a compiled model from a model object.
      *
-     * Users can create as many networks as they need and use
+     * Users can create as many models as they need and use
      * them simultaneously (up to the limitation of the hardware resources)
      *
      * @param model OpenVINO Model
-     * @param device_name Name of device to load network to
+     * @param device_name Name of device to load model to
      * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
      * operation
-     * @return An executable network reference
+     * @return A pointer to compiled model
      */
     virtual ov::SoPtr<ov::ICompiledModel> compile_model(const std::shared_ptr<const ov::Model>& model,
                                                         const std::string& device_name,
                                                         const ov::AnyMap& config = {}) const = 0;
 
     /**
-     * @brief Creates an executable network from a network object.
+     * @brief Creates a compiled model from a model object.
      *
-     * Users can create as many networks as they need and use
+     * Users can create as many models as they need and use
      * them simultaneously (up to the limitation of the hardware resources)
      *
      * @param model OpenVINO Model
      * @param context "Remote" (non-CPU) accelerator device-specific execution context to use
      * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
      * operation
-     * @return An executable network reference
+     * @return A pointer to compiled model
      */
     virtual ov::SoPtr<ov::ICompiledModel> compile_model(const std::shared_ptr<const ov::Model>& model,
                                                         const ov::RemoteContext& context,
                                                         const ov::AnyMap& config = {}) const = 0;
 
     /**
-     * @brief Creates an executable network from a model file.
+     * @brief Creates a compiled model from a model file.
      *
-     * Users can create as many networks as they need and use
+     * Users can create as many models as they need and use
      * them simultaneously (up to the limitation of the hardware resources)
      *
      * @param model_path Path to model
-     * @param device_name Name of device to load network to
+     * @param device_name Name of device to load model to
      * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
      * operation
-     * @return An executable network reference
+     * @return A pointer to compiled model
      */
     virtual ov::SoPtr<ov::ICompiledModel> compile_model(const std::string& model_path,
                                                         const std::string& device_name,
                                                         const ov::AnyMap& config) const = 0;
 
     /**
-     * @brief Creates an executable network from a model memory.
+     * @brief Creates a compiled model from a model memory.
      *
-     * Users can create as many networks as they need and use
+     * Users can create as many models as they need and use
      * them simultaneously (up to the limitation of the hardware resources)
      *
      * @param model_str String data of model
      * @param weights Model's weights
-     * @param device_name Name of device to load network to
+     * @param device_name Name of device to load model to
      * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
      * operation
-     * @return An executable network reference
+     * @return A pointer to compiled model
      */
     virtual ov::SoPtr<ov::ICompiledModel> compile_model(const std::string& model_str,
                                                         const ov::Tensor& weights,
@@ -112,17 +112,29 @@ public:
                                                         const ov::AnyMap& config) const = 0;
 
     /**
-     * @brief Creates an executable network from a previously exported network
+     * @brief Creates a compiled model from a previously exported model
      * @param model model stream
-     * @param device_name Name of device load executable network on
+     * @param device_name Name of device load executable model on
      * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
      * operation*
-     * @return An executable network reference
+     * @return A pointer to compiled model
      */
     virtual ov::SoPtr<ov::ICompiledModel> import_model(std::istream& model,
                                                        const std::string& device_name,
                                                        const ov::AnyMap& config = {}) const = 0;
 
+    /**
+     * @brief Creates a compiled model from a previously exported model
+     * @param model model stream
+     * @param context Remote context
+     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
+     * operation*
+     * @return A pointer to compiled model
+     */
+    virtual ov::SoPtr<ov::ICompiledModel> import_model(std::istream& modelStream,
+                                                       const ov::RemoteContext& context,
+                                                       const ov::AnyMap& config = {}) const = 0;
+
     /**
      * @brief Query device if it supports specified network with specified configuration
      *
diff --git a/src/inference/dev_api/openvino/runtime/iplugin.hpp b/src/inference/dev_api/openvino/runtime/iplugin.hpp
index 5015daa7eb8..4b2ee3bfa90 100644
--- a/src/inference/dev_api/openvino/runtime/iplugin.hpp
+++ b/src/inference/dev_api/openvino/runtime/iplugin.hpp
@@ -234,7 +234,7 @@ public:
      */
     const std::shared_ptr<ov::threading::ExecutorManager>& get_executor_manager() const;
 
-    ~IPlugin() = default;
+    virtual ~IPlugin() = default;
 
 protected:
     IPlugin();
@@ -294,8 +294,6 @@ constexpr static const auto create_plugin_function = OV_PP_TOSTRING(OV_CREATE_PL
         try {                                                                                  \
             plugin = ::std::make_shared<PluginType>(__VA_ARGS__);                              \
             plugin->set_version(version);                                                      \
-        } catch (const InferenceEngine::Exception& ex) {                                       \
-            OPENVINO_THROW(ex.what());                                                         \
         } catch (const std::exception& ex) {                                                   \
             OPENVINO_THROW(ex.what());                                                         \
         }                                                                                      \
diff --git a/src/inference/dev_api/openvino/runtime/iremote_context.hpp b/src/inference/dev_api/openvino/runtime/iremote_context.hpp
index fa96bbb6a16..9fddfae1e1d 100644
--- a/src/inference/dev_api/openvino/runtime/iremote_context.hpp
+++ b/src/inference/dev_api/openvino/runtime/iremote_context.hpp
@@ -21,6 +21,8 @@ namespace ov {
 
 class OPENVINO_RUNTIME_API IRemoteContext : public std::enable_shared_from_this<IRemoteContext> {
 public:
+    virtual ~IRemoteContext() = default;
+
     /**
      * @brief Returns name of a device on which underlying object is allocated.
     * Abstract method.
diff --git a/src/inference/src/core.cpp b/src/inference/src/core.cpp
index 19008d60ac9..5a7ef4ef1de 100644
--- a/src/inference/src/core.cpp
+++ b/src/inference/src/core.cpp
@@ -221,9 +221,8 @@ CompiledModel Core::import_model(std::istream& modelStream, const std::string& d
 
 CompiledModel Core::import_model(std::istream& modelStream, const RemoteContext& context, const AnyMap& config) {
     OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
-    auto parsed = parseDeviceNameIntoConfig(context.get_device_name(), config);
     OV_CORE_CALL_STATEMENT({
-        auto exec = _impl->get_plugin(parsed._deviceName).import_model(modelStream, context, parsed._config);
+        auto exec = _impl->import_model(modelStream, context, config);
         return {exec._ptr, exec._so};
     });
 }
diff --git a/src/inference/src/dev/core_impl.cpp b/src/inference/src/dev/core_impl.cpp
index c8a8bdd5c2f..17d31f19167 100644
--- a/src/inference/src/dev/core_impl.cpp
+++ b/src/inference/src/dev/core_impl.cpp
@@ -597,7 +597,8 @@ ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::compile_model_with_preprocess(ov::Pl
                                                                           const ov::AnyMap& config) const {
     std::shared_ptr preprocessed_model = model;
 
-    if (!is_new_api() && !std::dynamic_pointer_cast(plugin.m_ptr)) {
+    if (!is_new_api() && !std::dynamic_pointer_cast(plugin.m_ptr) &&
+        !is_virtual_device(plugin.get_name())) {
         ov::pass::Manager manager;
         manager.register_pass();
@@ -680,6 +681,19 @@ ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::import_model(std::istream& model,
     return compiled_model;
 }
 
+ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::import_model(std::istream& modelStream,
+                                                         const ov::RemoteContext& context,
+                                                         const ov::AnyMap& config) const {
+    OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
+    auto parsed = parseDeviceNameIntoConfig(context.get_device_name(), config);
+    auto compiled_model = get_plugin(parsed._deviceName).import_model(modelStream, parsed._config);
+    if (auto wrapper = std::dynamic_pointer_cast(compiled_model._ptr)) {
+        wrapper->get_executable_network()->loadedFromCache();
+    }
+
+    return compiled_model;
+}
+
 ov::SupportedOpsMap ov::CoreImpl::query_model(const std::shared_ptr<const ov::Model>& model,
                                               const std::string& device_name,
                                               const ov::AnyMap& config) const {
diff --git a/src/inference/src/dev/core_impl.hpp b/src/inference/src/dev/core_impl.hpp
index d68380a3b6f..3b8b192f550 100644
--- a/src/inference/src/dev/core_impl.hpp
+++ b/src/inference/src/dev/core_impl.hpp
@@ -219,8 +219,8 @@ public:
     void register_plugins_in_registry(const std::string& xml_config_file, const bool& by_abs_path = false);
 
     std::shared_ptr<const ov::Model> apply_auto_batching(const std::shared_ptr<const ov::Model>& model,
-                                                          std::string& deviceName,
-                                                          ov::AnyMap& config) const;
+                                                         std::string& deviceName,
+                                                         ov::AnyMap& config) const;
 
     /*
      * @brief Register plugins according to the build configuration
@@ -383,6 +383,10 @@ public:
                                                const std::string& device_name = {},
                                                const ov::AnyMap& config = {}) const override;
 
+    ov::SoPtr<ov::ICompiledModel> import_model(std::istream& modelStream,
+                                               const ov::RemoteContext& context,
+                                               const ov::AnyMap& config) const override;
+
     ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model,
                                     const std::string& device_name,
                                     const ov::AnyMap& config) const override;
diff --git a/src/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp b/src/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp
index 530bd9e1a7c..d53490ee91d 100644
--- a/src/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp
+++ b/src/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp
@@ -62,8 +62,7 @@ public:
     MOCK_CONST_METHOD1(DeviceSupportsModelCaching, bool(const std::string&));  // NOLINT not a cast to bool
     MOCK_METHOD2(GetSupportedConfig,
                  std::map<std::string, std::string>(const std::string&, const std::map<std::string, std::string>&));
-    MOCK_CONST_METHOD2(get_supported_property,
-                       ov::AnyMap(const std::string&, const ov::AnyMap&));
+    MOCK_CONST_METHOD2(get_supported_property, ov::AnyMap(const std::string&, const ov::AnyMap&));
     MOCK_CONST_METHOD0(isNewAPI, bool());
     MOCK_METHOD1(GetDefaultContext, InferenceEngine::RemoteContext::Ptr(const std::string&));
 
@@ -92,6 +91,8 @@ public:
     MOCK_CONST_METHOD3(read_model, std::shared_ptr<ov::Model>(const std::string&, const ov::Tensor&, bool));
     MOCK_CONST_METHOD2(read_model, std::shared_ptr<ov::Model>(const std::string&, const std::string&));
     MOCK_CONST_METHOD1(get_default_context, ov::RemoteContext(const std::string&));
+    MOCK_CONST_METHOD3(import_model,
+                       ov::SoPtr<ov::ICompiledModel>(std::istream&, const ov::RemoteContext&, const ov::AnyMap&));
 
     ~MockICore() = default;
 };
diff --git a/src/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp b/src/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp
index 46b322d2ad3..3022cc95e57 100644
--- a/src/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp
+++ b/src/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp
@@ -21,9 +21,9 @@
 #include "openvino/runtime/iplugin.hpp"
 
 class MockInternalPlugin : public ov::IPlugin {
-    ov::IPlugin* m_plugin;
+    ov::IPlugin* m_plugin = nullptr;
    std::shared_ptr<ov::IPlugin> m_converted_plugin;
-    InferenceEngine::IInferencePlugin* m_old_plugin;
+    InferenceEngine::IInferencePlugin* m_old_plugin = nullptr;
     ov::AnyMap config;
 
 public:
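
The snippet below is not part of the patch: a minimal usage sketch of the new Tensor copy constructor added in tensor.hpp above, assuming a caller (for example, a proxy or hardware plugin) holds a std::shared_ptr<void> handle to a shared library that must outlive the tensor. The helper name wrap_with_so is hypothetical.

#include <memory>

#include "openvino/runtime/tensor.hpp"

// Returns a copy of `original` that additionally keeps `plugin_so` alive:
// the new constructor reuses the same tensor implementation and appends
// `plugin_so` to the tensor's list of shared objects.
ov::Tensor wrap_with_so(const ov::Tensor& original, const std::shared_ptr<void>& plugin_so) {
    return ov::Tensor(original, plugin_so);
}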
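
Also not part of the patch: a sketch of how the new ICore::import_model() overload taking a RemoteContext might be called, assuming the caller already holds an ov::ICore reference (as the proxy plugin would), a remote context, and a stream with a previously exported model. The helper name import_on_context is hypothetical.

#include <istream>

#include "openvino/runtime/icore.hpp"

// The device is resolved from the context inside CoreImpl::import_model(), which parses
// the context's device name and forwards the stream to that plugin's import_model().
ov::SoPtr<ov::ICompiledModel> import_on_context(const ov::ICore& core,
                                                std::istream& exported_blob,
                                                const ov::RemoteContext& context) {
    return core.import_model(exported_blob, context, {});
}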