diff --git a/inference-engine/src/gna_plugin/gna_plugin.hpp b/inference-engine/src/gna_plugin/gna_plugin.hpp index 3e54c224746..33df51710ba 100644 --- a/inference-engine/src/gna_plugin/gna_plugin.hpp +++ b/inference-engine/src/gna_plugin/gna_plugin.hpp @@ -110,6 +110,8 @@ class GNAPlugin : public InferenceEngine::IInferencePlugin { InferenceEngine::IExecutableNetworkInternal::Ptr LoadNetwork(const InferenceEngine::CNNNetwork &network, const std::map<std::string, std::string> &config_map, InferenceEngine::RemoteContext::Ptr context) override { THROW_GNA_EXCEPTION << "Not implemented"; } + InferenceEngine::ExecutableNetwork LoadNetwork(const std::string &modelPath, + const std::map<std::string, std::string> &config_map) override { THROW_GNA_EXCEPTION << "Not implemented"; } bool Infer(const InferenceEngine::Blob &input, InferenceEngine::Blob &result); void SetCore(InferenceEngine::ICore*) noexcept override {} InferenceEngine::ICore* GetCore() const noexcept override {return nullptr;} diff --git a/inference-engine/src/inference_engine/ie_core.cpp b/inference-engine/src/inference_engine/ie_core.cpp index 1bc038ff071..14cd4d9a40e 100644 --- a/inference-engine/src/inference_engine/ie_core.cpp +++ b/inference-engine/src/inference_engine/ie_core.cpp @@ -493,9 +493,8 @@ public: return res; } - // TODO: In future this method can be added to ICore interface ExecutableNetwork LoadNetwork(const std::string& modelPath, const std::string& deviceName, - const std::map<std::string, std::string>& config) { + const std::map<std::string, std::string>& config) override { OV_ITT_SCOPE(FIRST_INFERENCE, itt::domains::IE_LT, "Core::LoadNetwork::Path"); auto parsed = parseDeviceNameIntoConfig(deviceName, config); auto plugin = GetCPPPluginByName(parsed._deviceName); @@ -511,6 +510,8 @@ public: auto cnnNetwork = ReadNetwork(modelPath, std::string()); res = LoadNetworkImpl(cnnNetwork, plugin, parsed._config, nullptr, hash, modelPath); } + } else if (cacheManager) { + res = plugin.LoadNetwork(modelPath, parsed._config); } else { auto cnnNetwork = ReadNetwork(modelPath, std::string()); res = 
LoadNetworkImpl(cnnNetwork, plugin, parsed._config, nullptr, {}, modelPath); diff --git a/inference-engine/src/inference_engine/ie_plugin_cpp.hpp b/inference-engine/src/inference_engine/ie_plugin_cpp.hpp index d87b16765d6..d40bdc478aa 100644 --- a/inference-engine/src/inference_engine/ie_plugin_cpp.hpp +++ b/inference-engine/src/inference_engine/ie_plugin_cpp.hpp @@ -88,6 +88,10 @@ public: PLUGIN_CALL_STATEMENT(return ExecutableNetwork(actual->LoadNetwork(network, config, context), actual)); } + ExecutableNetwork LoadNetwork(const std::string& modelPath, const std::map<std::string, std::string>& config) { + PLUGIN_CALL_STATEMENT(return actual->LoadNetwork(modelPath, config)); + } + QueryNetworkResult QueryNetwork(const CNNNetwork& network, const std::map<std::string, std::string>& config) const { QueryNetworkResult res; diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp index 2621c73a340..dcaf4a1e529 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp @@ -72,6 +72,12 @@ public: return impl; } + ExecutableNetwork LoadNetwork(const std::string& modelPath, + const std::map<std::string, std::string>& config) override { + auto cnnNet = GetCore()->ReadNetwork(modelPath, std::string()); + return GetCore()->LoadNetwork(cnnNet, GetName(), config); + } + IExecutableNetworkInternal::Ptr ImportNetwork(const std::string& modelFileName, const std::map<std::string, std::string>& config) override { (void)modelFileName; diff --git a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp index 894605d3d79..18f4658d6a4 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp @@ -166,6 +166,16 @@ public: virtual std::shared_ptr<IExecutableNetworkInternal> 
LoadNetwork(const CNNNetwork& network, const std::map<std::string, std::string>& config, RemoteContext::Ptr context) = 0; + + /** + * @brief Creates an executable network from model file path + * @param modelPath A path to model + * @param config A string-string map of config parameters relevant only for this load operation + * @return Created Executable Network object + */ + virtual ExecutableNetwork LoadNetwork(const std::string& modelPath, + const std::map<std::string, std::string>& config) = 0; + /** * @brief Registers extension within plugin * @param extension - pointer to already loaded extension diff --git a/inference-engine/src/plugin_api/ie_icore.hpp b/inference-engine/src/plugin_api/ie_icore.hpp index d8acf837640..6c35277726f 100644 --- a/inference-engine/src/plugin_api/ie_icore.hpp +++ b/inference-engine/src/plugin_api/ie_icore.hpp @@ -66,6 +66,21 @@ public: virtual ExecutableNetwork LoadNetwork(const CNNNetwork& network, const std::string& deviceName, const std::map<std::string, std::string>& config = {}) = 0; + /** + * @brief Creates an executable network from a model file. 
+ * + * Users can create as many networks as they need and use + * them simultaneously (up to the limitation of the hardware resources) + * + * @param modelPath Path to model + * @param deviceName Name of device to load network to + * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load + * operation + * @return An executable network reference + */ + virtual ExecutableNetwork LoadNetwork(const std::string& modelPath, const std::string& deviceName, + const std::map<std::string, std::string>& config) = 0; + /** * @brief Creates an executable network from a previously exported network * @param networkModel network model stream diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp index c1adf355f16..7e264217bf9 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp @@ -18,6 +18,8 @@ public: const InferenceEngine::CNNNetwork&, const std::string&, const std::map<std::string, std::string>&)); MOCK_METHOD3(LoadNetwork, InferenceEngine::ExecutableNetwork( const InferenceEngine::CNNNetwork&, const InferenceEngine::RemoteContext::Ptr &, const std::map<std::string, std::string>&)); + MOCK_METHOD3(LoadNetwork, InferenceEngine::ExecutableNetwork( + const std::string &, const std::string &, const std::map<std::string, std::string>&)); MOCK_METHOD3(ImportNetwork, InferenceEngine::ExecutableNetwork( std::istream&, const std::string&, const std::map<std::string, std::string>&)); diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp index a36e1edcf93..7f450f660f3 100644 --- 
a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp @@ -15,6 +15,8 @@ public: MOCK_METHOD1(AddExtension, void(InferenceEngine::IExtensionPtr)); MOCK_METHOD2(LoadNetwork, std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>( const InferenceEngine::CNNNetwork&, const std::map<std::string, std::string>&)); + MOCK_METHOD2(LoadNetwork, InferenceEngine::ExecutableNetwork( + const std::string&, const std::map<std::string, std::string>&)); MOCK_METHOD2(ImportNetwork, std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>( const std::string&, const std::map<std::string, std::string>&)); MOCK_METHOD1(SetConfig, void(const std::map<std::string, std::string> &)); diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp index a848d66fda6..73ecba07d82 100644 --- a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp +++ b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp @@ -199,9 +199,9 @@ protected: mockIExeNet = std::make_shared<MockIExecutableNetworkInternal>(); ON_CALL(*mockIExeNet, CreateInferRequest()).WillByDefault(Return(mock_request)); std::unique_ptr<MockIInferencePlugin> mockIPluginPtr{new MockIInferencePlugin}; - ON_CALL(*mockIPluginPtr, LoadNetwork(_, _)).WillByDefault(Return(mockIExeNet)); + ON_CALL(*mockIPluginPtr, LoadNetwork(MatcherCast<const CNNNetwork&>(_), _)).WillByDefault(Return(mockIExeNet)); plugin = InferenceEngine::InferencePlugin{InferenceEngine::details::SOPointer<InferenceEngine::IInferencePlugin>{mockIPluginPtr.release()}}; - exeNetwork = plugin.LoadNetwork({}, {}); + exeNetwork = plugin.LoadNetwork(CNNNetwork{}, {}); request = exeNetwork.CreateInferRequest(); _incorrectName = "incorrect_name"; _inputName = MockNotEmptyICNNNetwork::INPUT_BLOB_NAME; @@ -223,9 +223,9 @@ protected: auto mockIExeNet = std::make_shared<MockIExecutableNetworkInternal>(); ON_CALL(*mockIExeNet, CreateInferRequest()).WillByDefault(Return(mockInferRequestInternal)); 
std::unique_ptr<MockIInferencePlugin> mockIPluginPtr{new MockIInferencePlugin}; - ON_CALL(*mockIPluginPtr, LoadNetwork(_, _)).WillByDefault(Return(mockIExeNet)); + ON_CALL(*mockIPluginPtr, LoadNetwork(MatcherCast<const CNNNetwork&>(_), _)).WillByDefault(Return(mockIExeNet)); auto plugin = InferenceEngine::InferencePlugin{InferenceEngine::details::SOPointer<InferenceEngine::IInferencePlugin>{mockIPluginPtr.release()}}; - auto exeNetwork = plugin.LoadNetwork({}, {}); + auto exeNetwork = plugin.LoadNetwork(CNNNetwork{}, {}); return exeNetwork.CreateInferRequest(); } diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_memory_state_internal_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_memory_state_internal_test.cpp index c37408606d2..0557ed203b1 100644 --- a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_memory_state_internal_test.cpp +++ b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_memory_state_internal_test.cpp @@ -36,9 +36,9 @@ class VariableStateTests : public ::testing::Test { mockVariableStateInternal = make_shared<MockIVariableStateInternal>(); ON_CALL(*mockExeNetworkInternal, CreateInferRequest()).WillByDefault(Return(mockInferRequestInternal)); std::unique_ptr<MockIInferencePlugin> mockIPluginPtr{new MockIInferencePlugin}; - ON_CALL(*mockIPluginPtr, LoadNetwork(_, _)).WillByDefault(Return(mockExeNetworkInternal)); + ON_CALL(*mockIPluginPtr, LoadNetwork(MatcherCast<const CNNNetwork&>(_), _)).WillByDefault(Return(mockExeNetworkInternal)); plugin = InferenceEngine::InferencePlugin{InferenceEngine::details::SOPointer<InferenceEngine::IInferencePlugin>{mockIPluginPtr.release()}}; - net = plugin.LoadNetwork({}, {}); + net = plugin.LoadNetwork(CNNNetwork{}, {}); req = net.CreateInferRequest(); } }; diff --git a/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp b/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp index e205be6cd92..1df88845775 100644 --- 
b/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp @@ -20,6 +20,7 @@ #include "unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp" using testing::_; +using testing::MatcherCast; using testing::Throw; using testing::Ref; using testing::Return; @@ -52,9 +53,9 @@ protected: virtual void SetUp() { mockIExeNet = std::make_shared<MockIExecutableNetworkInternal>(); std::unique_ptr<MockIInferencePlugin> mockIPluginPtr{new MockIInferencePlugin}; - ON_CALL(*mockIPluginPtr, LoadNetwork(_, _)).WillByDefault(Return(mockIExeNet)); + ON_CALL(*mockIPluginPtr, LoadNetwork(MatcherCast<const CNNNetwork&>(_), _)).WillByDefault(Return(mockIExeNet)); plugin = InferenceEngine::InferencePlugin{InferenceEngine::details::SOPointer<InferenceEngine::IInferencePlugin>{mockIPluginPtr.release()}}; - exeNetwork = plugin.LoadNetwork({}, {}); + exeNetwork = plugin.LoadNetwork(CNNNetwork{}, {}); } };