From 2fefe1164d694cfa36cba7a74f734d2d4bedd237 Mon Sep 17 00:00:00 2001
From: Anton Pankratv
Date: Fri, 20 Aug 2021 07:17:56 +0300
Subject: [PATCH] Added common.hpp file with aliases (#7158)

---
 .../include/openvino/runtime/common.hpp  | 23 ++++++
 .../include/openvino/runtime/core.hpp    | 54 +++++++-------
 .../src/inference_engine/src/ie_core.cpp | 73 +++++++++----------
 3 files changed, 83 insertions(+), 67 deletions(-)
 create mode 100644 inference-engine/src/inference_engine/include/openvino/runtime/common.hpp

diff --git a/inference-engine/src/inference_engine/include/openvino/runtime/common.hpp b/inference-engine/src/inference_engine/include/openvino/runtime/common.hpp
new file mode 100644
index 00000000000..9c0c2e93192
--- /dev/null
+++ b/inference-engine/src/inference_engine/include/openvino/runtime/common.hpp
@@ -0,0 +1,23 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+/**
+ * @brief This is a header file for the OpenVINO Runtime common aliases that depend only on the external API
+ *
+ * @file openvino/runtime/common.hpp
+ */
+#pragma once
+
+#include <map>
+#include <string>
+
+namespace ov {
+namespace ie = InferenceEngine;
+namespace runtime {
+/**
+ * @brief This type of map is commonly used to pass a set of parameters
+ */
+using ConfigMap = std::map<std::string, std::string>;
+}  // namespace runtime
+}  // namespace ov
\ No newline at end of file
diff --git a/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp b/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp
index 0ececc87aa5..e54babcc3f3 100644
--- a/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp
+++ b/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp
@@ -15,6 +15,7 @@
 #include <string>
 #include <vector>
 
+#include "common.hpp"
 #include "cpp/ie_executable_network.hpp"
 #include "ie_plugin_config.hpp"
 #include "ie_version.hpp"
@@ -57,7 +58,7 @@ public:
      * @param deviceName Device name to identify plugin
      * @return A vector of versions
      */
-    std::map<std::string, InferenceEngine::Version> get_versions(const std::string& deviceName) const;
+    std::map<std::string, ie::Version> get_versions(const std::string& deviceName) const;
 
 #ifdef ENABLE_UNICODE_PATH_SUPPORT
     /**
@@ -101,7 +102,7 @@ public:
      * @return Function
      */
     std::shared_ptr<ngraph::Function> read_model(const std::string& model,
-                                                 const std::shared_ptr<const InferenceEngine::Blob>& weights) const;
+                                                 const std::shared_ptr<const ie::Blob>& weights) const;
 
     /**
      * @brief Creates an executable network from a network object.
      *
      * Users can create as many networks as they need and use them simultaneously (up to the limitation of the hardware
      * resources)
      *
      * @param network Function object acquired from Core::read_model
      * @param deviceName Name of device to load network to
      * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
      * operation
      * @return An executable network reference
      */
-    InferenceEngine::ExecutableNetwork compile_model(const std::shared_ptr<const ngraph::Function>& network,
-                                                     const std::string& deviceName,
-                                                     const std::map<std::string, std::string>& config = {});
+    ie::ExecutableNetwork compile_model(const std::shared_ptr<const ngraph::Function>& network,
+                                        const std::string& deviceName,
+                                        const ConfigMap& config = {});
 
     /**
      * @brief Reads model and creates an executable network from IR or ONNX file
      * @param modelPath path to model
      * @param deviceName Name of device to load network to
      * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
      * operation
      *
      * @return An executable network reference
      */
-    InferenceEngine::ExecutableNetwork compile_model(const std::string& modelPath,
-                                                     const std::string& deviceName,
-                                                     const std::map<std::string, std::string>& config = {});
+    ie::ExecutableNetwork compile_model(const std::string& modelPath,
+                                        const std::string& deviceName,
+                                        const ConfigMap& config = {});
 
     /**
      * @brief Creates an executable network from a network object within a specified remote context.
@@ -144,15 +145,15 @@ public:
      * operation
      * @return An executable network object
      */
-    InferenceEngine::ExecutableNetwork compile_model(const std::shared_ptr<const ngraph::Function>& network,
-                                                     const std::shared_ptr<InferenceEngine::RemoteContext>& context,
-                                                     const std::map<std::string, std::string>& config = {});
+    ie::ExecutableNetwork compile_model(const std::shared_ptr<const ngraph::Function>& network,
+                                        const std::shared_ptr<ie::RemoteContext>& context,
+                                        const ConfigMap& config = {});
 
     /**
      * @brief Registers extension
      * @param extension Pointer to already loaded extension
      */
-    void add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension);
+    void add_extension(const std::shared_ptr<ie::IExtension>& extension);
 
     /**
      * @brief Creates an executable network from a previously exported network
      * @param networkModel network model stream
      * @param deviceName Name of device to load executable network on
      * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
      * operation
      * @return An executable network reference
      */
-    InferenceEngine::ExecutableNetwork import_model(std::istream& networkModel,
-                                                    const std::string& deviceName,
-                                                    const std::map<std::string, std::string>& config = {});
+    ie::ExecutableNetwork import_model(std::istream& networkModel,
+                                       const std::string& deviceName,
+                                       const ConfigMap& config = {});
 
     /**
      * @brief Creates an executable network from a previously exported network within a specified
      * remote context.
      * @param networkModel Network model stream
      * @param context Pointer to RemoteContext object
      * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
      * operation
      * @return An executable network reference
      */
-    InferenceEngine::ExecutableNetwork import_model(std::istream& networkModel,
-                                                    const std::shared_ptr<InferenceEngine::RemoteContext>& context,
-                                                    const std::map<std::string, std::string>& config = {});
+    ie::ExecutableNetwork import_model(std::istream& networkModel,
+                                       const std::shared_ptr<ie::RemoteContext>& context,
+                                       const ConfigMap& config = {});
 
     /**
      * @brief Query device if it supports specified network with specified configuration
      *
      * @param deviceName A name of a device to query
      * @param network Network object to query
      * @param config Optional map of pairs: (config parameter name, config parameter value)
      * @return An object containing a map of pairs a layer name -> a device name supporting this layer.
      */
-    InferenceEngine::QueryNetworkResult query_model(const std::shared_ptr<const ngraph::Function>& network,
-                                                    const std::string& deviceName,
-                                                    const std::map<std::string, std::string>& config = {}) const;
+    ie::QueryNetworkResult query_model(const std::shared_ptr<const ngraph::Function>& network,
+                                       const std::string& deviceName,
+                                       const ConfigMap& config = {}) const;
 
     /**
      * @brief Sets configuration for device, acceptable keys can be found in ie_plugin_config.hpp
      *
      * @param deviceName An optional name of a device. If device name is not specified, the config is set for all the
      * registered devices.
      *
      * @param config Map of pairs: (config parameter name, config parameter value)
      */
-    void set_config(const std::map<std::string, std::string>& config, const std::string& deviceName = {});
+    void set_config(const ConfigMap& config, const std::string& deviceName = {});
 
     /**
      * @brief Gets configuration dedicated to device behaviour.
      *
      * The method is targeted to extract information which can be set via SetConfig method.
      *
      * @param deviceName - A name of a device to get a config value.
      * @param name - config key.
      * @return Value of config corresponding to config key.
      */
-    InferenceEngine::Parameter get_config(const std::string& deviceName, const std::string& name) const;
+    ie::Parameter get_config(const std::string& deviceName, const std::string& name) const;
 
     /**
      * @brief Gets general runtime metric for dedicated hardware.
      *
      * The method is needed to request common device properties
      * which are executable network agnostic. It can be device name, temperature, other device-specific values.
      *
      * @param deviceName - A name of a device to get a metric value.
      * @param name - metric name to request.
      * @return Metric value corresponding to metric key.
      */
-    InferenceEngine::Parameter get_metric(const std::string& deviceName, const std::string& name) const;
+    ie::Parameter get_metric(const std::string& deviceName, const std::string& name) const;
 
     /**
      * @brief Returns devices available for neural networks inference
@@ -290,15 +291,14 @@ public:
      * @param params Map of device-specific shared context parameters.
      * @return A shared pointer to a created remote context.
      */
-    std::shared_ptr<InferenceEngine::RemoteContext> create_context(const std::string& deviceName,
-                                                                   const InferenceEngine::ParamMap& params);
+    std::shared_ptr<ie::RemoteContext> create_context(const std::string& deviceName, const ie::ParamMap& params);
 
     /**
      * @brief Get a pointer to default(plugin-supplied) shared context object for specified accelerator device.
      * @param deviceName - A name of a device to get a default shared context from.
      * @return A shared pointer to a default remote context.
      */
-    std::shared_ptr<InferenceEngine::RemoteContext> get_default_context(const std::string& deviceName);
+    std::shared_ptr<ie::RemoteContext> get_default_context(const std::string& deviceName);
 };
 }  // namespace runtime
 }  // namespace ov
diff --git a/inference-engine/src/inference_engine/src/ie_core.cpp b/inference-engine/src/inference_engine/src/ie_core.cpp
index 5792e6388e0..0c63b75b2bf 100644
--- a/inference-engine/src/inference_engine/src/ie_core.cpp
+++ b/inference-engine/src/inference_engine/src/ie_core.cpp
@@ -1218,7 +1218,7 @@ Core::Core(const std::string& xmlConfigFile) {
     register_plugins(core_detail::parseXmlConfig(xmlConfigFile));
 }
 
-std::map<std::string, InferenceEngine::Version> Core::get_versions(const std::string& deviceName) const {
+std::map<std::string, ie::Version> Core::get_versions(const std::string& deviceName) const {
     return _impl->GetVersions(deviceName);
 }
 
@@ -1232,49 +1232,45 @@ std::shared_ptr<ngraph::Function> Core::read_model(const std::wstring& modelPath
 std::shared_ptr<ngraph::Function> Core::read_model(const std::string& modelPath, const std::string& binPath) const {
     return _impl->ReadNetwork(modelPath, binPath).getFunction();
 }
-std::shared_ptr<ngraph::Function> Core::read_model(const std::string& model,
-                                                   const InferenceEngine::Blob::CPtr& weights) const {
+std::shared_ptr<ngraph::Function> Core::read_model(const std::string& model, const ie::Blob::CPtr& weights) const {
     return _impl->ReadNetwork(model, weights).getFunction();
 }
-InferenceEngine::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
-                                                       const std::string& deviceName,
-                                                       const std::map<std::string, std::string>& config) {
-    auto exec = _impl->LoadNetwork(InferenceEngine::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
-                                   deviceName,
-                                   config);
+ie::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
+                                          const std::string& deviceName,
+                                          const ConfigMap& config) {
+    auto exec =
+        _impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), deviceName, config);
     return {exec, exec};
 }
-InferenceEngine::ExecutableNetwork Core::compile_model(const std::string& modelPath,
-                                                       const std::string& deviceName,
-                                                       const std::map<std::string, std::string>& config) {
+ie::ExecutableNetwork Core::compile_model(const std::string& modelPath,
+                                          const std::string& deviceName,
+                                          const ConfigMap& config) {
     auto exec = _impl->LoadNetwork(modelPath, deviceName, config);
     return {exec, exec};
 }
-InferenceEngine::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
-                                                       const InferenceEngine::RemoteContext::Ptr& context,
-                                                       const std::map<std::string, std::string>& config) {
-    auto exec = _impl->LoadNetwork(InferenceEngine::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
-                                   context,
-                                   config);
+ie::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
+                                          const ie::RemoteContext::Ptr& context,
+                                          const ConfigMap& config) {
+    auto exec = _impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), context, config);
     return {exec, exec};
 }
-void Core::add_extension(const InferenceEngine::IExtensionPtr& extension) {
+void Core::add_extension(const ie::IExtensionPtr& extension) {
    _impl->AddExtension(extension);
 }
-InferenceEngine::ExecutableNetwork Core::import_model(std::istream& networkModel,
-                                                      const std::string& deviceName,
-                                                      const std::map<std::string, std::string>& config) {
+ie::ExecutableNetwork Core::import_model(std::istream& networkModel,
+                                         const std::string& deviceName,
+                                         const ConfigMap& config) {
     OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
     auto exec = _impl->ImportNetwork(networkModel, deviceName, config);
     return {exec, exec};
 }
 
-InferenceEngine::ExecutableNetwork Core::import_model(std::istream& networkModel,
-                                                      const InferenceEngine::RemoteContext::Ptr& context,
-                                                      const std::map<std::string, std::string>& config) {
+ie::ExecutableNetwork Core::import_model(std::istream& networkModel,
+                                         const ie::RemoteContext::Ptr& context,
+                                         const ConfigMap& config) {
     OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
 
     using ExportMagic = std::array<char, 4>;
@@ -1296,14 +1292,12 @@ InferenceEngine::ExecutableNetwork Core::import_model(std::istream& networkModel
     return {exec, exec};
 }
 
-InferenceEngine::QueryNetworkResult Core::query_model(const std::shared_ptr<const ngraph::Function>& network,
-                                                      const std::string& deviceName,
-                                                      const std::map<std::string, std::string>& config) const {
-    return _impl->QueryNetwork(InferenceEngine::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
-                               deviceName,
-                               config);
+ie::QueryNetworkResult Core::query_model(const std::shared_ptr<const ngraph::Function>& network,
+                                         const std::string& deviceName,
+                                         const ConfigMap& config) const {
+    return _impl->QueryNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), deviceName, config);
 }
-void Core::set_config(const std::map<std::string, std::string>& config, const std::string& deviceName) {
+void Core::set_config(const ConfigMap& config, const std::string& deviceName) {
     // HETERO case
     if (deviceName.find("HETERO:") == 0) {
         IE_THROW() << "SetConfig is supported only for HETERO itself (without devices). "
@@ -1337,7 +1331,7 @@ void Core::set_config(const std::map<std::string, std::string>& config, const st
     }
 }
 
-InferenceEngine::Parameter Core::get_config(const std::string& deviceName, const std::string& name) const {
+ie::Parameter Core::get_config(const std::string& deviceName, const std::string& name) const {
     // HETERO case
     {
         if (deviceName.find("HETERO:") == 0) {
@@ -1363,13 +1357,13 @@ InferenceEngine::Parameter Core::get_config(const std::string& deviceName, const
     auto parsed = core_detail::parseDeviceNameIntoConfig(deviceName);
 
     // we need to return a copy of Parameter object which is created on Core side,
-    // not in InferenceEngine plugin side, which can be unloaded from Core in a parallel thread
+    // not in ie plugin side, which can be unloaded from Core in a parallel thread
     // TODO: remove this WA after *-31417 is resolved
     return core_detail::copyParameterValue(
         _impl->GetCPPPluginByName(parsed._deviceName).GetConfig(name, parsed._config));
 }
 
-InferenceEngine::Parameter Core::get_metric(const std::string& deviceName, const std::string& name) const {
+ie::Parameter Core::get_metric(const std::string& deviceName, const std::string& name) const {
     return _impl->GetMetric(deviceName, name);
 }
 
@@ -1382,7 +1376,7 @@ void Core::register_plugin(const std::string& pluginName, const std::string& dev
 }
 
 void Core::unload_plugin(const std::string& deviceName) {
-    InferenceEngine::DeviceIDParser parser(deviceName);
+    ie::DeviceIDParser parser(deviceName);
     std::string devName = parser.getDeviceName();
 
     _impl->UnloadPluginByName(devName);
@@ -1392,8 +1386,7 @@ void Core::register_plugins(const std::string& xmlConfigFile) {
     _impl->RegisterPluginsInRegistry(xmlConfigFile);
 }
 
-InferenceEngine::RemoteContext::Ptr Core::create_context(const std::string& deviceName,
-                                                         const InferenceEngine::ParamMap& params) {
+ie::RemoteContext::Ptr Core::create_context(const std::string& deviceName, const ie::ParamMap& params) {
     if (deviceName.find("HETERO") == 0) {
         IE_THROW() << "HETERO device does not support remote context";
     }
@@ -1408,7 +1401,7 @@ InferenceEngine::RemoteContext::Ptr Core::create_context(const std::string& devi
     return _impl->GetCPPPluginByName(parsed._deviceName).CreateContext(parsed._config);
 }
 
-InferenceEngine::RemoteContext::Ptr Core::get_default_context(const std::string& deviceName) {
+ie::RemoteContext::Ptr Core::get_default_context(const std::string& deviceName) {
     if (deviceName.find("HETERO") == 0) {
         IE_THROW() << "HETERO device does not support remote context";
     }
@@ -1419,7 +1412,7 @@ InferenceEngine::RemoteContext::Ptr Core::get_default_context(const std::string&
         IE_THROW() << "AUTO device does not support remote context";
     }
 
-    auto parsed = core_detail::parseDeviceNameIntoConfig(deviceName, InferenceEngine::ParamMap());
+    auto parsed = core_detail::parseDeviceNameIntoConfig(deviceName, ie::ParamMap());
 
     return _impl->GetCPPPluginByName(parsed._deviceName).GetDefaultContext(parsed._config);
 }
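
Usage sketch (illustrative, not part of the diff): with this patch applied, a call
site can spell both the parameter map and the InferenceEngine types through the new
ov::runtime::ConfigMap and ov::ie aliases. A minimal example, assuming a placeholder
IR file "model.xml", the "CPU" device, and the standard "PERF_COUNT"/"YES" plugin
config key/value pair:

    #include <openvino/runtime/core.hpp>

    int main() {
        ov::runtime::Core core;

        // ov::runtime::ConfigMap is the new alias for std::map<std::string, std::string>
        ov::runtime::ConfigMap config = {{"PERF_COUNT", "YES"}};

        // ov::ie:: abbreviates InferenceEngine:: via the namespace alias from common.hpp
        ov::ie::ExecutableNetwork exec = core.compile_model("model.xml", "CPU", config);
        return 0;
    }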