Added common.hpp file with aliases (#7158)

Anton Pankratv 2021-08-20 07:17:56 +03:00 committed by GitHub
parent 7aeec6ffe4
commit 2fefe1164d
3 changed files with 83 additions and 67 deletions


@@ -0,0 +1,23 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+/**
+ * @brief This is a header file for the OpenVINO Runtime common aliases that depend only on the external API
+ *
+ * @file openvino/runtime/common.hpp
+ */
+#pragma once
+
+#include <map>
+#include <string>
+
+namespace ov {
+namespace ie = InferenceEngine;
+namespace runtime {
+/**
+ * @brief This type of map is commonly used to pass a set of parameters
+ */
+using ConfigMap = std::map<std::string, std::string>;
+} // namespace runtime
+} // namespace ov
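For orientation, here is a minimal caller-side sketch of how the two new aliases are meant to be used. It is not part of the commit: the include path openvino/runtime/core.hpp and the PERF_COUNT key are assumptions, and ov::ie::Version simply names InferenceEngine::Version through the namespace alias.

#include <iostream>
#include <map>
#include <string>

#include <openvino/runtime/core.hpp>  // assumed location of ov::runtime::Core

int main() {
    ov::runtime::Core core;

    // ConfigMap is the new shorthand for std::map<std::string, std::string>.
    ov::runtime::ConfigMap config = {{"PERF_COUNT", "YES"}};  // illustrative key/value

    // ov::ie::Version is InferenceEngine::Version, reached through the ie alias.
    std::map<std::string, ov::ie::Version> versions = core.get_versions("CPU");
    for (const auto& item : versions)
        std::cout << item.first << ": " << item.second.buildNumber << "\n";

    return config.empty() ? 1 : 0;
}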


@@ -15,6 +15,7 @@
 #include <string>
 #include <vector>
+#include "common.hpp"
 #include "cpp/ie_executable_network.hpp"
 #include "ie_plugin_config.hpp"
 #include "ie_version.hpp"
@@ -57,7 +58,7 @@ public:
  * @param deviceName Device name to identify plugin
  * @return A vector of versions
  */
-std::map<std::string, InferenceEngine::Version> get_versions(const std::string& deviceName) const;
+std::map<std::string, ie::Version> get_versions(const std::string& deviceName) const;

 #ifdef ENABLE_UNICODE_PATH_SUPPORT
 /**
@@ -101,7 +102,7 @@
  * @return Function
  */
 std::shared_ptr<ov::Function> read_model(const std::string& model,
-    const std::shared_ptr<const InferenceEngine::Blob>& weights) const;
+    const std::shared_ptr<const ie::Blob>& weights) const;

 /**
  * @brief Creates an executable network from a network object.
@@ -115,9 +116,9 @@
  * operation
  * @return An executable network reference
  */
-InferenceEngine::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
-    const std::string& deviceName,
-    const std::map<std::string, std::string>& config = {});
+ie::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
+    const std::string& deviceName,
+    const ConfigMap& config = {});

 /**
  * @brief Reads model and creates an executable network from IR or ONNX file
@@ -132,9 +133,9 @@
  *
  * @return An executable network reference
  */
-InferenceEngine::ExecutableNetwork compile_model(const std::string& modelPath,
-    const std::string& deviceName,
-    const std::map<std::string, std::string>& config = {});
+ie::ExecutableNetwork compile_model(const std::string& modelPath,
+    const std::string& deviceName,
+    const ConfigMap& config = {});

 /**
  * @brief Creates an executable network from a network object within a specified remote context.
@@ -144,15 +145,15 @@
  * operation
  * @return An executable network object
  */
-InferenceEngine::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
-    const std::shared_ptr<InferenceEngine::RemoteContext>& context,
-    const std::map<std::string, std::string>& config = {});
+ie::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
+    const std::shared_ptr<ie::RemoteContext>& context,
+    const ConfigMap& config = {});

 /**
  * @brief Registers extension
  * @param extension Pointer to already loaded extension
  */
-void add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension);
+void add_extension(const std::shared_ptr<ie::IExtension>& extension);

 /**
  * @brief Creates an executable network from a previously exported network
@@ -162,9 +163,9 @@
  * operation*
  * @return An executable network reference
  */
-InferenceEngine::ExecutableNetwork import_model(std::istream& networkModel,
-    const std::string& deviceName,
-    const std::map<std::string, std::string>& config = {});
+ie::ExecutableNetwork import_model(std::istream& networkModel,
+    const std::string& deviceName,
+    const ConfigMap& config = {});

 /**
  * @brief Creates an executable network from a previously exported network within a specified
@@ -176,9 +177,9 @@
  * operation
  * @return An executable network reference
  */
-InferenceEngine::ExecutableNetwork import_model(std::istream& networkModel,
-    const std::shared_ptr<InferenceEngine::RemoteContext>& context,
-    const std::map<std::string, std::string>& config = {});
+ie::ExecutableNetwork import_model(std::istream& networkModel,
+    const std::shared_ptr<ie::RemoteContext>& context,
+    const ConfigMap& config = {});

 /**
  * @brief Query device if it supports specified network with specified configuration
@@ -188,9 +189,9 @@
  * @param config Optional map of pairs: (config parameter name, config parameter value)
  * @return An object containing a map of pairs a layer name -> a device name supporting this layer.
  */
-InferenceEngine::QueryNetworkResult query_model(const std::shared_ptr<const ov::Function>& network,
-    const std::string& deviceName,
-    const std::map<std::string, std::string>& config = {}) const;
+ie::QueryNetworkResult query_model(const std::shared_ptr<const ov::Function>& network,
+    const std::string& deviceName,
+    const ConfigMap& config = {}) const;

 /**
  * @brief Sets configuration for device, acceptable keys can be found in ie_plugin_config.hpp
@@ -200,7 +201,7 @@
  *
  * @param config Map of pairs: (config parameter name, config parameter value)
  */
-void set_config(const std::map<std::string, std::string>& config, const std::string& deviceName = {});
+void set_config(const ConfigMap& config, const std::string& deviceName = {});

 /**
  * @brief Gets configuration dedicated to device behaviour.
@@ -211,7 +212,7 @@
  * @param name - config key.
  * @return Value of config corresponding to config key.
  */
-InferenceEngine::Parameter get_config(const std::string& deviceName, const std::string& name) const;
+ie::Parameter get_config(const std::string& deviceName, const std::string& name) const;

 /**
  * @brief Gets general runtime metric for dedicated hardware.
@@ -223,7 +224,7 @@
  * @param name - metric name to request.
  * @return Metric value corresponding to metric key.
  */
-InferenceEngine::Parameter get_metric(const std::string& deviceName, const std::string& name) const;
+ie::Parameter get_metric(const std::string& deviceName, const std::string& name) const;

 /**
  * @brief Returns devices available for neural networks inference
@@ -290,15 +291,14 @@
  * @param params Map of device-specific shared context parameters.
  * @return A shared pointer to a created remote context.
  */
-std::shared_ptr<InferenceEngine::RemoteContext> create_context(const std::string& deviceName,
-    const InferenceEngine::ParamMap& params);
+std::shared_ptr<ie::RemoteContext> create_context(const std::string& deviceName, const ie::ParamMap& params);

 /**
  * @brief Get a pointer to a default (plugin-supplied) shared context object for the specified accelerator device.
  * @param deviceName - A name of a device to create the shared context from.
  * @return A shared pointer to a default remote context.
  */
-std::shared_ptr<InferenceEngine::RemoteContext> get_default_context(const std::string& deviceName);
+std::shared_ptr<ie::RemoteContext> get_default_context(const std::string& deviceName);
 };
 } // namespace runtime
 } // namespace ov
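Taken together, the header changes only shorten signatures; behavior is unchanged. A hedged sketch of what calling code looks like after the rename follows; the header path, device name, and CPU_THREADS_NUM key are assumptions, not part of the commit:

#include <string>

#include <openvino/runtime/core.hpp>  // assumed header location

void compile_twice(const std::string& model_xml) {
    ov::runtime::Core core;

    // The ConfigMap parameter defaults to {}, so it can simply be omitted...
    auto plain = core.compile_model(model_xml, "CPU");

    // ...or passed inline, without spelling out std::map<std::string, std::string>.
    auto tuned = core.compile_model(model_xml, "CPU", ov::runtime::ConfigMap{{"CPU_THREADS_NUM", "4"}});

    (void)plain;  // both results are ie::ExecutableNetwork objects
    (void)tuned;
}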


@@ -1218,7 +1218,7 @@ Core::Core(const std::string& xmlConfigFile) {
     register_plugins(core_detail::parseXmlConfig(xmlConfigFile));
 }

-std::map<std::string, InferenceEngine::Version> Core::get_versions(const std::string& deviceName) const {
+std::map<std::string, ie::Version> Core::get_versions(const std::string& deviceName) const {
     return _impl->GetVersions(deviceName);
 }
@@ -1232,49 +1232,45 @@ std::shared_ptr<ngraph::Function> Core::read_model(const std::wstring& modelPath
 std::shared_ptr<ngraph::Function> Core::read_model(const std::string& modelPath, const std::string& binPath) const {
     return _impl->ReadNetwork(modelPath, binPath).getFunction();
 }

-std::shared_ptr<ngraph::Function> Core::read_model(const std::string& model,
-    const InferenceEngine::Blob::CPtr& weights) const {
+std::shared_ptr<ngraph::Function> Core::read_model(const std::string& model, const ie::Blob::CPtr& weights) const {
     return _impl->ReadNetwork(model, weights).getFunction();
 }
-InferenceEngine::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
-    const std::string& deviceName,
-    const std::map<std::string, std::string>& config) {
-    auto exec = _impl->LoadNetwork(InferenceEngine::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
-        deviceName,
-        config);
+ie::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
+    const std::string& deviceName,
+    const ConfigMap& config) {
+    auto exec =
+        _impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), deviceName, config);
     return {exec, exec};
 }
-InferenceEngine::ExecutableNetwork Core::compile_model(const std::string& modelPath,
-    const std::string& deviceName,
-    const std::map<std::string, std::string>& config) {
+ie::ExecutableNetwork Core::compile_model(const std::string& modelPath,
+    const std::string& deviceName,
+    const ConfigMap& config) {
     auto exec = _impl->LoadNetwork(modelPath, deviceName, config);
     return {exec, exec};
 }
-InferenceEngine::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
-    const InferenceEngine::RemoteContext::Ptr& context,
-    const std::map<std::string, std::string>& config) {
-    auto exec = _impl->LoadNetwork(InferenceEngine::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
-        context,
-        config);
+ie::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
+    const ie::RemoteContext::Ptr& context,
+    const ConfigMap& config) {
+    auto exec = _impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), context, config);
     return {exec, exec};
 }

-void Core::add_extension(const InferenceEngine::IExtensionPtr& extension) {
+void Core::add_extension(const ie::IExtensionPtr& extension) {
     _impl->AddExtension(extension);
 }
-InferenceEngine::ExecutableNetwork Core::import_model(std::istream& networkModel,
-    const std::string& deviceName,
-    const std::map<std::string, std::string>& config) {
+ie::ExecutableNetwork Core::import_model(std::istream& networkModel,
+    const std::string& deviceName,
+    const ConfigMap& config) {
     OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
     auto exec = _impl->ImportNetwork(networkModel, deviceName, config);
     return {exec, exec};
 }

-InferenceEngine::ExecutableNetwork Core::import_model(std::istream& networkModel,
-    const InferenceEngine::RemoteContext::Ptr& context,
-    const std::map<std::string, std::string>& config) {
+ie::ExecutableNetwork Core::import_model(std::istream& networkModel,
+    const ie::RemoteContext::Ptr& context,
+    const ConfigMap& config) {
     OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
     using ExportMagic = std::array<char, 4>;
@@ -1296,14 +1292,12 @@ InferenceEngine::ExecutableNetwork Core::import_model(std::istream& networkModel
     return {exec, exec};
 }
-InferenceEngine::QueryNetworkResult Core::query_model(const std::shared_ptr<const ngraph::Function>& network,
-    const std::string& deviceName,
-    const std::map<std::string, std::string>& config) const {
-    return _impl->QueryNetwork(InferenceEngine::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
-        deviceName,
-        config);
+ie::QueryNetworkResult Core::query_model(const std::shared_ptr<const ngraph::Function>& network,
+    const std::string& deviceName,
+    const ConfigMap& config) const {
+    return _impl->QueryNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), deviceName, config);
 }

-void Core::set_config(const std::map<std::string, std::string>& config, const std::string& deviceName) {
+void Core::set_config(const ConfigMap& config, const std::string& deviceName) {
     // HETERO case
     if (deviceName.find("HETERO:") == 0) {
         IE_THROW() << "SetConfig is supported only for HETERO itself (without devices). "
@@ -1337,7 +1331,7 @@ void Core::set_config(const std::map<std::string, std::string>& config, const st
     }
 }

-InferenceEngine::Parameter Core::get_config(const std::string& deviceName, const std::string& name) const {
+ie::Parameter Core::get_config(const std::string& deviceName, const std::string& name) const {
     // HETERO case
     {
         if (deviceName.find("HETERO:") == 0) {
@@ -1363,13 +1357,13 @@ InferenceEngine::Parameter Core::get_config(const std::string& deviceName, const
     auto parsed = core_detail::parseDeviceNameIntoConfig(deviceName);

     // we need to return a copy of Parameter object which is created on Core side,
-    // not in InferenceEngine plugin side, which can be unloaded from Core in a parallel thread
+    // not in ie plugin side, which can be unloaded from Core in a parallel thread
     // TODO: remove this WA after *-31417 is resolved
     return core_detail::copyParameterValue(
         _impl->GetCPPPluginByName(parsed._deviceName).GetConfig(name, parsed._config));
 }

-InferenceEngine::Parameter Core::get_metric(const std::string& deviceName, const std::string& name) const {
+ie::Parameter Core::get_metric(const std::string& deviceName, const std::string& name) const {
     return _impl->GetMetric(deviceName, name);
 }
@@ -1382,7 +1376,7 @@ void Core::register_plugin(const std::string& pluginName, const std::string& dev
 }

 void Core::unload_plugin(const std::string& deviceName) {
-    InferenceEngine::DeviceIDParser parser(deviceName);
+    ie::DeviceIDParser parser(deviceName);
     std::string devName = parser.getDeviceName();
     _impl->UnloadPluginByName(devName);
@@ -1392,8 +1386,7 @@ void Core::register_plugins(const std::string& xmlConfigFile) {
     _impl->RegisterPluginsInRegistry(xmlConfigFile);
 }

-InferenceEngine::RemoteContext::Ptr Core::create_context(const std::string& deviceName,
-    const InferenceEngine::ParamMap& params) {
+ie::RemoteContext::Ptr Core::create_context(const std::string& deviceName, const ie::ParamMap& params) {
     if (deviceName.find("HETERO") == 0) {
         IE_THROW() << "HETERO device does not support remote context";
     }
@@ -1408,7 +1401,7 @@ InferenceEngine::RemoteContext::Ptr Core::create_context(const std::string& devi
     return _impl->GetCPPPluginByName(parsed._deviceName).CreateContext(parsed._config);
 }

-InferenceEngine::RemoteContext::Ptr Core::get_default_context(const std::string& deviceName) {
+ie::RemoteContext::Ptr Core::get_default_context(const std::string& deviceName) {
     if (deviceName.find("HETERO") == 0) {
         IE_THROW() << "HETERO device does not support remote context";
     }
@@ -1419,7 +1412,7 @@ InferenceEngine::RemoteContext::Ptr Core::get_default_context(const std::string&
         IE_THROW() << "AUTO device does not support remote context";
     }

-    auto parsed = core_detail::parseDeviceNameIntoConfig(deviceName, InferenceEngine::ParamMap());
+    auto parsed = core_detail::parseDeviceNameIntoConfig(deviceName, ie::ParamMap());
     return _impl->GetCPPPluginByName(parsed._deviceName).GetDefaultContext(parsed._config);
 }
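The HETERO/AUTO guards above all rely on the same idiom: std::string::find(prefix) returns 0 exactly when the string begins with that prefix. A standalone sketch of the check follows; the starts_with helper and the sample device names are illustrative, not part of the commit:

#include <iostream>
#include <string>

// Hypothetical helper mirroring the guard used in create_context/get_default_context.
static bool starts_with(const std::string& s, const std::string& prefix) {
    return s.find(prefix) == 0;  // a match at index 0 means s begins with prefix
}

int main() {
    for (const std::string name : {"HETERO:CPU,GPU", "AUTO", "CPU"}) {
        if (starts_with(name, "HETERO") || starts_with(name, "AUTO"))
            std::cout << name << ": remote context rejected\n";  // mirrors the IE_THROW paths
        else
            std::cout << name << ": remote context allowed\n";
    }
    return 0;
}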