Removed global using namespace from Plugin API (#3451)

Ilya Lavrenov 2020-12-03 17:52:55 +03:00 committed by GitHub
parent f2c2636bb5
commit 2d75d8aff2
19 changed files with 82 additions and 86 deletions
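
The whole change follows one mechanical pattern: the Plugin API headers no longer inject "using namespace InferenceEngine;" (and "using namespace InferenceEngine::details;"), so plugin code that relied on those directives now spells the types out in full. Below is a minimal, self-contained sketch of the before/after; the stand-in namespace and the GetConfigSketch function are illustrative only, not code from this commit.

#include <iostream>
#include <string>

// Stand-in for the real InferenceEngine namespace, only so the sketch compiles on its own.
namespace InferenceEngine { struct Parameter { std::string value; }; }

// Before: with "using namespace InferenceEngine;" in scope a plugin could write
//     Parameter GetConfigSketch(const std::string& name) { ... }
// After: the directive is gone, so the Inference Engine type is qualified explicitly:
InferenceEngine::Parameter GetConfigSketch(const std::string& name) {
    return InferenceEngine::Parameter{name};
}

int main() {
    std::cout << GetConfigSketch("PERF_COUNT").value << std::endl;
    return 0;
}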

View File

@@ -25,7 +25,7 @@ TemplatePlugin::ExecutableNetwork::ExecutableNetwork(const std::shared_ptr<const
     try {
         CompileNetwork(function);
         InitExecutor(); // creates thread-based executor using for async requests
-    } catch (const InferenceEngineException&) {
+    } catch (const InferenceEngine::details::InferenceEngineException&) {
         throw;
     } catch (const std::exception & e) {
         THROW_IE_EXCEPTION << "Standard exception from compilation library: " << e.what();
@@ -83,9 +83,9 @@ void TemplatePlugin::ExecutableNetwork::InitExecutor() {
     // it is better to avoid threads recreateion as some OSs memory allocator can not manage such usage cases
     // and memory consumption can be larger than it is expected.
    // So Inference Engone provides executors cache.
-    _taskExecutor = ExecutorManager::getInstance()->getIdleCPUStreamsExecutor(streamsExecutorConfig);
+    _taskExecutor = InferenceEngine::ExecutorManager::getInstance()->getIdleCPUStreamsExecutor(streamsExecutorConfig);
     // NOTE: callback Executor is not configured. So callback will be called in the thread of the last stage of inference request pipeline
-    // _callbackExecutor = ExecutorManager::getInstance()->getIdleCPUStreamsExecutor({"TemplateCallbackExecutor"});
+    // _callbackExecutor = InferenceEngine::ExecutorManager::getInstance()->getIdleCPUStreamsExecutor({"TemplateCallbackExecutor"});
 }
 // ! [executable_network:init_executor]
@@ -98,8 +98,8 @@ InferenceEngine::InferRequestInternal::Ptr TemplatePlugin::ExecutableNetwork::Cr
 // ! [executable_network:create_infer_request_impl]
 // ! [executable_network:create_infer_request]
-IInferRequest::Ptr TemplatePlugin::ExecutableNetwork::CreateInferRequest() {
-    IInferRequest::Ptr asyncRequest;
+InferenceEngine::IInferRequest::Ptr TemplatePlugin::ExecutableNetwork::CreateInferRequest() {
+    InferenceEngine::IInferRequest::Ptr asyncRequest;
     auto internalRequest = CreateInferRequestImpl(_networkInputs, _networkOutputs);
     auto asyncThreadSafeImpl = std::make_shared<TemplateAsyncInferRequest>(std::static_pointer_cast<TemplateInferRequest>(internalRequest),
                                                                            _taskExecutor, _plugin->_waitExecutor, _callbackExecutor);
@@ -111,7 +111,7 @@ IInferRequest::Ptr TemplatePlugin::ExecutableNetwork::CreateInferRequest() {
 // ! [executable_network:create_infer_request]
 // ! [executable_network:get_config]
-Parameter TemplatePlugin::ExecutableNetwork::GetConfig(const std::string &name) const {
+InferenceEngine::Parameter TemplatePlugin::ExecutableNetwork::GetConfig(const std::string &name) const {
     return _cfg.Get(name);
 }
 // ! [executable_network:get_config]
@@ -130,7 +130,7 @@ InferenceEngine::Parameter TemplatePlugin::ExecutableNetwork::GetMetric(const st
            CONFIG_KEY(DEVICE_ID),
            CONFIG_KEY(PERF_COUNT),
            TEMPLATE_CONFIG_KEY(THROUGHPUT_STREAMS) };
-        auto streamExecutorConfigKeys = IStreamsExecutor::Config{}.SupportedKeys();
+        auto streamExecutorConfigKeys = InferenceEngine::IStreamsExecutor::Config{}.SupportedKeys();
         for (auto&& configKey : streamExecutorConfigKeys) {
             configKeys.emplace_back(configKey);
         }
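
The comments in InitExecutor() above explain why executors are fetched from a cache rather than created per network: recreating stream threads for every ExecutableNetwork can bloat memory on some OS allocators, so executors are looked up by name or config and reused, and removed again in the plugin destructor. The following is a rough, self-contained sketch of that caching idea only; it illustrates the concept, not the real Inference Engine ExecutorManager implementation.

#include <map>
#include <memory>
#include <string>

// Toy executor: imagine it owns a pool of stream threads.
struct ToyExecutor {
    explicit ToyExecutor(std::string n) : name(std::move(n)) {}
    std::string name;
};

// Name-keyed cache: repeated requests reuse the same executor (and its threads)
// instead of recreating them; clear() is what a plugin destructor would call.
class ToyExecutorCache {
public:
    std::shared_ptr<ToyExecutor> getIdleExecutor(const std::string& name) {
        auto it = _cache.find(name);
        if (it != _cache.end())
            return it->second;                  // reuse the already-created executor
        auto executor = std::make_shared<ToyExecutor>(name);
        _cache.emplace(name, executor);         // create once, keep for later requests
        return executor;
    }
    void clear(const std::string& name) { _cache.erase(name); }

private:
    std::map<std::string, std::shared_ptr<ToyExecutor>> _cache;
};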

View File

@@ -33,15 +33,15 @@ Plugin::Plugin() {
     _backend = ngraph::runtime::Backend::create("INTERPRETER");
     // create default stream executor with a given name
-    _waitExecutor = ExecutorManager::getInstance()->getIdleCPUStreamsExecutor({"TemplateWaitExecutor"});
+    _waitExecutor = InferenceEngine::ExecutorManager::getInstance()->getIdleCPUStreamsExecutor({"TemplateWaitExecutor"});
 }
 // ! [plugin:ctor]
 // ! [plugin:dtor]
 Plugin::~Plugin() {
     // Plugin should remove executors from executor cache to avoid threads number growth in the whole application
-    ExecutorManager::getInstance()->clear("TemplateStreamsExecutor");
-    ExecutorManager::getInstance()->clear("TemplateWaitExecutor");
+    InferenceEngine::ExecutorManager::getInstance()->clear("TemplateStreamsExecutor");
+    InferenceEngine::ExecutorManager::getInstance()->clear("TemplateWaitExecutor");
     // NOTE: Uncomment this if Inference Engine Executor cache is used to create callback executor
     // ExecutorManager::getInstance()->clear("TemplateCallbackExecutor");
 }
@@ -91,8 +91,8 @@ InferenceEngine::ExecutableNetworkInternal::Ptr Plugin::LoadExeNetworkImpl(const
     for (auto networkOutput : networkOutputs) {
         auto output_precision = networkOutput.second->getPrecision();
-        if (output_precision != Precision::FP32 &&
-            output_precision != Precision::FP16) {
+        if (output_precision != InferenceEngine::Precision::FP32 &&
+            output_precision != InferenceEngine::Precision::FP16) {
             THROW_IE_EXCEPTION << "Template device supports only FP16 and FP32 output precision.";
         }
     }
@@ -135,8 +135,8 @@ InferenceEngine::ExecutableNetwork Plugin::ImportNetworkImpl(std::istream& model
 // ! [plugin:import_network_impl]
 // ! [plugin:query_network]
-QueryNetworkResult Plugin::QueryNetwork(const CNNNetwork &network, const ConfigMap& config) const {
-    QueryNetworkResult res;
+InferenceEngine::QueryNetworkResult Plugin::QueryNetwork(const InferenceEngine::CNNNetwork &network, const ConfigMap& config) const {
+    InferenceEngine::QueryNetworkResult res;
     Configuration cfg{config, _cfg, false};
     auto function = network.getFunction();
@@ -163,7 +163,7 @@ QueryNetworkResult Plugin::QueryNetwork(const CNNNetwork &network, const ConfigM
         for (auto&& fusedLayerName : ngraph::getFusedNamesVector(node)) {
             // Filter just nodes from original operation set
             // TODO: fill with actual decision rules based on whether kernel is supported by backend
-            if (contains(originalOps, fusedLayerName)) {
+            if (InferenceEngine::details::contains(originalOps, fusedLayerName)) {
                 if (opset.contains_type_insensitive(fusedLayerName)) {
                     supported.emplace(fusedLayerName);
                 } else {
@@ -175,7 +175,7 @@ QueryNetworkResult Plugin::QueryNetwork(const CNNNetwork &network, const ConfigM
     // 4. The result set should contains just nodes from supported set
     for (auto&& layerName : supported) {
-        if (!contains(unsupported, layerName)) {
+        if (!InferenceEngine::details::contains(unsupported, layerName)) {
             res.supportedLayersMap.emplace(layerName, GetName());
         }
     }
@@ -219,7 +219,7 @@ InferenceEngine::Parameter Plugin::GetMetric(const std::string& name, const std:
            CONFIG_KEY(DEVICE_ID),
            CONFIG_KEY(PERF_COUNT),
            TEMPLATE_CONFIG_KEY(THROUGHPUT_STREAMS)};
-        auto streamExecutorConfigKeys = IStreamsExecutor::Config{}.SupportedKeys();
+        auto streamExecutorConfigKeys = InferenceEngine::IStreamsExecutor::Config{}.SupportedKeys();
         for (auto&& configKey : streamExecutorConfigKeys) {
             if (configKey != InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS) {
                 configKeys.emplace_back(configKey);
@@ -248,6 +248,6 @@ InferenceEngine::Parameter Plugin::GetMetric(const std::string& name, const std:
 // ! [plugin:get_metric]
 // ! [plugin:create_plugin_engine]
-static const Version version = {{2, 1}, CI_BUILD_NUMBER, "templatePlugin"};
+static const InferenceEngine::Version version = {{2, 1}, CI_BUILD_NUMBER, "templatePlugin"};
 IE_DEFINE_PLUGIN_CREATE_FUNCTION(Plugin, version)
 // ! [plugin:create_plugin_engine]

View File

@@ -10,8 +10,6 @@
 #include "backend.hpp"
-#include "backend.hpp"
 //! [plugin:header]
 namespace TemplatePlugin {

View File

@@ -452,8 +452,8 @@ QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
         std::vector<std::shared_ptr<ngraph::Node>> concats;
         std::vector<std::shared_ptr<ngraph::Node>> nextLayerDependent;
-        for (CNNNetworkIterator itLayer{clonedNetwork.get()};
-             itLayer != CNNNetworkIterator();
+        for (InferenceEngine::details::CNNNetworkIterator itLayer{clonedNetwork.get()};
+             itLayer != InferenceEngine::details::CNNNetworkIterator();
              itLayer++) {
             auto layerIsSupported = [&] {
                 auto node = (*itLayer)->getNode();
@@ -490,7 +490,7 @@ QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
                 continue;
             }
             for (auto&& fusedLayerName : ngraph::getFusedNamesVector(fusedNode)) {
-                if (contains(originalOps, fusedLayerName)) {
+                if (InferenceEngine::details::contains(originalOps, fusedLayerName)) {
                     if (layerIsSupported) {
                         supported.emplace(fusedLayerName);
                     } else {
@@ -501,7 +501,7 @@ QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
         }
         for (auto&& layerName : supported) {
-            if (contains(unsupported, layerName)) {
+            if (InferenceEngine::details::contains(unsupported, layerName)) {
                 supported.erase(layerName);
             }
         }
@@ -512,10 +512,10 @@ QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
             const auto outputs = split->outputs();
             for (const auto& output : outputs) {
                 const auto& name = output.get_node()->get_friendly_name();
-                if (!contains(supported, name) &&
-                    !contains(depLayerNames, name) &&
-                    !contains(concatNames, name) &&
-                    !contains(splitNames, name)) {
+                if (!InferenceEngine::details::contains(supported, name) &&
+                    !InferenceEngine::details::contains(depLayerNames, name) &&
+                    !InferenceEngine::details::contains(concatNames, name) &&
+                    !InferenceEngine::details::contains(splitNames, name)) {
                     is_supported = false;
                     break;
                 }
@@ -530,9 +530,9 @@ QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
             const auto inputs = concat->inputs();
             for (const auto& input : inputs) {
                 const auto& name = input.get_node()->get_friendly_name();
-                if (!contains(supported, name) &&
-                    !contains(depLayerNames, name) &&
-                    !contains(concatNames, name)) {
+                if (!InferenceEngine::details::contains(supported, name) &&
+                    !InferenceEngine::details::contains(depLayerNames, name) &&
+                    !InferenceEngine::details::contains(concatNames, name)) {
                     is_supported = false;
                     break;
                 }
@@ -548,7 +548,7 @@ QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
             const auto inputs = cnl->inputs();
             for (const auto& input : inputs) {
                 const auto& name = input.get_node()->get_friendly_name();
-                if (!contains(supported, name)) {
+                if (!InferenceEngine::details::contains(supported, name)) {
                     is_supported = false;
                     break;
                 }
@@ -556,7 +556,7 @@ QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
             const auto outputs = cnl->outputs();
             for (const auto& output : outputs) {
                 const auto& name = output.get_node()->get_friendly_name();
-                if (!contains(supported, name)) {
+                if (!InferenceEngine::details::contains(supported, name)) {
                     is_supported = false;
                     break;
                 }
@@ -567,7 +567,7 @@ QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
     }
     for (auto&& node : function->get_ops()) {
-        if (contains(supported, node->get_friendly_name())) {
+        if (InferenceEngine::details::contains(supported, node->get_friendly_name())) {
             for (auto&& inputNodeOutput : node->input_values()) {
                 if (ngraph::op::is_constant(inputNodeOutput.get_node()) || ngraph::op::is_parameter(inputNodeOutput.get_node())) {
                     supported.emplace(inputNodeOutput.get_node()->get_friendly_name());

View File

@@ -55,7 +55,7 @@ public:
         return make_executable_network(std::make_shared<GNAExecutableNetwork>(modelFileName, plg));
     }
-    ExecutableNetwork ImportNetwork(std::istream& networkModel,
+    InferenceEngine::ExecutableNetwork ImportNetwork(std::istream& networkModel,
                                     const std::map<std::string, std::string>& config) override {
         Config updated_config(defaultConfig);
         updated_config.UpdateFromMap(config);

View File

@@ -37,9 +37,9 @@ HeteroInferRequest::HeteroInferRequest(InferenceEngine::InputsDataMap networkInp
         std::tie(itBlob, emplaced) = _blobs.emplace(intermediateBlobName, Blob::Ptr{});
         if (emplaced) {
             itBlob->second = r->GetBlob(blobName);
-            if (contains(networkInputs, blobName)) {
+            if (InferenceEngine::details::contains(networkInputs, blobName)) {
                 _inputs[blobName] = itBlob->second;
-            } else if (contains(networkOutputs, blobName)) {
+            } else if (InferenceEngine::details::contains(networkOutputs, blobName)) {
                 _outputs[blobName] = itBlob->second;
             }
         } else {

View File

@@ -65,7 +65,7 @@ InferenceEngine::ExecutableNetworkInternal::Ptr Engine::LoadExeNetworkImpl(const
     return std::make_shared<HeteroExecutableNetwork>(network, mergeConfigs(_config, config), this);
 }
-ExecutableNetwork Engine::ImportNetworkImpl(std::istream& heteroModel, const Configs& config) {
+InferenceEngine::ExecutableNetwork Engine::ImportNetworkImpl(std::istream& heteroModel, const Configs& config) {
     if (GetCore() == nullptr) {
         THROW_IE_EXCEPTION << "Please, work with HETERO device via InferencEngine::Core object";
     }

View File

@@ -37,7 +37,7 @@ public:
     InferenceEngine::Parameter GetConfig(const std::string& name, const std::map<std::string,
         InferenceEngine::Parameter> & options) const override;
-    ExecutableNetwork ImportNetworkImpl(std::istream& heteroModel, const Configs& config) override;
+    InferenceEngine::ExecutableNetwork ImportNetworkImpl(std::istream& heteroModel, const Configs& config) override;
     DeviceMetaInformationMap GetDevicePlugins(const std::string& targetFallback,
                                               const Configs & localConfig) const;

View File

@@ -145,14 +145,14 @@ MKLDNNExecNetwork::MKLDNNExecNetwork(const InferenceEngine::ICNNNetwork &network
     if (cfg.exclusiveAsyncRequests) {
         // special case when all InferRequests are muxed into a single queue
-        _taskExecutor = ExecutorManager::getInstance()->getExecutor("CPU");
+        _taskExecutor = InferenceEngine::ExecutorManager::getInstance()->getExecutor("CPU");
     } else {
         auto streamsExecutorConfig = InferenceEngine::IStreamsExecutor::Config::MakeDefaultMultiThreaded(_cfg.streamExecutorConfig);
         streamsExecutorConfig._name = "CPUStreamsExecutor";
-        _taskExecutor = ExecutorManager::getInstance()->getIdleCPUStreamsExecutor(streamsExecutorConfig);
+        _taskExecutor = InferenceEngine::ExecutorManager::getInstance()->getIdleCPUStreamsExecutor(streamsExecutorConfig);
     }
     if (0 != cfg.streamExecutorConfig._streams) {
-        _callbackExecutor = ExecutorManager::getInstance()->getIdleCPUStreamsExecutor(
+        _callbackExecutor = InferenceEngine::ExecutorManager::getInstance()->getIdleCPUStreamsExecutor(
             IStreamsExecutor::Config{"CPUCallbackExecutor", 1, 0, IStreamsExecutor::ThreadBindingType::NONE});
     } else {
         _callbackExecutor = _taskExecutor;

View File

@@ -259,7 +259,9 @@ static void Transformation(ICNNNetwork::Ptr& clonedNetwork, const Config& conf)
     // WA: after conversion to CNNNetwork user precision can redefine input/output precisions
     // so we need to apply additional precision conversion but only for inputs and outputs
     for (auto & precision : convert_precision_list) {
-        NetPass::ConvertIOPrecision(*clonedNetwork, convertPrecision(precision.first), convertPrecision(precision.second));
+        NetPass::ConvertIOPrecision(*clonedNetwork,
+                                    InferenceEngine::details::convertPrecision(precision.first),
+                                    InferenceEngine::details::convertPrecision(precision.second));
     }
 }
@@ -450,7 +452,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const std::ma
             return true;
         } ();
         for (auto&& fusedLayerName : ngraph::getFusedNamesVector((*itLayer)->getNode())) {
-            if (contains(originalOps, fusedLayerName)) {
+            if (InferenceEngine::details::contains(originalOps, fusedLayerName)) {
                 if (layerIsSupported) {
                     supported.emplace(fusedLayerName);
                 } else {
@@ -461,7 +463,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const std::ma
     }
     for (auto&& node : function->get_ops()) {
-        if (!contains(unsupported, node->get_friendly_name())) {
+        if (!InferenceEngine::details::contains(unsupported, node->get_friendly_name())) {
             for (auto&& inputNodeOutput : node->input_values()) {
                 if (ngraph::op::is_constant(inputNodeOutput.get_node())) {
                     supported.emplace(inputNodeOutput.get_node()->get_friendly_name());
@@ -478,7 +480,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const std::ma
     }
     for (auto&& layerName : supported) {
-        if (!contains(unsupported, layerName)) {
+        if (!InferenceEngine::details::contains(unsupported, layerName)) {
             res.supportedLayersMap.emplace(layerName, GetName());
         }
     }

View File

@@ -99,8 +99,8 @@ std::vector<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(cons
     return metaDevices;
 }
-Parameter MultiDeviceInferencePlugin::GetConfig(const std::string& name,
-        const std::map<std::string, Parameter> & options) const {
+InferenceEngine::Parameter MultiDeviceInferencePlugin::GetConfig(const std::string& name,
+        const std::map<std::string, InferenceEngine::Parameter> & options) const {
     if (name == MULTI_CONFIG_KEY(DEVICE_PRIORITIES)) {
         auto it = _config.find(MULTI_CONFIG_KEY(DEVICE_PRIORITIES));
         if (it == _config.end()) {
@@ -219,7 +219,7 @@ QueryNetworkResult MultiDeviceInferencePlugin::QueryNetwork(const CNNNetwork&
         }
         supportedLayers = supportedLayers.empty()
                         ? deviceSupportedLayers : (deviceSupportedLayers.empty()
-                        ? supportedLayers : Intersection(supportedLayers, deviceSupportedLayers));
+                        ? supportedLayers : InferenceEngine::details::Intersection(supportedLayers, deviceSupportedLayers));
     }
     for (auto&& supportedLayer : supportedLayers) {
         queryResult.supportedLayersMap[supportedLayer] = GetName();

View File

@@ -24,7 +24,7 @@ public:
                                                const std::map<std::string, std::string>& config) override;
     void SetConfig(const std::map<std::string, std::string>& config) override;
-    Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter> & options) const override;
+    InferenceEngine::Parameter GetConfig(const std::string& name, const std::map<std::string, InferenceEngine::Parameter> & options) const override;
     InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::CNNNetwork& network,
                                                      const std::map<std::string, std::string>& config) const override;
     InferenceEngine::Parameter GetMetric(const std::string& name,

View File

@@ -21,10 +21,6 @@
 #include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
 #include "cpp_interfaces/plugin_itt.hpp"
-using namespace InferenceEngine;
-using namespace InferenceEngine::details;
 namespace InferenceEngine {
 namespace {
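
These two using-directives are the ones the commit title refers to: once they are gone from the shared Plugin API sources, only code sitting outside namespace InferenceEngine has to qualify the names, which is exactly what the surrounding one-line edits do. A small self-contained sketch of that lookup rule follows; the StatusCode enum is a stand-in defined here for illustration, not the real header.

#include <iostream>

namespace InferenceEngine { enum StatusCode { OK = 0, GENERAL_ERROR = -1 }; }

// Code wrapped in the namespace keeps resolving unqualified names with no
// using-directive, so such code needed no edits in this commit:
namespace InferenceEngine {
StatusCode Probe() { return OK; }
}

// Code outside the namespace previously leaned on "using namespace InferenceEngine;";
// with the directive removed, names must be written in full, as in the
// "return InferenceEngine::OK;" change in the plugin-creation macro below:
int main() {
    std::cout << (InferenceEngine::Probe() == InferenceEngine::OK) << std::endl;
    return 0;
}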

View File

@@ -287,9 +287,9 @@ public:
         try { \
             plugin = new PluginType(__VA_ARGS__); \
             plugin->SetVersion(version); \
-            return OK; \
+            return InferenceEngine::OK; \
         } \
         catch (std::exception &ex) { \
-            return InferenceEngine::DescriptionBuffer(GENERAL_ERROR, resp) << ex.what(); \
+            return InferenceEngine::DescriptionBuffer(InferenceEngine::GENERAL_ERROR, resp) << ex.what(); \
         } \
     }

View File

@@ -93,7 +93,7 @@ static Set Intersection(const Set& lhs, const Set& rhs) {
     const auto& minSizeSet = (lhs.size() < rhs.size()) ? lhs : rhs;
     const auto& maxSizeSet = (lhs.size() >= rhs.size()) ? lhs : rhs;
     for (auto&& val : minSizeSet) {
-        if (contains(maxSizeSet, val)) {
+        if (InferenceEngine::details::contains(maxSizeSet, val)) {
             result.insert(val);
         }
     }
@@ -112,7 +112,7 @@ static bool Intersects(const Set& lhs, const Set& rhs) {
     const auto& minSizeSet = (lhs.size() < rhs.size()) ? lhs : rhs;
     const auto& maxSizeSet = (lhs.size() >= rhs.size()) ? lhs : rhs;
     for (auto&& val : minSizeSet) {
-        if (contains(maxSizeSet, val)) {
+        if (InferenceEngine::details::contains(maxSizeSet, val)) {
             return true;
         }
     }
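
Both helpers above share the same shape: iterate the smaller of the two sets and probe the larger one, with InferenceEngine::details::contains doing the membership test. Here is a self-contained sketch of that pattern; the local contains template is only a stand-in for the details:: helper.

#include <set>
#include <string>

// Stand-in for InferenceEngine::details::contains: generic set membership check.
template <typename Set, typename Key>
static bool contains(const Set& set, const Key& key) {
    return set.find(key) != set.end();
}

template <typename Set>
static Set Intersection(const Set& lhs, const Set& rhs) {
    Set result;
    const auto& minSizeSet = (lhs.size() < rhs.size()) ? lhs : rhs;   // iterate the smaller set
    const auto& maxSizeSet = (lhs.size() >= rhs.size()) ? lhs : rhs;  // probe the larger one
    for (auto&& val : minSizeSet) {
        if (contains(maxSizeSet, val)) {
            result.insert(val);
        }
    }
    return result;
}

// Example: Intersection(std::set<std::string>{"a", "b"}, std::set<std::string>{"b", "c"}) yields {"b"}.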

View File

@@ -100,7 +100,7 @@ QueryNetworkResult Engine::QueryNetwork(
     ngraph::NodeVector splits;
     ngraph::NodeVector concats;
-    const auto isLayerSupported = [this, &splitNames, &concatNames, &concats, &splits](CNNNetworkIterator& layer) -> bool {
+    const auto isLayerSupported = [this, &splitNames, &concatNames, &concats, &splits](InferenceEngine::details::CNNNetworkIterator& layer) -> bool {
         auto node = (*layer)->getNode();
         if (std::dynamic_pointer_cast<const ::ngraph::opset3::Split>(node) != nullptr) {
             splitNames.emplace(node->get_friendly_name());
@@ -117,8 +117,8 @@ QueryNetworkResult Engine::QueryNetwork(
         }
     };
-    for (CNNNetworkIterator itLayer{convertedNetwork.get()};
-         itLayer != CNNNetworkIterator();
+    for (InferenceEngine::details::CNNNetworkIterator itLayer{convertedNetwork.get()};
+         itLayer != InferenceEngine::details::CNNNetworkIterator();
          itLayer++) {
         const auto fusedNode = (*itLayer)->getNode();
         if (fusedNode == nullptr) {
@@ -126,7 +126,7 @@ QueryNetworkResult Engine::QueryNetwork(
         }
         for (auto& fusedLayerName : ngraph::getFusedNamesVector(fusedNode)) {
-            if (contains(originalOps, fusedLayerName)) {
+            if (InferenceEngine::details::contains(originalOps, fusedLayerName)) {
                 if (isLayerSupported(itLayer)) {
                     supported.emplace(fusedLayerName);
                 } else {
@@ -137,7 +137,7 @@ QueryNetworkResult Engine::QueryNetwork(
     }
     for (const auto& layerName : supported) {
-        if (contains(unsupported, layerName)) {
+        if (InferenceEngine::details::contains(unsupported, layerName)) {
             supported.erase(layerName);
         }
     }
@@ -149,13 +149,13 @@ QueryNetworkResult Engine::QueryNetwork(
         const auto inputs = split->inputs();
         for (const auto& input : inputs) {
             const auto& parentName = input.get_source_output().get_node()->get_friendly_name();
-            if (contains(supported, parentName) &&
-                contains(splitNames, parentName)) {
+            if (InferenceEngine::details::contains(supported, parentName) &&
+                InferenceEngine::details::contains(splitNames, parentName)) {
                 markParentSplitAsUnsupported(input.get_source_output().get_node_shared_ptr());
             }
         }
         const auto& name = split->get_friendly_name();
-        if (contains(supported, name)) {
+        if (InferenceEngine::details::contains(supported, name)) {
             supported.erase(name);
         }
     };
@@ -167,9 +167,9 @@ QueryNetworkResult Engine::QueryNetwork(
         for (const auto& output : outputs) {
             for (const auto& consumer : output.get_target_inputs()) {
                 const auto& name = consumer.get_node()->get_friendly_name();
-                if (!contains(supported, name) &&
-                    !contains(concatNames, name) &&
-                    !contains(splitNames, name)) {
+                if (!InferenceEngine::details::contains(supported, name) &&
+                    !InferenceEngine::details::contains(concatNames, name) &&
+                    !InferenceEngine::details::contains(splitNames, name)) {
                     is_supported = false;
                     break;
                 }
@@ -189,8 +189,8 @@ QueryNetworkResult Engine::QueryNetwork(
         const auto inputs = concat->inputs();
         for (const auto& input : inputs) {
             const auto& name = input.get_source_output().get_node()->get_friendly_name();
-            if (!contains(supported, name) &&
-                !contains(concatNames, name)) {
+            if (!InferenceEngine::details::contains(supported, name) &&
+                !InferenceEngine::details::contains(concatNames, name)) {
                 is_supported = false;
                 break;
             }
@@ -201,7 +201,7 @@ QueryNetworkResult Engine::QueryNetwork(
     }
     for (const auto& node : function->get_ops()) {
-        if (contains(supported, node->get_friendly_name())) {
+        if (InferenceEngine::details::contains(supported, node->get_friendly_name())) {
             for (const auto& inputNodeOutput : node->input_values()) {
                 if (ngraph::op::is_constant(inputNodeOutput.get_node()) || ngraph::op::is_parameter(inputNodeOutput.get_node())) {
                     supported.emplace(inputNodeOutput.get_node()->get_friendly_name());

View File

@@ -32,7 +32,7 @@ MockPlugin::LoadNetwork(const CNNNetwork &network,
     }
 }
-ExecutableNetworkInternal::Ptr
+InferenceEngine::ExecutableNetworkInternal::Ptr
 MockPlugin::LoadExeNetworkImpl(const InferenceEngine::CNNNetwork& network,
                                const std::map<std::string, std::string>& config) {
     return {};

View File

@@ -20,7 +20,7 @@ public:
     InferenceEngine::ExecutableNetwork
     LoadNetwork(const InferenceEngine::CNNNetwork &network,
                 const std::map<std::string, std::string> &config) override;
-    ExecutableNetworkInternal::Ptr
+    InferenceEngine::ExecutableNetworkInternal::Ptr
     LoadExeNetworkImpl(const InferenceEngine::CNNNetwork& network,
                        const std::map<std::string, std::string>& config) override;

View File

@@ -100,7 +100,7 @@ TEST(UtilTests, cloneLayers) {
 namespace {
 IE::CNNLayerPtr getLayer(const IE::details::CNNNetworkImplPtr n,
                          const char* name) {
-    if (contains(n->allLayers(), name)) {
+    if (InferenceEngine::details::contains(n->allLayers(), name)) {
         return n->allLayers().find(name)->second;
     }
     return nullptr;