Fixed some leftovers for 2.0 dev api (#16421)
* Fixed some leftovers for 2.0 dev api
* Fixed build issue
parent 7d56c75d65
commit ec0a1e58d1
@@ -209,22 +209,4 @@ public:
     virtual RemoteContext::Ptr GetDefaultContext(const std::string& deviceName) = 0;
 };

-/**
- * @private
- */
-class INFERENCE_ENGINE_API_CLASS(DeviceIDParser) {
-    std::string deviceName;
-    std::string deviceID;
-
-public:
-    explicit DeviceIDParser(const std::string& deviceNameWithID);
-
-    std::string getDeviceID() const;
-    std::string getDeviceName() const;
-
-    static std::vector<std::string> getHeteroDevices(std::string fallbackDevice);
-    static std::vector<std::string> getMultiDevices(std::string devicesList);
-    static std::string getBatchDevice(std::string devicesList);
-};
-
 } // namespace InferenceEngine
src/inference/dev_api/openvino/runtime/device_id_parser.hpp  (new file, 36 lines added)
@@ -0,0 +1,36 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+/**
+ * @brief Provides parser for device name
+ * @file openvino/runtime/device_id_parser.hpp
+ */
+
+#pragma once
+
+#include <string>
+
+#include "openvino/runtime/common.hpp"
+
+namespace ov {
+
+/**
+ * @brief Class parses device name and id
+ */
+class OPENVINO_RUNTIME_API DeviceIDParser {
+    std::string m_device_name;
+    std::string m_device_id;
+
+public:
+    explicit DeviceIDParser(const std::string& device_name_with_id);
+
+    const std::string& get_device_id() const;
+    const std::string& get_device_name() const;
+
+    static std::vector<std::string> get_hetero_devices(const std::string& fallbackDevice);
+    static std::vector<std::string> get_multi_devices(const std::string& devicesList);
+    static std::string get_batch_device(const std::string& devicesList);
+};
+
+} // namespace ov
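A minimal usage sketch for the new public dev-API class (not part of the diff; the device string "GPU.1" is illustrative). Per the implementation later in this commit, the constructor splits a "<name>.<id>" string on the first dot:

#include <iostream>

#include "openvino/runtime/device_id_parser.hpp"

int main() {
    // "GPU.1" -> device name "GPU", device id "1"; a plain "CPU" would keep an empty id.
    ov::DeviceIDParser parser("GPU.1");
    std::cout << parser.get_device_name() << "\n";  // GPU
    std::cout << parser.get_device_id() << "\n";    // 1
    return 0;
}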
@@ -20,6 +20,7 @@
 #include "openvino/runtime/icore.hpp"
 #include "openvino/runtime/iremote_context.hpp"
 #include "openvino/runtime/threading/executor_manager.hpp"
+#include "openvino/util/pp.hpp"

 namespace InferenceEngine {

@@ -256,7 +257,11 @@ OPENVINO_RUNTIME_API std::unordered_set<std::string> get_supported_nodes(
     std::function<void(std::shared_ptr<ov::Model>&)> transform,
     std::function<bool(const std::shared_ptr<ov::Node>)> is_node_supported);

-} // namespace ov
+/**
+ * @private
+ */
+using CreatePluginFunc = void(std::shared_ptr<::ov::IPlugin>&);
+
 /**
  * @def OV_CREATE_PLUGIN
  * @brief Defines a name of a function creating plugin instance
@@ -266,6 +271,13 @@ OPENVINO_RUNTIME_API std::unordered_set<std::string> get_supported_nodes(
 # define OV_CREATE_PLUGIN CreatePluginEngine
 #endif

+/**
+ * @private
+ */
+constexpr static const auto create_plugin_function = OV_PP_TOSTRING(OV_CREATE_PLUGIN);
+
+} // namespace ov
+
 /**
  * @def OV_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version)
  * @brief Defines the exported `OV_CREATE_PLUGIN` function which is used to create a plugin instance
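As context for the two @private additions above: once a plugin library is opened, the Core side resolves the exported OV_CREATE_PLUGIN symbol by the ov::create_plugin_function string and invokes it through ov::CreatePluginFunc. A minimal sketch of that call pattern follows (not part of this commit; load_plugin_sketch is a made-up name, and the iplugin/shared_object header paths are assumptions based on headers used elsewhere in this diff):

#include <memory>
#include <string>

#include "openvino/runtime/iplugin.hpp"      // assumed home of ov::IPlugin / ov::CreatePluginFunc
#include "openvino/util/shared_object.hpp"   // ov::util::load_shared_object / get_symbol, as used in get_plugin() below

std::shared_ptr<ov::IPlugin> load_plugin_sketch(const std::string& library_path) {
    // Open the plugin library; the handle owns the loaded module.
    auto so = ov::util::load_shared_object(library_path.c_str());
    std::shared_ptr<ov::IPlugin> plugin_impl;
    // ov::create_plugin_function is the stringified OV_CREATE_PLUGIN name introduced above.
    reinterpret_cast<ov::CreatePluginFunc*>(ov::util::get_symbol(so, ov::create_plugin_function))(plugin_impl);
    // Real code must keep `so` alive together with the plugin (see Plugin{plugin_impl, so} in the core hunk below).
    return plugin_impl;
}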
@@ -9,6 +9,7 @@
 #include "dev/converter_utils.hpp"
 #include "dev/core_impl.hpp"
 #include "ie_itt.hpp"
+#include "openvino/runtime/device_id_parser.hpp"
 #include "so_extension.hpp"

 #ifdef OPENVINO_STATIC_LIBRARY
@@ -252,8 +253,8 @@ void Core::register_plugin(const std::string& plugin, const std::string& device_

 void Core::unload_plugin(const std::string& device_name) {
     OV_CORE_CALL_STATEMENT({
-        ie::DeviceIDParser parser(device_name);
-        std::string devName = parser.getDeviceName();
+        ov::DeviceIDParser parser(device_name);
+        std::string devName = parser.get_device_name();

         _impl->unload_plugin(devName);
     });
@@ -28,6 +28,7 @@
 #include "openvino/core/preprocess/pre_post_process.hpp"
 #include "openvino/core/version.hpp"
 #include "openvino/pass/manager.hpp"
+#include "openvino/runtime/device_id_parser.hpp"
 #include "openvino/runtime/icompiled_model.hpp"
 #include "openvino/runtime/itensor.hpp"
 #include "openvino/runtime/remote_context.hpp"
@@ -276,9 +277,9 @@ ov::Parsed ov::parseDeviceNameIntoConfig(const std::string& deviceName, const An
         updated_device_name = deviceName.substr(0, pos);
         parsed_device_priority = deviceName.substr(pos + 1);
     } else {
-        InferenceEngine::DeviceIDParser parser(deviceName);
-        updated_device_name = parser.getDeviceName();
-        parsed_device_priority = parser.getDeviceID();
+        ov::DeviceIDParser parser(deviceName);
+        updated_device_name = parser.get_device_name();
+        parsed_device_priority = parser.get_device_id();
     }

     // checks and updates device priority
@@ -416,8 +417,7 @@ ov::Plugin ov::CoreImpl::get_plugin(const std::string& pluginName) const {
         } else {
             so = ov::util::load_shared_object(desc.libraryLocation.c_str());
             std::shared_ptr<ov::IPlugin> plugin_impl;
-            reinterpret_cast<InferenceEngine::CreatePluginEngineFunc*>(
-                ov::util::get_symbol(so, InferenceEngine::create_plugin_function))(plugin_impl);
+            reinterpret_cast<ov::CreatePluginFunc*>(ov::util::get_symbol(so, ov::create_plugin_function))(plugin_impl);
             plugin = Plugin{plugin_impl, so};
         }

@@ -425,8 +425,8 @@ ov::Plugin ov::CoreImpl::get_plugin(const std::string& pluginName) const {
         plugin.set_name(deviceName);

         // Set Core class reference to plugins
-        std::weak_ptr<InferenceEngine::ICore> mutableCore =
-            std::const_pointer_cast<InferenceEngine::ICore>(shared_from_this());
+        std::weak_ptr<ov::ICore> mutableCore =
+            std::const_pointer_cast<ov::ICore>(std::dynamic_pointer_cast<const ov::ICore>(shared_from_this()));
         plugin.set_core(mutableCore);
     }

@@ -472,9 +472,9 @@ ov::Plugin ov::CoreImpl::get_plugin(const std::string& pluginName) const {
         // for each such .0, .1, .# device to make sure plugin can handle different settings for different
         // device IDs
         for (auto pluginDesc : pluginRegistry) {
-            InferenceEngine::DeviceIDParser parser(pluginDesc.first);
-            if (pluginDesc.first.find(deviceName) != std::string::npos && !parser.getDeviceID().empty()) {
-                pluginDesc.second.defaultConfig[deviceKey] = parser.getDeviceID();
+            ov::DeviceIDParser parser(pluginDesc.first);
+            if (pluginDesc.first.find(deviceName) != std::string::npos && !parser.get_device_id().empty()) {
+                pluginDesc.second.defaultConfig[deviceKey] = parser.get_device_id();
                 plugin.set_property(pluginDesc.second.defaultConfig);
             }
         }
@@ -795,7 +795,7 @@ void ov::CoreImpl::apply_auto_batching(const std::shared_ptr<const ov::Model>& m
         if (pos == std::string::npos)
             return;  // BATCH device is already configured via the config
         deviceNameWithBatchSize = deviceName.substr(pos + 1);
-        deviceNameWithoutBatch = InferenceEngine::DeviceIDParser::getBatchDevice(deviceNameWithBatchSize);
+        deviceNameWithoutBatch = ov::DeviceIDParser::get_batch_device(deviceNameWithBatchSize);
         // when user sets the BATCH device explicitly, we may check the dims less strictly
         // as the result is being checked by the user
         strictly_check_dims = false;
@@ -982,8 +982,8 @@ void ov::CoreImpl::set_property_for_device(const ov::AnyMap& configMap, const st
         return;
     }

-    InferenceEngine::DeviceIDParser parser(deviceName);
-    std::string clearDeviceName = parser.getDeviceName();
+    ov::DeviceIDParser parser(deviceName);
+    std::string clearDeviceName = parser.get_device_name();

     std::vector<std::pair<std::string, ov::Plugin>> created_plugins;
     {
@@ -1065,8 +1065,8 @@ void ov::CoreImpl::set_property_for_device(const ov::AnyMap& configMap, const st
                 const std::string deviceKey =
                     supportsConfigDeviceID ? CONFIG_KEY_INTERNAL(CONFIG_DEVICE_ID) : CONFIG_KEY(DEVICE_ID);

-                if (!parser.getDeviceID().empty()) {
-                    configCopy[deviceKey] = parser.getDeviceID();
+                if (!parser.get_device_id().empty()) {
+                    configCopy[deviceKey] = parser.get_device_id();
                 }
             }
             plugin.second.set_property(configCopy);
@@ -18,6 +18,7 @@
 #include "ngraph/op/constant.hpp"
 #include "ngraph/pass/constant_folding.hpp"
 #include "openvino/itt.hpp"
+#include "openvino/runtime/device_id_parser.hpp"
 #include "openvino/runtime/icompiled_model.hpp"
 #include "openvino/runtime/itensor.hpp"
 #include "openvino/util/common_util.hpp"
@@ -231,25 +232,25 @@ std::map<std::string, InferenceEngine::Version> ov::CoreImpl::GetVersions(const
     if (deviceName.find("HETERO") == 0) {
         auto pos = deviceName.find_first_of(":");
         if (pos != std::string::npos) {
-            deviceNames = InferenceEngine::DeviceIDParser::getHeteroDevices(deviceName.substr(pos + 1));
+            deviceNames = ov::DeviceIDParser::get_hetero_devices(deviceName.substr(pos + 1));
         }
         deviceNames.push_back("HETERO");
     } else if (deviceName.find("MULTI") == 0) {
         auto pos = deviceName.find_first_of(":");
         if (pos != std::string::npos) {
-            deviceNames = InferenceEngine::DeviceIDParser::getMultiDevices(deviceName.substr(pos + 1));
+            deviceNames = ov::DeviceIDParser::get_multi_devices(deviceName.substr(pos + 1));
         }
         deviceNames.push_back("MULTI");
     } else if (deviceName.find("AUTO") == 0) {
         auto pos = deviceName.find_first_of(":");
         if (pos != std::string::npos) {
-            deviceNames = InferenceEngine::DeviceIDParser::getMultiDevices(deviceName.substr(pos + 1));
+            deviceNames = ov::DeviceIDParser::get_multi_devices(deviceName.substr(pos + 1));
         }
         deviceNames.emplace_back("AUTO");
     } else if (deviceName.find("BATCH") == 0) {
         auto pos = deviceName.find_first_of(":");
         if (pos != std::string::npos) {
-            deviceNames = {InferenceEngine::DeviceIDParser::getBatchDevice(deviceName.substr(pos + 1))};
+            deviceNames = {ov::DeviceIDParser::get_batch_device(deviceName.substr(pos + 1))};
         }
         deviceNames.push_back("BATCH");
     } else {
@@ -258,8 +259,8 @@ std::map<std::string, InferenceEngine::Version> ov::CoreImpl::GetVersions(const
     }

     for (auto&& deviceName_ : deviceNames) {
-        ie::DeviceIDParser parser(deviceName_);
-        std::string deviceNameLocal = parser.getDeviceName();
+        ov::DeviceIDParser parser(deviceName_);
+        std::string deviceNameLocal = parser.get_device_name();

         ov::Plugin cppPlugin = get_plugin(deviceNameLocal);

@@ -2,47 +2,51 @@
 // SPDX-License-Identifier: Apache-2.0
 //

-#include "ie_icore.hpp"
+#include "openvino/runtime/device_id_parser.hpp"

-namespace InferenceEngine {
+#include <set>
+
+namespace ov {

 DeviceIDParser::DeviceIDParser(const std::string& deviceNameWithID) {
-    deviceName = deviceNameWithID;
+    m_device_name = deviceNameWithID;

-    auto pos = deviceName.find('.');
+    auto pos = m_device_name.find('.');
     if (pos != std::string::npos) {
-        deviceName = deviceNameWithID.substr(0, pos);
-        deviceID = deviceNameWithID.substr(pos + 1, deviceNameWithID.size());
+        m_device_name = deviceNameWithID.substr(0, pos);
+        m_device_id = deviceNameWithID.substr(pos + 1, deviceNameWithID.size());
     }
 }

-std::string DeviceIDParser::getDeviceID() const {
-    return deviceID;
+const std::string& DeviceIDParser::get_device_id() const {
+    return m_device_id;
 }

-std::string DeviceIDParser::getDeviceName() const {
-    return deviceName;
+const std::string& DeviceIDParser::get_device_name() const {
+    return m_device_name;
 }

-std::vector<std::string> DeviceIDParser::getHeteroDevices(std::string fallbackDevice) {
+std::vector<std::string> DeviceIDParser::get_hetero_devices(const std::string& fallbackDevice) {
     std::vector<std::string> deviceNames;
+    std::string fallback_dev = fallbackDevice;

     std::string cdevice;
     char delimiter = ',';
     size_t pos = 0;

-    while ((pos = fallbackDevice.find(delimiter)) != std::string::npos) {
-        deviceNames.push_back(fallbackDevice.substr(0, pos));
-        fallbackDevice.erase(0, pos + 1);
+    while ((pos = fallback_dev.find(delimiter)) != std::string::npos) {
+        deviceNames.push_back(fallback_dev.substr(0, pos));
+        fallback_dev.erase(0, pos + 1);
     }

-    if (!fallbackDevice.empty())
-        deviceNames.push_back(fallbackDevice);
+    if (!fallback_dev.empty())
+        deviceNames.push_back(fallback_dev);

     return deviceNames;
 }

-std::vector<std::string> DeviceIDParser::getMultiDevices(std::string devicesList) {
+std::vector<std::string> DeviceIDParser::get_multi_devices(const std::string& devicesList) {
+    std::string dev_list = devicesList;
     std::set<std::string> deviceNames;
     auto trim_request_info = [](const std::string& device_with_requests) {
         auto opening_bracket = device_with_requests.find_first_of('(');
@@ -53,37 +57,37 @@ std::vector<std::string> DeviceIDParser::getMultiDevices(std::string devicesList
     size_t pos = 0;
     // in addition to the list of devices, every device can have a #requests in the brackets e.g. "CPU(100)"
     // we skip the #requests info here
-    while ((pos = devicesList.find(delimiter)) != std::string::npos) {
-        auto d = devicesList.substr(0, pos);
+    while ((pos = dev_list.find(delimiter)) != std::string::npos) {
+        auto d = dev_list.substr(0, pos);
         if (d.find("BATCH") == 0) {
             deviceNames.insert("BATCH");
             auto p = d.find_first_of(":");
             if (p != std::string::npos)
-                deviceNames.insert(DeviceIDParser::getBatchDevice(d.substr(p + 1)));
+                deviceNames.insert(DeviceIDParser::get_batch_device(d.substr(p + 1)));
         } else {
             deviceNames.insert(trim_request_info(d));
         }
-        devicesList.erase(0, pos + 1);
+        dev_list.erase(0, pos + 1);
     }

-    if (!devicesList.empty()) {
-        if (devicesList.find("BATCH") == 0) {
+    if (!dev_list.empty()) {
+        if (dev_list.find("BATCH") == 0) {
             deviceNames.insert("BATCH");
-            auto p = devicesList.find_first_of(":");
+            auto p = dev_list.find_first_of(":");
             if (p != std::string::npos)
-                deviceNames.insert(DeviceIDParser::getBatchDevice(devicesList.substr(p + 1)));
+                deviceNames.insert(DeviceIDParser::get_batch_device(dev_list.substr(p + 1)));
         } else {
-            deviceNames.insert(trim_request_info(devicesList));
+            deviceNames.insert(trim_request_info(dev_list));
         }
     }
     return std::vector<std::string>(deviceNames.begin(), deviceNames.end());
 }

-std::string DeviceIDParser::getBatchDevice(std::string device) {
+std::string DeviceIDParser::get_batch_device(const std::string& device) {
     auto trim_request_info = [](const std::string& device_with_requests) {
         auto opening_bracket = device_with_requests.find_first_of('(');
         return device_with_requests.substr(0, opening_bracket);
     };
     return trim_request_info(device);
 }
-} // namespace InferenceEngine
+} // namespace ov
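For reference, a small sketch of what the static helpers implemented above return for typical HETERO/MULTI/BATCH device strings (not part of the commit; the input strings are illustrative):

#include <iostream>

#include "openvino/runtime/device_id_parser.hpp"

int main() {
    // HETERO fallback list "GPU,CPU" -> {"GPU", "CPU"}
    for (const auto& d : ov::DeviceIDParser::get_hetero_devices("GPU,CPU"))
        std::cout << d << " ";
    std::cout << "\n";

    // MULTI list with request counts "CPU(4),GPU" -> unique names with the "(4)" request info stripped
    for (const auto& d : ov::DeviceIDParser::get_multi_devices("CPU(4),GPU"))
        std::cout << d << " ";
    std::cout << "\n";

    // BATCH device "GPU(8)" -> "GPU"
    std::cout << ov::DeviceIDParser::get_batch_device("GPU(8)") << "\n";
    return 0;
}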
@@ -40,6 +40,7 @@
 #include "openvino/op/result.hpp"
 #include "openvino/runtime/compiled_model.hpp"
 #include "openvino/runtime/core.hpp"
+#include "openvino/runtime/device_id_parser.hpp"
 #include "openvino/util/common_util.hpp"
 #include "openvino/util/file_util.hpp"
 #include "openvino/util/shared_object.hpp"
@@ -251,8 +252,8 @@ ExecutableNetwork Core::ImportNetwork(std::istream& networkModel,
     }

     std::string deviceName_ = context->getDeviceName();
-    DeviceIDParser device(deviceName_);
-    std::string deviceName = device.getDeviceName();
+    ov::DeviceIDParser device(deviceName_);
+    std::string deviceName = device.get_device_name();

     auto parsed = ov::parseDeviceNameIntoConfig(deviceName, ov::any_copy(config));
     auto exec = _impl->get_plugin(deviceName)
@@ -350,8 +351,8 @@ void Core::RegisterPlugins(const std::string& xmlConfigFile) {
 }

 void Core::UnregisterPlugin(const std::string& deviceName_) {
-    DeviceIDParser parser(deviceName_);
-    std::string deviceName = parser.getDeviceName();
+    ov::DeviceIDParser parser(deviceName_);
+    std::string deviceName = parser.get_device_name();

     _impl->unload_plugin(deviceName);
 }
@@ -17,6 +17,7 @@
 #include <ie_performance_hints.hpp>
 #include <threading/ie_executor_manager.hpp>
 #include "openvino/runtime/auto/properties.hpp"
+#include "openvino/runtime/device_id_parser.hpp"
 #include "plugin.hpp"
 #include <ie_algorithm.hpp>
 #include <ie_icore.hpp>
@@ -189,8 +190,8 @@ std::vector<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(cons
         }
     }

-    DeviceIDParser parsed{deviceName};
-    std::string deviceid = parsed.getDeviceID();
+    ov::DeviceIDParser parsed{deviceName};
+    std::string deviceid = parsed.get_device_id();
     std::vector<std::string> sameTypeDevices;
     // if AUTO:GPU case, replace GPU with GPU.0 and GPU.1
     // Disable AUTO:MYRIAD here because of below test case
@@ -212,19 +213,19 @@
     }

     for (auto&& deviceNameWithID : sameTypeDevices) {
-        DeviceIDParser newParsed{deviceNameWithID};
+        ov::DeviceIDParser newParsed{deviceNameWithID};
         std::string defaultDeviceID = "";
         std::string tempDeviceID = "";
-        if (newParsed.getDeviceID().empty()) {
+        if (newParsed.get_device_id().empty()) {
             defaultDeviceID = getDefaultDeviceID(deviceNameWithID);
             tempDeviceID = defaultDeviceID;
         } else {
-            tempDeviceID = newParsed.getDeviceID();
+            tempDeviceID = newParsed.get_device_id();
         }

         std::string fullDeviceName = "";
         std::string uniqueName = "";
-        if (newParsed.getDeviceName() == "GPU") {
+        if (newParsed.get_device_name() == "GPU") {
             auto supportedMetrics = GetCore()->GetMetric(deviceNameWithID, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
             if (std::find(supportedMetrics.begin(), supportedMetrics.end(), METRIC_KEY(FULL_DEVICE_NAME)) != supportedMetrics.end()) {
                 fullDeviceName = GetCore()->GetMetric(deviceNameWithID, METRIC_KEY(FULL_DEVICE_NAME)).as<std::string>();
@@ -232,7 +233,7 @@ std::vector<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(cons
         }

         if (fullDeviceName.empty()) {
-            uniqueName = newParsed.getDeviceName() + "_" + tempDeviceID;
+            uniqueName = newParsed.get_device_name() + "_" + tempDeviceID;
         } else {
             uniqueName = fullDeviceName + "_" + tempDeviceID;
         }
@@ -10,6 +10,7 @@
 #include "ie_icore.hpp"
 #include "openvino/runtime/auto/properties.hpp"
 #include "log.hpp"
+#include "openvino/runtime/device_id_parser.hpp"
 #include <string>
 #include <map>
 #include <vector>
@@ -199,7 +200,7 @@ public:
         if (realDevName.empty()) {
             return false;
         }
-        realDevName = DeviceIDParser(realDevName).getDeviceName();
+        realDevName = ov::DeviceIDParser(realDevName).get_device_name();
         std::string::size_type realEndPos = 0;
         if ((realEndPos = realDevName.find('(')) != std::string::npos) {
             realDevName = realDevName.substr(0, realEndPos);
@@ -239,4 +240,4 @@ private:
     BaseValidator::Ptr device_property_validator;
     static const std::set<std::string> _availableDevices;
 };
-} // namespace MultiDevicePlugin
+} // namespace MultiDevicePlugin
@@ -19,6 +19,7 @@
 #include "ie_ngraph_utils.hpp"
 #include "ie_performance_hints.hpp"
 #include "openvino/pass/manager.hpp"
+#include "openvino/runtime/device_id_parser.hpp"
 #include "openvino/runtime/intel_gpu/properties.hpp"
 #include "transformations/common_optimizations/dimension_tracking.hpp"
 #include "transformations/init_node_info.hpp"
@@ -692,8 +693,8 @@ DeviceInformation AutoBatchInferencePlugin::ParseBatchDevice(const std::string&
 DeviceInformation AutoBatchInferencePlugin::ParseMetaDevice(const std::string& devicesBatchCfg,
                                                             const std::map<std::string, std::string>& config) const {
     auto getDeviceConfig = [&](const DeviceName& deviceWithID) {
-        DeviceIDParser deviceParser(deviceWithID);
-        std::string deviceName = deviceParser.getDeviceName();
+        ov::DeviceIDParser deviceParser(deviceWithID);
+        std::string deviceName = deviceParser.get_device_name();
         std::map<std::string, std::string> tconfig = mergeConfigs(_config, config);
         // passthrough the cache dir to core->loadnetwork when underlying device does not support cache dir
         auto deviceConfig = GetCore()->GetSupportedConfig(deviceWithID, tconfig);
@@ -4,6 +4,7 @@

 // clang-format off
 #include "ie_metric_helpers.hpp"
+#include "openvino/runtime/device_id_parser.hpp"
 #include "plugin.hpp"
 #include <memory>
 #include <vector>
@@ -95,7 +96,7 @@ InferenceEngine::IExecutableNetworkInternal::Ptr Engine::ImportNetwork(

 Engine::DeviceMetaInformationMap Engine::GetDevicePlugins(const std::string& targetFallback,
                                                           const Configs& localConfig) const {
-    auto fallbackDevices = InferenceEngine::DeviceIDParser::getHeteroDevices(targetFallback);
+    auto fallbackDevices = ov::DeviceIDParser::get_hetero_devices(targetFallback);
     Engine::DeviceMetaInformationMap metaDevices;
     for (auto&& deviceName : fallbackDevices) {
         auto itPlugin = metaDevices.find(deviceName);
@@ -140,7 +141,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const Configs
     }

     // WARNING: Here is devices with user set priority
-    auto fallbackDevices = InferenceEngine::DeviceIDParser::getHeteroDevices(fallbackDevicesStr);
+    auto fallbackDevices = ov::DeviceIDParser::get_hetero_devices(fallbackDevicesStr);

     for (auto&& deviceName : fallbackDevices) {
         for (auto&& layerQueryResult : queryResults[deviceName].supportedLayersMap) {
@@ -187,17 +188,18 @@ Parameter Engine::GetMetric(const std::string& name, const std::map<std::string,
     }
 }
 std::string Engine::DeviceArchitecture(const std::string& targetFallback) const {
-    auto fallbackDevices = InferenceEngine::DeviceIDParser::getHeteroDevices(targetFallback);
+    auto fallbackDevices = ov::DeviceIDParser::get_hetero_devices(targetFallback);
     std::string resArch;
     for (const auto& device : fallbackDevices) {
-        InferenceEngine::DeviceIDParser parser(device);
+        ov::DeviceIDParser parser(device);

-        auto supportedMetricKeys =
-            GetCore()->GetMetric(parser.getDeviceName(), METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
+        auto supportedMetricKeys = GetCore()
+                                       ->GetMetric(parser.get_device_name(), METRIC_KEY(SUPPORTED_METRICS))
+                                       .as<std::vector<std::string>>();
         auto it = std::find(supportedMetricKeys.begin(), supportedMetricKeys.end(), METRIC_KEY(DEVICE_ARCHITECTURE));
         auto arch = (it != supportedMetricKeys.end())
                         ? GetCore()->GetMetric(device, METRIC_KEY(DEVICE_ARCHITECTURE)).as<std::string>()
-                        : parser.getDeviceName();
+                        : parser.get_device_name();
         resArch += " " + arch;
     }
     return resArch;
@@ -32,6 +32,7 @@
 #include "ie_plugin_config.hpp"
 #include "gpu/gpu_config.hpp"
 #include "cpp_interfaces/interface/ie_internal_plugin_config.hpp"
+#include "openvino/runtime/device_id_parser.hpp"
 #include "ie_icore.hpp"

 #include "dimension_tracker.hpp"
@@ -207,7 +208,7 @@ IExecutableNetworkInternal::Ptr Plugin::LoadExeNetworkImpl(const InferenceEngine
     check_inputs(_networkInputs);

     auto context_impl = get_context_impl(context);
-    auto device_id = InferenceEngine::DeviceIDParser{context_impl->get_device_name()}.getDeviceID();
+    auto device_id = ov::DeviceIDParser{context_impl->get_device_name()}.get_device_id();

     OPENVINO_ASSERT(m_configs_map.find(device_id) != m_configs_map.end(), "[GPU] LoadExeNetworkImpl: Couldn't find config for GPU with id ", device_id);

@@ -9,6 +9,7 @@

 #include <thread>

+#include "openvino/runtime/device_id_parser.hpp"
 #include <openvino/pass/serialize.hpp>
 #include <ngraph/opsets/opset.hpp>
 #include "shared_test_classes/base/layer_test_utils.hpp"
@@ -121,7 +122,7 @@ void LayerTestsCommon::QueryNetwork() {
             ASSERT_EQ(res.second, ctx->getDeviceName());
         } catch (...) {
             // otherwise, compare with originally used device name
-            ASSERT_EQ(InferenceEngine::DeviceIDParser(res.second).getDeviceName(), targetDevice);
+            ASSERT_EQ(ov::DeviceIDParser(res.second).get_device_name(), targetDevice);
         }
         actual.insert(res.first);
     }