[GPU] Add separate config for each device (#7421)
Commit 03424849fb (parent b7ccdf4490)
@@ -82,6 +82,7 @@ Options:
    -d "<device>"             Optional. Specify a target device to infer on (the list of available devices is shown below). Default value is CPU.
                              Use "-d HETERO:<comma-separated_devices_list>" format to specify HETERO plugin.
                              Use "-d MULTI:<comma-separated_devices_list>" format to specify MULTI plugin.
                              Use "-d GPU.X" format to specify device id for GPU devices.
                              The application looks for a suitable plugin for the specified device.
    -l "<absolute_path>"      Required for CPU custom layers. Absolute path to a shared library with the kernels implementations.
                              Or
@@ -218,6 +218,25 @@ int main(int argc, char* argv[]) {
        else if (FLAGS_hint == "latency")
            ov_perf_hint = CONFIG_VALUE(LATENCY);

        auto getDeviceTypeFromName = [](std::string device) -> std::string {
            return device.substr(0, device.find_first_of(".("));
        };

        // Set default values from dumped config
        std::set<std::string> default_devices;
        for (auto& device : devices) {
            auto default_config = config.find(getDeviceTypeFromName(device));
            if (default_config != config.end()) {
                if (!config.count(device)) {
                    config[device] = default_config->second;
                    default_devices.emplace(default_config->first);
                }
            }
        }
        for (auto& device : default_devices) {
            config.erase(device);
        }

        bool perf_counts = false;
        // Update config per device according to command line parameters
        for (auto& device : devices) {

@@ -256,7 +275,7 @@ int main(int argc, char* argv[]) {

            // the rest are individual per-device settings (overriding the values set with perf modes)
            auto setThroughputStreams = [&]() {
                const std::string key = device + "_THROUGHPUT_STREAMS";
                const std::string key = getDeviceTypeFromName(device) + "_THROUGHPUT_STREAMS";
                if (device_nstreams.count(device)) {
                    // set to user defined value
                    std::vector<std::string> supported_config_keys =

@@ -279,13 +298,13 @@ int main(int argc, char* argv[]) {
                            << slog::endl;
                    if (std::string::npos == device.find("MYRIAD"))  // MYRIAD sets the default number of
                                                                     // streams implicitly (without _AUTO)
                        device_config[key] = std::string(device + "_THROUGHPUT_AUTO");
                        device_config[key] = std::string(getDeviceTypeFromName(device) + "_THROUGHPUT_AUTO");
                }
                if (device_config.count(key))
                    device_nstreams[device] = device_config.at(key);
            };

            if (device == "CPU") {  // CPU supports few special performance-oriented keys
            if (device.find("CPU") != std::string::npos) {  // CPU supports few special performance-oriented keys
                // limit threading for CPU portion of inference
                if (isFlagSetInCommandLine("nthreads"))
                    device_config[CONFIG_KEY(CPU_THREADS_NUM)] = std::to_string(FLAGS_nthreads);

@@ -307,7 +326,7 @@ int main(int argc, char* argv[]) {

                // for CPU execution, more throughput-oriented execution via streams
                setThroughputStreams();
            } else if (device == ("GPU")) {
            } else if (device.find("GPU") != std::string::npos) {
                // for GPU execution, more throughput-oriented execution via streams
                setThroughputStreams();

@@ -320,10 +339,10 @@ int main(int argc, char* argv[]) {
                            << slog::endl;
                    device_config[GPU_CONFIG_KEY(PLUGIN_THROTTLE)] = "1";
                }
            } else if (device == "MYRIAD") {
            } else if (device.find("MYRIAD") != std::string::npos) {
                device_config[CONFIG_KEY(LOG_LEVEL)] = CONFIG_VALUE(LOG_WARNING);
                setThroughputStreams();
            } else if (device == "GNA") {
            } else if (device.find("GNA") != std::string::npos) {
                if (FLAGS_qb == 8)
                    device_config[GNA_CONFIG_KEY(PRECISION)] = "I8";
                else

@@ -525,7 +544,7 @@ int main(int argc, char* argv[]) {

        // Update number of streams
        for (auto&& ds : device_nstreams) {
            const std::string key = ds.first + "_THROUGHPUT_STREAMS";
            const std::string key = getDeviceTypeFromName(ds.first) + "_THROUGHPUT_STREAMS";
            device_nstreams[ds.first] = ie.GetConfig(ds.first, key).as<std::string>();
        }
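A minimal standalone sketch of the device-name-to-device-type mapping introduced above (illustrative only; the helper mirrors the lambda in the patch):

#include <iostream>
#include <string>

// "GPU.1" -> "GPU", "GPU(1)" -> "GPU", "CPU" -> "CPU"
std::string getDeviceTypeFromName(std::string device) {
    return device.substr(0, device.find_first_of(".("));
}

int main() {
    std::cout << getDeviceTypeFromName("GPU.1") << "\n";   // GPU
    std::cout << getDeviceTypeFromName("GPU(1)") << "\n";  // GPU
    std::cout << getDeviceTypeFromName("CPU") << "\n";     // CPU
    return 0;
}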
@@ -113,8 +113,6 @@ std::vector<std::string> parseDevices(const std::string& device_string) {
    if ((comma_separated_devices == "MULTI") || (comma_separated_devices == "HETERO"))
        return std::vector<std::string>();
    auto devices = split(comma_separated_devices, ',');
    for (auto& device : devices)
        device = device.substr(0, device.find_first_of(".("));
    return devices;
}

@@ -221,11 +219,25 @@ std::map<std::string, std::vector<float>> parseScaleOrMean(const std::string& sc

#ifdef USE_OPENCV
void dump_config(const std::string& filename, const std::map<std::string, std::map<std::string, std::string>>& config) {
    auto plugin_to_opencv_format = [](const std::string& str) -> std::string {
        if (str.find("_") != std::string::npos) {
            slog::warn
                << "Device name contains \"_\" and will be changed during loading of configuration due to limitations."
                   "This configuration file could not be loaded correctly."
                << slog::endl;
        }
        std::string new_str(str);
        auto pos = new_str.find(".");
        if (pos != std::string::npos) {
            new_str.replace(pos, 1, "_");
        }
        return new_str;
    };
    cv::FileStorage fs(filename, cv::FileStorage::WRITE);
    if (!fs.isOpened())
        throw std::runtime_error("Error: Can't open config file : " + filename);
    for (auto device_it = config.begin(); device_it != config.end(); ++device_it) {
        fs << device_it->first << "{:";
        fs << plugin_to_opencv_format(device_it->first) << "{:";
        for (auto param_it = device_it->second.begin(); param_it != device_it->second.end(); ++param_it)
            fs << param_it->first << param_it->second;
        fs << "}";

@@ -234,6 +246,14 @@ void dump_config(const std::string& filename, const std::map<std::string, std::m
}

void load_config(const std::string& filename, std::map<std::string, std::map<std::string, std::string>>& config) {
    auto opencv_to_plugin_format = [](const std::string& str) -> std::string {
        std::string new_str(str);
        auto pos = new_str.find("_");
        if (pos != std::string::npos) {
            new_str.replace(pos, 1, ".");
        }
        return new_str;
    };
    cv::FileStorage fs(filename, cv::FileStorage::READ);
    if (!fs.isOpened())
        throw std::runtime_error("Error: Can't load config file : " + filename);

@@ -245,7 +265,7 @@ void load_config(const std::string& filename, std::map<std::string, std::map<std
        }
        for (auto iit = device.begin(); iit != device.end(); ++iit) {
            auto item = *iit;
            config[device.name()][item.name()] = item.string();
            config[opencv_to_plugin_format(device.name())][item.name()] = item.string();
        }
    }
}
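The helpers above rely on a reversible name mapping, since OpenCV FileStorage keys cannot contain '.'. A minimal standalone sketch (function names mirror the lambdas in the patch):

#include <cassert>
#include <string>

// Stored key: "GPU.0" -> "GPU_0" (FileStorage keys cannot contain '.').
std::string plugin_to_opencv_format(std::string name) {
    auto pos = name.find('.');
    if (pos != std::string::npos)
        name.replace(pos, 1, "_");
    return name;
}

// Loaded key: "GPU_0" -> "GPU.0".
std::string opencv_to_plugin_format(std::string name) {
    auto pos = name.find('_');
    if (pos != std::string::npos)
        name.replace(pos, 1, ".");
    return name;
}

int main() {
    assert(plugin_to_opencv_format("GPU.0") == "GPU_0");
    assert(opencv_to_plugin_format("GPU_0") == "GPU.0");
    return 0;
}

This is also why dump_config warns when a device name already contains '_': such a name would not survive the round trip.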
@@ -350,6 +350,27 @@ void Config::adjustKeyMapValues() {
        key_config_map[PluginConfigParams::KEY_PERFORMANCE_HINT_NUM_REQUESTS] =
            std::to_string(perfHintsConfig.ovPerfHintNumRequests);
}

void Configs::CreateConfig(std::string device_id) {
    if (configs.find(device_id) == configs.end()) {
        configs.emplace(device_id, Config(device_id));
    }
}

Config& Configs::GetConfig(std::string device_id) {
    if (device_id.empty()) {
        return GetDefaultDeviceConfig();
    }
    if (configs.find(device_id) == configs.end()) {
        IE_THROW() << "Config for device with " << device_id << " ID is not registered in GPU plugin";
    }
    return configs.find(device_id)->second;
}

Config& Configs::GetDefaultDeviceConfig() {
    return GetConfig(default_device_id);
}

IE_SUPPRESS_DEPRECATED_END

}  // namespace CLDNNPlugin
@@ -14,7 +14,8 @@
namespace CLDNNPlugin {

struct Config {
    Config() : throughput_streams(1),
    Config(std::string device_id = "0") : device_id(device_id),
                                          throughput_streams(1),
                                          useProfiling(false),
                                          dumpCustomKernels(false),
                                          exclusiveAsyncRequests(false),

@@ -30,7 +31,6 @@ struct Config {
                                          tuningConfig(),
                                          graph_dumps_dir(""),
                                          sources_dumps_dir(""),
                                          device_id("0"),
                                          kernels_cache_dir(""),
                                          n_threads(std::max(static_cast<unsigned int>(1), std::thread::hardware_concurrency())),
                                          enable_loop_unrolling(true) {

@@ -40,6 +40,7 @@ struct Config {
    void UpdateFromMap(const std::map<std::string, std::string>& configMap);
    void adjustKeyMapValues();

    std::string device_id;
    uint16_t throughput_streams;
    bool useProfiling;
    bool dumpCustomKernels;

@@ -56,7 +57,6 @@ struct Config {
    cldnn::tuning_config_options tuningConfig;
    std::string graph_dumps_dir;
    std::string sources_dumps_dir;
    std::string device_id;
    std::string kernels_cache_dir;
    size_t n_threads;
    bool enable_loop_unrolling;

@@ -65,4 +65,23 @@ struct Config {
    InferenceEngine::PerfHintsConfig perfHintsConfig;
};

struct Configs {
    using conf_iter = std::map<std::string, Config>::iterator;
    Configs(Config conf = Config()) : configs({std::make_pair(default_device_id, conf.device_id = default_device_id)}) { }

    void CreateConfig(std::string device_id);
    Config& GetConfig(std::string device_id);
    Config& GetDefaultDeviceConfig();

    void SetDefaultDeviceID(std::string default_device_id) { this->default_device_id = default_device_id; }
    std::string GetDefaultDeviceID() { return default_device_id; }

    conf_iter begin() { return configs.begin(); }
    conf_iter end() { return configs.end(); }

private:
    std::string default_device_id = "0";
    std::map<std::string, Config> configs;
};

}  // namespace CLDNNPlugin
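A rough usage sketch of the new Configs container declared above; this is hypothetical calling code (the container is normally owned by clDNNEngine::impl), not part of the patch:

// Assumes the CLDNNPlugin headers above are available.
CLDNNPlugin::Configs configs;            // constructed with a default entry for device "0"
configs.CreateConfig("0");               // one Config per enumerated GPU, keyed by device id
configs.CreateConfig("1");

configs.SetDefaultDeviceID("1");         // e.g. after a SetConfig call carrying KEY_DEVICE_ID = "1"

CLDNNPlugin::Config& dev1 = configs.GetConfig("1");  // settings for GPU.1 only
CLDNNPlugin::Config& def  = configs.GetConfig("");   // empty id resolves to the default device config
// configs.GetConfig("7") throws: that id was never registered in the plugin.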
@@ -23,6 +23,7 @@
#include "cldnn_custom_layer.h"
#include "cldnn_itt.h"
#include "gpu/gpu_config.hpp"
#include "cpp_interfaces/interface/ie_internal_plugin_config.hpp"

#include <transformations/rt_info/fused_names_attribute.hpp>

@@ -61,17 +62,25 @@ void clDNNEngine::RegisterPrimitives() {
}

struct clDNNEngine::impl {
    CLDNNPlugin::Config m_config;
    CLDNNPlugin::Configs m_configs;
};

std::string clDNNEngine::GetDeviceIDFromConfig(const std::map<std::string, std::string>& config) const {
    std::string device_id;
    if (config.find(PluginConfigParams::KEY_DEVICE_ID) != config.end()) {
        device_id = config.at(PluginConfigParams::KEY_DEVICE_ID);
    }
    return device_id;
}

cldnn::device_info clDNNEngine::GetDeviceInfo(const std::map<std::string, std::string> &config) const {
    auto device_info = device_map.begin()->second->get_info();
    if (config.find(PluginConfigParams::KEY_DEVICE_ID) != config.end()) {
        auto val = config.at(PluginConfigParams::KEY_DEVICE_ID);
        if (device_map.find(val) == device_map.end()) {
            IE_THROW() << "Invalid device ID: " << val;
    std::string device_id = GetDeviceIDFromConfig(config);
    if (!device_id.empty()) {
        if (device_map.find(device_id) == device_map.end()) {
            IE_THROW() << "Invalid device ID: " << device_id;
        }
        device_info = device_map.at(val)->get_info();
        device_info = device_map.at(device_id)->get_info();
    }

    return device_info;

@@ -109,6 +118,11 @@ clDNNEngine::clDNNEngine() : m_defaultContext(nullptr) {
        // Set OCL runtime which should be always available
        cldnn::device_query device_query(cldnn::engine_types::ocl, cldnn::runtime_types::ocl);
        device_map = device_query.get_available_devices();

        // Set default configs for each device
        for (auto& device : device_map) {
            _impl->m_configs.CreateConfig(device.first);
        }
    }
    // locate global custom kernel config
    // and auto-load kernels from it

@@ -133,7 +147,9 @@ clDNNEngine::clDNNEngine() : m_defaultContext(nullptr) {
            config_path = configFile.substr(0, dir_split_pos);
        }
        config_path += "/cldnn_global_custom_kernels/cldnn_global_custom_kernels.xml";
        CLDNNCustomLayer::LoadFromFile(config_path, _impl->m_config.customLayers, true);
        for (auto& config : _impl->m_configs) {
            CLDNNCustomLayer::LoadFromFile(config_path, config.second.customLayers, true);
        }
    }

    auto check_inputs = [](InferenceEngine::InputsDataMap _networkInputs) {

@@ -201,7 +217,10 @@ IExecutableNetworkInternal::Ptr clDNNEngine::LoadExeNetworkImpl(const InferenceE
    InferenceEngine::InputsDataMap _networkInputs = network.getInputsInfo();
    check_inputs(_networkInputs);

    CLDNNPlugin::Config conf = _impl->m_config;
    CLDNNPlugin::Configs confs = _impl->m_configs;
    std::string device_id = GetDeviceIDFromConfig(orig_config);
    CLDNNPlugin::Config conf = confs.GetConfig(device_id);

    auto config = ConvertPerfHintsToConfig(orig_config, conf);
    UpdateConfig(conf, network, config);

@@ -270,12 +289,12 @@ RemoteContext::Ptr clDNNEngine::CreateContext(const ParamMap& params) {
    std::string contextTypeStr = _StrFromParams(params, GPU_PARAM_KEY(CONTEXT_TYPE));

    if (GPU_PARAM_VALUE(OCL) == contextTypeStr) {
        return std::make_shared<CLDNNRemoteCLContext>(shared_from_this(), params, _impl->m_config);
        return std::make_shared<CLDNNRemoteCLContext>(shared_from_this(), params, _impl->m_configs.GetDefaultDeviceConfig());
    } else if (GPU_PARAM_VALUE(VA_SHARED) == contextTypeStr) {
#ifdef _WIN32
        return std::make_shared<CLDNNRemoteD3DContext>(shared_from_this(), params, _impl->m_config);
        return std::make_shared<CLDNNRemoteD3DContext>(shared_from_this(), params, _impl->m_configs.GetDefaultDeviceConfig());
#else
        return std::make_shared<CLDNNRemoteVAContext>(shared_from_this(), params, _impl->m_config);
        return std::make_shared<CLDNNRemoteVAContext>(shared_from_this(), params, _impl->m_configs.GetDefaultDeviceConfig());
#endif
    } else {
        IE_THROW() << "Invalid remote context type" << contextTypeStr;

@@ -284,7 +303,7 @@ RemoteContext::Ptr clDNNEngine::CreateContext(const ParamMap& params) {

RemoteContext::Ptr clDNNEngine::GetDefaultContext(const ParamMap& params) {
    if (nullptr == m_defaultContext) {
        m_defaultContext.reset(new CLDNNRemoteCLContext(shared_from_this(), params, _impl->m_config));
        m_defaultContext.reset(new CLDNNRemoteCLContext(shared_from_this(), params, _impl->m_configs.GetDefaultDeviceConfig()));
    }
    return m_defaultContext;
}

@@ -293,14 +312,31 @@ void clDNNEngine::SetConfig(const std::map<std::string, std::string> &config) {
    streamsSet = (config.find(PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS) != config.end());
    throttlingSet = config.find(GPUConfigParams::KEY_GPU_PLUGIN_THROTTLE) != config.end() ||
                    config.find(CLDNNConfigParams::KEY_CLDNN_PLUGIN_THROTTLE) != config.end();
    _impl->m_config.UpdateFromMap(config);
    std::string device_id;
    if (config.find(PluginConfigInternalParams::KEY_CONFIG_DEVICE_ID) != config.end()) {
        device_id = config.at(PluginConfigInternalParams::KEY_CONFIG_DEVICE_ID);
        _impl->m_configs.GetConfig(device_id).UpdateFromMap(config);
    } else {
        device_id = GetDeviceIDFromConfig(config);
        if (!device_id.empty()) {
            _impl->m_configs.SetDefaultDeviceID(device_id);
            _impl->m_configs.GetConfig(device_id).UpdateFromMap(config);
        } else {
            for (auto& conf : _impl->m_configs) {
                conf.second.UpdateFromMap(config);
            }
        }
    }
}

QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
                                             const std::map<std::string, std::string>& config) const {
    OV_ITT_SCOPED_TASK(itt::domains::CLDNNPlugin, "clDNNEngine::QueryNetwork");
    QueryNetworkResult res;
    CLDNNPlugin::Config conf = _impl->m_config;
    CLDNNPlugin::Configs confs = _impl->m_configs;
    std::string device_id = GetDeviceIDFromConfig(config);
    CLDNNPlugin::Config conf = confs.GetConfig(device_id);

    UpdateConfig(conf, network, config);

    if (m_defaultContext == nullptr) {

@@ -516,12 +552,18 @@ QueryNetworkResult clDNNEngine::QueryNetwork(const CNNNetwork& network,
    return res;
}

Parameter clDNNEngine::GetConfig(const std::string& name, const std::map<std::string, Parameter>& /*options*/) const {
Parameter clDNNEngine::GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const {
    OV_ITT_SCOPED_TASK(itt::domains::CLDNNPlugin, "clDNNEngine::GetConfig");
    Parameter result;
    auto option = _impl->m_config.key_config_map.find(name);
    if (option != _impl->m_config.key_config_map.end()) {
        result = option->second;

    std::string device_id;
    if (options.find(PluginConfigParams::KEY_DEVICE_ID) != options.end()) {
        device_id = options.find(PluginConfigParams::KEY_DEVICE_ID)->second.as<std::string>();
    }
    Config config = _impl->m_configs.GetConfig(device_id);

    if (config.key_config_map.find(name) != config.key_config_map.end()) {
        result = config.key_config_map.find(name)->second;
    } else {
        IE_THROW() << "Unsupported config key : " << name;
    }

@@ -583,9 +625,7 @@ static float GetGOPS(cldnn::device_info info, cldnn::data_types dt) {

Parameter clDNNEngine::GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const {
    OV_ITT_SCOPED_TASK(itt::domains::CLDNNPlugin, "clDNNEngine::GetMetric");
    auto device_id = GetConfig(CONFIG_KEY(DEVICE_ID), {});
    if (options.find(CONFIG_KEY(DEVICE_ID)) != options.end())
        device_id = options.at(CONFIG_KEY(DEVICE_ID)).as<std::string>();
    std::string device_id = GetConfig(CONFIG_KEY(DEVICE_ID), options);

    auto iter = device_map.find(device_id);
    auto device_info = iter != device_map.end() ?

@@ -643,7 +683,7 @@ Parameter clDNNEngine::GetMetric(const std::string& name, const std::map<std::st
        IE_SET_METRIC_RETURN(FULL_DEVICE_NAME, deviceName);
    } else if (name == METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
        std::vector<std::string> configKeys;
        for (auto opt : _impl->m_config.key_config_map)
        for (auto opt : _impl->m_configs.GetConfig(device_id).key_config_map)
            configKeys.push_back(opt.first);
        IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, configKeys);
    } else if (name == METRIC_KEY(OPTIMIZATION_CAPABILITIES)) {
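End to end, the per-device configs let applications address an individual GPU either by name suffix or by KEY_DEVICE_ID. A minimal sketch, assuming a machine that actually exposes GPU.1 (mirrors the behavior tests added later in this PR):

#include <ie_core.hpp>
#include <string>

int main() {
    InferenceEngine::Core core;

    // Configure only the second GPU; configs of other GPUs are left untouched.
    core.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT,
                     InferenceEngine::PluginConfigParams::YES}}, "GPU.1");

    // Setting KEY_DEVICE_ID on the family makes that device the plugin's default.
    core.SetConfig({{InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, "1"}}, "GPU");

    // Read a value back for a specific device.
    std::string value = core.GetConfig("GPU.1",
        InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>();
    return value == InferenceEngine::PluginConfigParams::YES ? 0 : 1;
}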
@@ -49,6 +49,7 @@ public:
                     const std::map<std::string, std::string> &config) override;

    void SetConfig(const std::map<std::string, std::string> &config) override;
    std::string GetDeviceIDFromConfig(const std::map<std::string, std::string>& config) const;
    InferenceEngine::Parameter GetConfig(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const override;
    InferenceEngine::Parameter GetMetric(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const override;
    InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::CNNNetwork& network,
@@ -68,7 +68,7 @@ public:
    Program(InferenceEngine::CNNNetwork& network, std::shared_ptr<cldnn::engine> engine, const Config& config, bool createTopologyOnly = false);
    Program(std::shared_ptr<cldnn::engine> engine, const Config& config) : m_config(config), m_engine(engine),
        m_curBatch(-1), queryMode(false), m_max_batch(1) {}
    Program() : m_config({}), m_engine(nullptr), m_curBatch(-1), queryMode(false), m_max_batch(1) {}
    Program() : m_config(), m_engine(nullptr), m_curBatch(-1), queryMode(false), m_max_batch(1) {}

    static const cldnn::primitive_id m_preProcessTag;
    static const cldnn::primitive_id m_meanValuesTag;
@@ -707,6 +707,24 @@ public:
            }
        }
        allowNotImplemented([&]() {
            // Add device specific value to support device_name.device_id cases
            std::vector<std::string> supportedConfigKeys =
                plugin.get_metric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), {});
            auto config_iter = std::find(supportedConfigKeys.begin(),
                                         supportedConfigKeys.end(),
                                         CONFIG_KEY_INTERNAL(CONFIG_DEVICE_ID));
            const bool supportsConfigDeviceID = config_iter != supportedConfigKeys.end();
            const std::string deviceKey =
                supportsConfigDeviceID ? CONFIG_KEY_INTERNAL(CONFIG_DEVICE_ID) : CONFIG_KEY(DEVICE_ID);

            for (auto pluginDesc : pluginRegistry) {
                InferenceEngine::DeviceIDParser parser(pluginDesc.first);
                if (pluginDesc.first.find(deviceName) != std::string::npos &&
                    !parser.getDeviceID().empty()) {
                    pluginDesc.second.defaultConfig[deviceKey] = parser.getDeviceID();
                    plugin.set_config(pluginDesc.second.defaultConfig);
                }
            }
            plugin.set_config(desc.defaultConfig);
        });

@@ -797,17 +815,26 @@ public:
     * @param deviceName A device name to set config to
     *        If empty, config is set for all the plugins / plugin's meta-data
     * @note `deviceName` is not allowed in form of MULTI:CPU, HETERO:GPU,CPU, AUTO:CPU
     *       just simple forms like CPU, GPU, MULTU, GPU.0, etc
     *       just simple forms like CPU, GPU, MULTI, GPU.0, etc
     */
    void SetConfigForPlugins(const std::map<std::string, std::string>& configMap, const std::string& deviceName) {
        auto config = configMap;

        InferenceEngine::DeviceIDParser parser(deviceName);
        std::string clearDeviceName = parser.getDeviceName();

        std::lock_guard<std::mutex> lock(pluginsMutex);

        if (deviceName.empty()) {
            coreConfig.setAndUpdate(config);
        }

        auto base_desc = pluginRegistry.find(clearDeviceName);
        if (pluginRegistry.find(deviceName) == pluginRegistry.end() && base_desc != pluginRegistry.end()) {
            PluginDescriptor desc = {base_desc->second.libraryLocation, config, base_desc->second.listOfExtentions};
            pluginRegistry[deviceName] = desc;
        }

        // set config for plugins in registry
        bool configIsSet = false;
        for (auto& desc : pluginRegistry) {

@@ -825,7 +852,7 @@ public:

        // set config for already created plugins
        for (auto& plugin : plugins) {
            if (deviceName.empty() || deviceName == plugin.first) {
            if (deviceName.empty() || clearDeviceName == plugin.first) {
                allowNotImplemented([&]() {
                    auto configCopy = config;
                    if (DeviceSupportsCacheDir(plugin.second)) {

@@ -834,6 +861,19 @@ public:
                            configCopy[CONFIG_KEY(CACHE_DIR)] = cacheConfig._cacheDir;
                        }
                    }
                    // Add device specific value to support device_name.device_id cases
                    std::vector<std::string> supportedConfigKeys =
                        plugin.second.get_metric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), {});
                    auto config_iter = std::find(supportedConfigKeys.begin(),
                                                 supportedConfigKeys.end(),
                                                 CONFIG_KEY_INTERNAL(CONFIG_DEVICE_ID));
                    const bool supportsConfigDeviceID = config_iter != supportedConfigKeys.end();
                    const std::string deviceKey =
                        supportsConfigDeviceID ? CONFIG_KEY_INTERNAL(CONFIG_DEVICE_ID) : CONFIG_KEY(DEVICE_ID);

                    if (!parser.getDeviceID().empty()) {
                        configCopy[deviceKey] = parser.getDeviceID();
                    }
                    plugin.second.set_config(configCopy);
                });
            }

@@ -1176,18 +1216,10 @@ void Core::SetConfig(const std::map<std::string, std::string>& config, const std
                    "You can configure the devices with SetConfig before creating the AUTO on top.";
    }

    // GPU.0, GPU.1 cases
    if (deviceName.find(".") != std::string::npos) {
        IE_THROW()
            << "SetConfig is supported only for device family itself (without particular device .#). "
               "You can pass .# as a particular device instance to QueryNetwork, LoadNetwork, ImportNetwork only";
    }

    if (deviceName.empty()) {
        _impl->SetConfigForPlugins(config, std::string());
    } else {
        auto parsed = ov::runtime::parseDeviceNameIntoConfig(deviceName, config);
        _impl->SetConfigForPlugins(parsed._config, parsed._deviceName);
        _impl->SetConfigForPlugins(config, deviceName);
    }
}

@@ -1442,17 +1474,11 @@ void Core::set_config(const ConfigMap& config, const std::string& deviceName) {
                    "set_config is supported only for AUTO itself (without devices). "
                    "You can configure the devices with set_config before creating the AUTO on top.");

    // GPU.0, GPU.1 cases
    OPENVINO_ASSERT(deviceName.find(".") == std::string::npos,
                    "set_config is supported only for device family itself (without particular device .#). "
                    "You can pass .# as a particular device instance to query_model, compile_model, import_model only");

    OV_CORE_CALL_STATEMENT({
        if (deviceName.empty()) {
            _impl->SetConfigForPlugins(config, std::string());
        } else {
            auto parsed = parseDeviceNameIntoConfig(deviceName, config);
            _impl->SetConfigForPlugins(parsed._config, parsed._deviceName);
            _impl->SetConfigForPlugins(config, deviceName);
        }
    });
}
@@ -57,6 +57,11 @@ DECLARE_CONFIG_KEY(FORCE_DISABLE_CACHE);
 */
DECLARE_CONFIG_KEY(MULTI_WORK_MODE_AS_AUTO);

/**
 * @brief Internal device id for particular device (like GPU.0, GPU.1 etc)
 */
DECLARE_CONFIG_KEY(CONFIG_DEVICE_ID);

}  // namespace PluginConfigInternalParams

}  // namespace InferenceEngine
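For context, a simplified sketch of how the core decides which key carries the device id when forwarding configuration to a plugin (the string literals stand in for the CONFIG_KEY_INTERNAL(CONFIG_DEVICE_ID) and CONFIG_KEY(DEVICE_ID) macros; the helper itself is illustrative, not part of the patch):

#include <algorithm>
#include <map>
#include <string>
#include <vector>

// Adds the device id under the internal key when the plugin advertises it in
// SUPPORTED_CONFIG_KEYS, otherwise under the public DEVICE_ID key.
std::map<std::string, std::string> addDeviceID(std::map<std::string, std::string> config,
                                               const std::string& deviceID,
                                               const std::vector<std::string>& supportedConfigKeys) {
    const std::string internalKey = "CONFIG_DEVICE_ID";  // placeholder for the internal macro value
    const std::string publicKey   = "DEVICE_ID";         // placeholder for the public macro value
    const bool supportsInternal = std::find(supportedConfigKeys.begin(),
                                            supportedConfigKeys.end(),
                                            internalKey) != supportedConfigKeys.end();
    if (!deviceID.empty())
        config[supportsInternal ? internalKey : publicKey] = deviceID;
    return config;
}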
@ -424,6 +424,7 @@ private:
|
||||
};
|
||||
|
||||
TEST_P(CachingTest, TestLoad) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -454,6 +455,7 @@ TEST_P(CachingTest, TestLoad) {
|
||||
|
||||
TEST_P(CachingTest, TestLoadCustomImportExport) {
|
||||
const char customData[] = {1, 2, 3, 4, 5};
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -512,6 +514,7 @@ TEST_P(CachingTest, TestLoadCustomImportExport) {
|
||||
// Brief: when LoadNetwork is called from different config - old cache shall not be used
|
||||
TEST_P(CachingTest, TestChangeLoadConfig) {
|
||||
const std::string CUSTOM_KEY = "CUSTOM_KEY";
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -547,6 +550,7 @@ TEST_P(CachingTest, TestChangeLoadConfig) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestNoCacheEnabled) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(0);
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -563,6 +567,7 @@ TEST_P(CachingTest, TestNoCacheEnabled) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestNoCacheSupported) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _))
|
||||
.Times(AnyNumber()).WillRepeatedly(Return(false));
|
||||
@ -582,6 +587,7 @@ TEST_P(CachingTest, TestNoCacheSupported) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestNoCacheMetricSupported) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _))
|
||||
.Times(AnyNumber()).WillRepeatedly(Return(std::vector<std::string>{}));
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(0);
|
||||
@ -688,6 +694,7 @@ TEST_P(CachingTest, TestNoCacheEnabled_cacheDirConfig) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestLoadChangeCacheDir) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -719,6 +726,7 @@ TEST_P(CachingTest, TestLoadChangeCacheDir) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestClearCacheDir) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(0);
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -737,6 +745,7 @@ TEST_P(CachingTest, TestClearCacheDir) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestChangeOtherConfig) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -756,6 +765,7 @@ TEST_P(CachingTest, TestChangeOtherConfig) {
|
||||
|
||||
TEST_P(CachingTest, TestChangeCacheDirFailure) {
|
||||
std::string longName(1000000, ' ');
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -790,6 +800,7 @@ TEST_P(CachingTest, TestCacheDirCreateRecursive) {
|
||||
std::string newCacheDir2 = newCacheDir1 + CommonTestUtils::FileSeparator + "b";
|
||||
std::string newCacheDir3 = newCacheDir2 + CommonTestUtils::FileSeparator + CommonTestUtils::FileSeparator;
|
||||
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -810,6 +821,7 @@ TEST_P(CachingTest, TestCacheDirCreateRecursive) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestDeviceArchitecture) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber())
|
||||
@ -874,6 +886,7 @@ TEST_P(CachingTest, TestDeviceArchitecture) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestNoDeviceArchitecture) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber())
|
||||
.WillRepeatedly(Invoke([&] (const std::string&, const std::map<std::string, Parameter>&) {
|
||||
return std::vector<std::string>{METRIC_KEY(IMPORT_EXPORT_SUPPORT)};
|
||||
@ -908,6 +921,7 @@ TEST_P(CachingTest, TestNoDeviceArchitecture) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestThrowOnExport) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -927,6 +941,7 @@ TEST_P(CachingTest, TestThrowOnExport) {
|
||||
// TODO: temporary behavior is to no re-throw exception on import error (see 54335)
|
||||
// In future add separate 'no throw' test for 'blob_outdated' exception from plugin
|
||||
TEST_P(CachingTest, TestThrowOnImport) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -972,6 +987,7 @@ TEST_P(CachingTest, TestThrowOnImport) {
|
||||
|
||||
// FIXME: two different tests expect different number of results
|
||||
TEST_P(CachingTest, DISABLED_TestNetworkModified) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -1042,6 +1058,7 @@ TEST_P(CachingTest, DISABLED_TestNetworkModified) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestCacheFileCorrupted) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
@ -1089,6 +1106,7 @@ TEST_P(CachingTest, TestCacheFileCorrupted) {
|
||||
}
|
||||
|
||||
TEST_P(CachingTest, TestCacheFileOldVersion) {
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(SUPPORTED_METRICS), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)).Times(AnyNumber());
|
||||
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
|
||||
|
@ -129,8 +129,6 @@ INSTANTIATE_TEST_SUITE_P(nightly_OVClassGetMetricTest,
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(nightly_OVClassGetConfigTest, OVClassGetConfigTest, ::testing::Values("GPU"));
|
||||
|
||||
|
||||
|
||||
// IE Class Query network
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_OVClassQueryNetworkTest, OVClassQueryNetworkTest, ::testing::Values("GPU"));
|
||||
@ -142,4 +140,57 @@ INSTANTIATE_TEST_SUITE_P(smoke_OVClassLoadNetworkTest, OVClassLoadNetworkTest, :
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_OVClassHeteroExecutableNetworkGetMetricTest,
|
||||
OVClassLoadNetworkAfterCoreRecreateTest,
|
||||
::testing::Values("GPU"));
|
||||
|
||||
// GetConfig / SetConfig for specific device
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_OVClassSpecificDevice0Test, OVClassSpecificDeviceTestGetConfig,
|
||||
::testing::Values("GPU.0")
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_OVClassSpecificDevice1Test, OVClassSpecificDeviceTestGetConfig,
|
||||
::testing::Values("GPU.1")
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_OVClassSpecificDevice0Test, OVClassSpecificDeviceTestSetConfig,
|
||||
::testing::Values("GPU.0")
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_OVClassSpecificDevice1Test, OVClassSpecificDeviceTestSetConfig,
|
||||
::testing::Values("GPU.1")
|
||||
);
|
||||
|
||||
// Several devices case
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_OVClassSeveralDevicesTest, OVClassSeveralDevicesTestLoadNetwork,
|
||||
::testing::Values(std::vector<std::string>({"GPU.0", "GPU.1"}))
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_OVClassSeveralDevicesTest, OVClassSeveralDevicesTestQueryNetwork,
|
||||
::testing::Values(std::vector<std::string>({"GPU.0", "GPU.1"}))
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_OVClassSeveralDevicesTest, OVClassSeveralDevicesTestDefaultCore,
|
||||
::testing::Values(std::vector<std::string>({"GPU.0", "GPU.1"}))
|
||||
);
|
||||
|
||||
// Set default device ID
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_OVClassSetDefaultDeviceIDTest, OVClassSetDefaultDeviceIDTest,
|
||||
::testing::Values(std::make_pair("GPU", "1"))
|
||||
);
|
||||
|
||||
// Set config for all GPU devices
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_OVClassSetGlobalConfigTest, OVClassSetGlobalConfigTest,
|
||||
::testing::Values("GPU")
|
||||
);
|
||||
} // namespace
|
@ -201,4 +201,58 @@ INSTANTIATE_TEST_SUITE_P(
|
||||
smoke_IEClassHeteroExecutableNetworkGetMetricTest, IEClassLoadNetworkAfterCoreRecreateTest,
|
||||
::testing::Values("GPU")
|
||||
);
|
||||
|
||||
// GetConfig / SetConfig for specific device
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_IEClassSpecificDevice0Test, IEClassSpecificDeviceTestGetConfig,
|
||||
::testing::Values("GPU.0")
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_IEClassSpecificDevice1Test, IEClassSpecificDeviceTestGetConfig,
|
||||
::testing::Values("GPU.1")
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_IEClassSpecificDevice0Test, IEClassSpecificDeviceTestSetConfig,
|
||||
::testing::Values("GPU.0")
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_IEClassSpecificDevice1Test, IEClassSpecificDeviceTestSetConfig,
|
||||
::testing::Values("GPU.1")
|
||||
);
|
||||
|
||||
// Several devices case
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_IEClassSeveralDevicesTest, IEClassSeveralDevicesTestLoadNetwork,
|
||||
::testing::Values(std::vector<std::string>({"GPU.0", "GPU.1"}))
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_IEClassSeveralDevicesTest, IEClassSeveralDevicesTestQueryNetwork,
|
||||
::testing::Values(std::vector<std::string>({"GPU.0", "GPU.1"}))
|
||||
);
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_IEClassSeveralDevicesTest, IEClassSeveralDevicesTestDefaultCore,
|
||||
::testing::Values(std::vector<std::string>({"GPU.0", "GPU.1"}))
|
||||
);
|
||||
|
||||
// Set default device ID
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_IEClassSetDefaultDeviceIDTest, IEClassSetDefaultDeviceIDTest,
|
||||
::testing::Values(std::make_pair("GPU", "1"))
|
||||
);
|
||||
|
||||
// Set config for all GPU devices
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
nightly_IEClassSetGlobalConfigTest, IEClassSetGlobalConfigTest,
|
||||
::testing::Values("GPU")
|
||||
);
|
||||
|
||||
} // namespace
|
||||
|
@ -44,6 +44,17 @@ public:
|
||||
}
|
||||
};
|
||||
|
||||
class OVClassSetDefaultDeviceIDTest : public ::testing::Test,
|
||||
public ::testing::WithParamInterface<std::pair<std::string, std::string>> {
|
||||
protected:
|
||||
std::string deviceName;
|
||||
std::string deviceID;
|
||||
public:
|
||||
void SetUp() override {
|
||||
std::tie(deviceName, deviceID) = GetParam();
|
||||
}
|
||||
};
|
||||
|
||||
using OVClassNetworkTestP = OVClassBaseTestP;
|
||||
using OVClassQueryNetworkTest = OVClassBaseTestP;
|
||||
using OVClassImportExportTestP = OVClassBaseTestP;
|
||||
@ -60,12 +71,26 @@ using OVClassGetMetricTest_RANGE_FOR_ASYNC_INFER_REQUESTS = OVClassBaseTestP;
|
||||
using OVClassGetMetricTest_ThrowUnsupported = OVClassBaseTestP;
|
||||
using OVClassGetConfigTest = OVClassBaseTestP;
|
||||
using OVClassGetConfigTest_ThrowUnsupported = OVClassBaseTestP;
|
||||
using OVClassGetAvailableDevices = OVClassBaseTestP;
|
||||
using OVClassGetMetricTest_RANGE_FOR_STREAMS = OVClassBaseTestP;
|
||||
using OVClassLoadNetworkAfterCoreRecreateTest = OVClassBaseTestP;
|
||||
using OVClassLoadNetworkTest = OVClassQueryNetworkTest;
|
||||
using OVClassSetGlobalConfigTest = OVClassBaseTestP;
|
||||
using OVClassSpecificDeviceTestSetConfig = OVClassBaseTestP;
|
||||
using OVClassSpecificDeviceTestGetConfig = OVClassBaseTestP;
|
||||
|
||||
class OVClassSeveralDevicesTest : public OVClassNetworkTest,
|
||||
public ::testing::WithParamInterface<std::vector<std::string>> {
|
||||
public:
|
||||
std::vector<std::string> deviceNames;
|
||||
void SetUp() override {
|
||||
OVClassNetworkTest::SetUp();
|
||||
deviceNames = GetParam();
|
||||
}
|
||||
};
|
||||
using OVClassSeveralDevicesTestLoadNetwork = OVClassSeveralDevicesTest;
|
||||
using OVClassSeveralDevicesTestQueryNetwork = OVClassSeveralDevicesTest;
|
||||
using OVClassSeveralDevicesTestDefaultCore = OVClassSeveralDevicesTest;
|
||||
|
||||
inline bool supportsAvaliableDevices(ov::runtime::Core& ie, const std::string& deviceName) {
|
||||
auto supportedMetricKeys = ie.get_metric(deviceName, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
|
||||
@ -73,6 +98,13 @@ inline bool supportsAvaliableDevices(ov::runtime::Core& ie, const std::string& d
|
||||
std::find(std::begin(supportedMetricKeys), std::end(supportedMetricKeys), METRIC_KEY(AVAILABLE_DEVICES));
|
||||
}
|
||||
|
||||
bool supportsDeviceID(ov::runtime::Core& ie, const std::string& deviceName) {
|
||||
auto supportedConfigKeys =
|
||||
ie.get_metric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
|
||||
return supportedConfigKeys.end() !=
|
||||
std::find(std::begin(supportedConfigKeys), std::end(supportedConfigKeys), CONFIG_KEY(DEVICE_ID));
|
||||
}
|
||||
|
||||
TEST(OVClassBasicTest, smoke_createDefault) {
|
||||
ASSERT_NO_THROW(ov::runtime::Core ie);
|
||||
}
|
||||
@ -268,6 +300,29 @@ TEST(OVClassBasicTest, smoke_SetConfigHeteroNoThrow) {
|
||||
ASSERT_FALSE(value);
|
||||
}
|
||||
|
||||
TEST_P(OVClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) {
|
||||
ov::runtime::Core ie = createCoreWithTemplate();
|
||||
|
||||
std::string deviceID, clearDeviceName;
|
||||
auto pos = deviceName.find('.');
|
||||
if (pos != std::string::npos) {
|
||||
clearDeviceName = deviceName.substr(0, pos);
|
||||
deviceID = deviceName.substr(pos + 1, deviceName.size());
|
||||
}
|
||||
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
|
||||
GTEST_SKIP();
|
||||
}
|
||||
std::vector<std::string> deviceIDs = ie.get_metric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
|
||||
if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
|
||||
GTEST_SKIP();
|
||||
}
|
||||
|
||||
ASSERT_NO_THROW(ie.set_config({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}, deviceName));
|
||||
std::string value;
|
||||
ASSERT_NO_THROW(value = ie.get_config(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
|
||||
ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES);
|
||||
}
|
||||
|
||||
//
|
||||
// QueryNetwork
|
||||
//
|
||||
@ -305,6 +360,31 @@ TEST_P(OVClassNetworkTestP, QueryNetworkWithKSO) {
|
||||
}
|
||||
}
|
||||
|
||||
TEST_P(OVClassSeveralDevicesTestQueryNetwork, QueryNetworkActualSeveralDevicesNoThrow) {
|
||||
ov::runtime::Core ie = createCoreWithTemplate();
|
||||
|
||||
std::string clearDeviceName;
|
||||
auto pos = deviceNames.begin()->find('.');
|
||||
if (pos != std::string::npos) {
|
||||
clearDeviceName = deviceNames.begin()->substr(0, pos);
|
||||
}
|
||||
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
|
||||
GTEST_SKIP();
|
||||
}
|
||||
std::vector<std::string> deviceIDs = ie.get_metric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
|
||||
if (deviceIDs.size() < deviceNames.size())
|
||||
GTEST_SKIP();
|
||||
|
||||
std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":");
|
||||
for (auto& dev_name : deviceNames) {
|
||||
multiDeviceName += dev_name;
|
||||
if (&dev_name != &(deviceNames.back())) {
|
||||
multiDeviceName += ",";
|
||||
}
|
||||
}
|
||||
ASSERT_NO_THROW(ie.query_model(actualNetwork, multiDeviceName));
|
||||
}
|
||||
|
||||
TEST_P(OVClassNetworkTestP, SetAffinityWithConstantBranches) {
|
||||
ov::runtime::Core ie = createCoreWithTemplate();
|
||||
|
||||
@ -644,6 +724,34 @@ TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigThrow) {
|
||||
ASSERT_THROW(p = ie.get_config(deviceName, "unsupported_config"), ov::Exception);
|
||||
}
|
||||
|
||||
TEST_P(OVClassSpecificDeviceTestGetConfig, GetConfigSpecificDeviceNoThrow) {
|
||||
ov::runtime::Core ie = createCoreWithTemplate();
|
||||
runtime::Parameter p;
|
||||
|
||||
std::string deviceID, clearDeviceName;
|
||||
auto pos = deviceName.find('.');
|
||||
if (pos != std::string::npos) {
|
||||
clearDeviceName = deviceName.substr(0, pos);
|
||||
deviceID = deviceName.substr(pos + 1, deviceName.size());
|
||||
}
|
||||
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
|
||||
GTEST_SKIP();
|
||||
}
|
||||
std::vector<std::string> deviceIDs = ie.get_metric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
|
||||
if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
|
||||
GTEST_SKIP();
|
||||
}
|
||||
|
||||
ASSERT_NO_THROW(p = ie.get_metric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
|
||||
std::vector<std::string> configValues = p;
|
||||
|
||||
for (auto &&confKey : configValues) {
|
||||
runtime::Parameter defaultValue;
|
||||
ASSERT_NO_THROW(defaultValue = ie.get_config(deviceName, confKey));
|
||||
ASSERT_FALSE(defaultValue.empty());
|
||||
}
|
||||
}
|
||||
|
||||
TEST_P(OVClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
|
||||
ov::runtime::Core ie = createCoreWithTemplate();
|
||||
std::vector<std::string> devices;
|
||||
@ -667,12 +775,6 @@ TEST_P(OVClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
|
||||
//
|
||||
// QueryNetwork with HETERO on particular device
|
||||
//
|
||||
bool supportsDeviceID(ov::runtime::Core& ie, const std::string& deviceName) {
|
||||
auto supportedConfigKeys =
|
||||
ie.get_metric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
|
||||
return supportedConfigKeys.end() !=
|
||||
std::find(std::begin(supportedConfigKeys), std::end(supportedConfigKeys), CONFIG_KEY(DEVICE_ID));
|
||||
}
|
||||
|
||||
TEST_P(OVClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) {
|
||||
ov::runtime::Core ie = createCoreWithTemplate();
|
||||
@ -787,6 +889,31 @@ TEST_P(OVClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) {
|
||||
}
|
||||
}
|
||||
|
||||
TEST_P(OVClassSeveralDevicesTestLoadNetwork, LoadNetworkActualSeveralDevicesNoThrow) {
|
||||
ov::runtime::Core ie = createCoreWithTemplate();
|
||||
|
||||
std::string clearDeviceName;
|
||||
auto pos = deviceNames.begin()->find('.');
|
||||
if (pos != std::string::npos) {
|
||||
clearDeviceName = deviceNames.begin()->substr(0, pos);
|
||||
}
|
||||
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
|
||||
GTEST_SKIP();
|
||||
}
|
||||
std::vector<std::string> deviceIDs = ie.get_metric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
|
||||
if (deviceIDs.size() < deviceNames.size())
|
||||
GTEST_SKIP();
|
||||
|
||||
std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":");
|
||||
for (auto& dev_name : deviceNames) {
|
||||
multiDeviceName += dev_name;
|
||||
if (&dev_name != &(deviceNames.back())) {
|
||||
multiDeviceName += ",";
|
||||
}
|
||||
}
|
||||
ASSERT_NO_THROW(ie.compile_model(actualNetwork, multiDeviceName));
|
||||
}
|
||||
|
||||
//
|
||||
// LoadNetwork with HETERO on particular device
|
||||
//
|
||||
@ -1003,6 +1130,64 @@ TEST_P(OVClassLoadNetworkAfterCoreRecreateTest, LoadAfterRecreateCoresAndPlugins
|
||||
// auto executableNetwork = ie.compile_model(actualNetwork, deviceName, config);
|
||||
// });
|
||||
};
|
||||
|
||||
TEST_P(OVClassSetDefaultDeviceIDTest, SetDefaultDeviceIDNoThrow) {
|
||||
ov::runtime::Core ie = createCoreWithTemplate();
|
||||
|
||||
std::vector<std::string> deviceIDs = ie.get_metric(deviceName, METRIC_KEY(AVAILABLE_DEVICES));
|
||||
if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
|
||||
GTEST_SKIP();
|
||||
}
|
||||
std::string value;
|
||||
ASSERT_NO_THROW(ie.set_config({{ InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, deviceID },
|
||||
{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }},
|
||||
deviceName));
|
||||
ASSERT_NO_THROW(value = ie.get_config(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
|
||||
ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES);
|
||||
}
|
||||
|
||||
TEST_P(OVClassSetGlobalConfigTest, SetGlobalConfigNoThrow) {
|
||||
ov::runtime::Core ie = createCoreWithTemplate();
|
||||
|
||||
std::vector<std::string> deviceIDs = ie.get_metric(deviceName, METRIC_KEY(AVAILABLE_DEVICES));
|
||||
runtime::Parameter ref, src;
|
||||
for (auto& dev_id : deviceIDs) {
|
||||
ASSERT_NO_THROW(ie.set_config({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::NO }},
|
||||
deviceName + "." + dev_id));
|
||||
}
|
||||
ASSERT_NO_THROW(ie.set_config({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, deviceName));
|
||||
ASSERT_NO_THROW(ref = ie.get_config(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));
|
||||
|
||||
for (auto& dev_id : deviceIDs) {
|
||||
ASSERT_NO_THROW(src = ie.get_config(deviceName + "." + dev_id, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));
|
||||
ASSERT_EQ(src, ref);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_P(OVClassSeveralDevicesTestDefaultCore, DefaultCoreSeveralDevicesNoThrow) {
|
||||
ov::runtime::Core ie;
|
||||
|
||||
std::string clearDeviceName;
|
||||
auto pos = deviceNames.begin()->find('.');
|
||||
if (pos != std::string::npos) {
|
||||
clearDeviceName = deviceNames.begin()->substr(0, pos);
|
||||
}
|
||||
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
|
||||
GTEST_SKIP();
|
||||
}
|
||||
std::vector<std::string> deviceIDs = ie.get_metric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
|
||||
if (deviceIDs.size() < deviceNames.size())
|
||||
GTEST_SKIP();
|
||||
|
||||
for (size_t i = 0; i < deviceNames.size(); ++i) {
|
||||
ASSERT_NO_THROW(ie.set_config({{ InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, std::to_string(i + 2) }}, deviceNames[i]));
|
||||
}
|
||||
std::string res;
|
||||
for (size_t i = 0; i < deviceNames.size(); ++i) {
|
||||
ASSERT_NO_THROW(res = ie.get_config(deviceNames[i], InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS).as<std::string>());
|
||||
ASSERT_EQ(res, std::to_string(i + 2));
|
||||
}
|
||||
}
|
||||
} // namespace behavior
|
||||
} // namespace test
|
||||
} // namespace ov
|
||||
|
@ -40,6 +40,17 @@ public:
|
||||
}
|
||||
};
|
||||
|
||||
class IEClassSetDefaultDeviceIDTest : public ::testing::Test,
|
||||
public ::testing::WithParamInterface<std::pair<std::string, std::string>> {
|
||||
protected:
|
||||
std::string deviceName;
|
||||
std::string deviceID;
|
||||
public:
|
||||
void SetUp() override {
|
||||
std::tie(deviceName, deviceID) = GetParam();
|
||||
}
|
||||
};
|
||||
|
||||
using IEClassNetworkTestP = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassGetMetricTest = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassQueryNetworkTest = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
@ -56,13 +67,27 @@ using IEClassGetMetricTest_RANGE_FOR_ASYNC_INFER_REQUESTS = BehaviorTestsUtils::
|
||||
using IEClassGetMetricTest_ThrowUnsupported = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassGetConfigTest = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassGetConfigTest_ThrowUnsupported = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassGetAvailableDevices = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassGetMetricTest_RANGE_FOR_STREAMS = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassSetGlobalConfigTest = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassSpecificDeviceTestSetConfig = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassSpecificDeviceTestGetConfig = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
|
||||
using IEClassLoadNetworkAfterCoreRecreateTest = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
|
||||
class IEClassSeveralDevicesTest : public BehaviorTestsUtils::IEClassNetworkTest,
|
||||
public ::testing::WithParamInterface<std::vector<std::string>> {
|
||||
public:
|
||||
std::vector<std::string> deviceNames;
|
||||
void SetUp() override {
|
||||
IEClassNetworkTest::SetUp();
|
||||
deviceNames = GetParam();
|
||||
}
|
||||
};
|
||||
using IEClassSeveralDevicesTestLoadNetwork = IEClassSeveralDevicesTest;
|
||||
using IEClassSeveralDevicesTestQueryNetwork = IEClassSeveralDevicesTest;
|
||||
using IEClassSeveralDevicesTestDefaultCore = IEClassSeveralDevicesTest;
|
||||
|
||||
bool supportsAvaliableDevices(InferenceEngine::Core &ie, const std::string &deviceName) {
|
||||
auto supportedMetricKeys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
|
||||
return supportedMetricKeys.end() != std::find(std::begin(supportedMetricKeys),
|
||||
@ -70,6 +95,13 @@ bool supportsAvaliableDevices(InferenceEngine::Core &ie, const std::string &dev
|
||||
METRIC_KEY(AVAILABLE_DEVICES));
|
||||
}
|
||||
|
||||
bool supportsDeviceID(InferenceEngine::Core &ie, const std::string &deviceName) {
|
||||
auto supportedConfigKeys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
|
||||
return supportedConfigKeys.end() != std::find(std::begin(supportedConfigKeys),
|
||||
std::end(supportedConfigKeys),
|
||||
CONFIG_KEY(DEVICE_ID));
|
||||
}
|
||||
|
||||
TEST(IEClassBasicTest, smoke_createDefault) {
|
||||
ASSERT_NO_THROW(InferenceEngine::Core ie);
|
||||
}
@ -264,6 +296,29 @@ TEST(IEClassBasicTest, smoke_SetConfigHeteroNoThrow) {
    ASSERT_FALSE(value);
}

TEST_P(IEClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

    std::string deviceID, clearDeviceName;
    auto pos = deviceName.find('.');
    if (pos != std::string::npos) {
        clearDeviceName = deviceName.substr(0, pos);
        deviceID = deviceName.substr(pos + 1, deviceName.size());
    }
    if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
        GTEST_SKIP();
    }
    std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
    if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
        GTEST_SKIP();
    }

    ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}, deviceName));
    std::string value;
    ASSERT_NO_THROW(value = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
    ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES);
}

//
// ImportNetwork
//
@ -341,6 +396,31 @@ TEST_P(IEClassNetworkTestP, QueryNetworkWithKSO) {
    }
}

TEST_P(IEClassSeveralDevicesTestQueryNetwork, QueryNetworkActualSeveralDevicesNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

    std::string clearDeviceName;
    auto pos = deviceNames.begin()->find('.');
    if (pos != std::string::npos) {
        clearDeviceName = deviceNames.begin()->substr(0, pos);
    }
    if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
        GTEST_SKIP();
    }
    std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
    if (deviceIDs.size() < deviceNames.size())
        GTEST_SKIP();

    std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":");
    for (auto& dev_name : deviceNames) {
        multiDeviceName += dev_name;
        if (&dev_name != &(deviceNames.back())) {
            multiDeviceName += ",";
        }
    }
    ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, multiDeviceName));
}

TEST_P(IEClassNetworkTestP, SetAffinityWithConstantBranches) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

@ -684,6 +764,34 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigThrow) {
    ASSERT_THROW(p = ie.GetConfig(deviceName, "unsupported_config"), InferenceEngine::Exception);
}

TEST_P(IEClassSpecificDeviceTestGetConfig, GetConfigSpecificDeviceNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

    std::string deviceID, clearDeviceName;
    auto pos = deviceName.find('.');
    if (pos != std::string::npos) {
        clearDeviceName = deviceName.substr(0, pos);
        deviceID = deviceName.substr(pos + 1, deviceName.size());
    }
    if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
        GTEST_SKIP();
    }
    std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
    if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
        GTEST_SKIP();
    }

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
    std::vector<std::string> configValues = p;

    for (auto &&confKey : configValues) {
        InferenceEngine::Parameter defaultValue;
        ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey));
        ASSERT_FALSE(defaultValue.empty());
    }
}

TEST_P(IEClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    std::vector<std::string> devices;
@ -707,13 +815,6 @@ TEST_P(IEClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
//
// QueryNetwork with HETERO on particular device
//
bool supportsDeviceID(InferenceEngine::Core &ie, const std::string &deviceName) {
    auto supportedConfigKeys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
    return supportedConfigKeys.end() != std::find(std::begin(supportedConfigKeys),
                                                  std::end(supportedConfigKeys),
                                                  CONFIG_KEY(DEVICE_ID));
}


TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
@ -824,6 +925,31 @@ TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) {
    }
}

TEST_P(IEClassSeveralDevicesTestLoadNetwork, LoadNetworkActualSeveralDevicesNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

    std::string clearDeviceName;
    auto pos = deviceNames.begin()->find('.');
    if (pos != std::string::npos) {
        clearDeviceName = deviceNames.begin()->substr(0, pos);
    }
    if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
        GTEST_SKIP();
    }
    std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
    if (deviceIDs.size() < deviceNames.size())
        GTEST_SKIP();

    std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":");
    for (auto& dev_name : deviceNames) {
        multiDeviceName += dev_name;
        if (&dev_name != &(deviceNames.back())) {
            multiDeviceName += ",";
        }
    }
    ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, multiDeviceName));
}

using IEClassLoadNetworkTest = IEClassQueryNetworkTest;
//
// LoadNetwork with HETERO on particular device
@ -1030,4 +1156,62 @@ TEST_P(IEClassLoadNetworkAfterCoreRecreateTest, LoadAfterRecreateCoresAndPlugins
        auto executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName, config);
    });
};

TEST_P(IEClassSetDefaultDeviceIDTest, SetDefaultDeviceIDNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

    std::vector<std::string> deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES));
    if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
        GTEST_SKIP();
    }
    std::string value;
    ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, deviceID },
                                  { InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }},
                                 deviceName));
    ASSERT_NO_THROW(value = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
    ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES);
}

TEST_P(IEClassSetGlobalConfigTest, SetGlobalConfigNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

    std::vector<std::string> deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES));
    InferenceEngine::Parameter ref, src;
    for (auto& dev_id : deviceIDs) {
        ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::NO }},
                                     deviceName + "." + dev_id));
    }
    ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, deviceName));
    ASSERT_NO_THROW(ref = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));

    for (auto& dev_id : deviceIDs) {
        ASSERT_NO_THROW(src = ie.GetConfig(deviceName + "." + dev_id, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));
        ASSERT_EQ(src, ref);
    }
}

TEST_P(IEClassSeveralDevicesTestDefaultCore, DefaultCoreSeveralDevicesNoThrow) {
    InferenceEngine::Core ie;

    std::string clearDeviceName;
    auto pos = deviceNames.begin()->find('.');
    if (pos != std::string::npos) {
        clearDeviceName = deviceNames.begin()->substr(0, pos);
    }
    if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
        GTEST_SKIP();
    }
    std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
    if (deviceIDs.size() < deviceNames.size())
        GTEST_SKIP();

    for (size_t i = 0; i < deviceNames.size(); ++i) {
        ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, std::to_string(i + 2) }}, deviceNames[i]));
    }
    std::string res;
    for (size_t i = 0; i < deviceNames.size(); ++i) {
        ASSERT_NO_THROW(res = ie.GetConfig(deviceNames[i], InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS).as<std::string>());
        ASSERT_EQ(res, std::to_string(i + 2));
    }
}
} // namespace BehaviorTestsDefinitions
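
The tests above exercise per-device-ID configuration through the C++ Core API; the sketch below shows the same idea from the user side with the pre-2022 Python bindings. It is only an illustration: it assumes a machine that actually enumerates GPU.0 and GPU.1, and the expected values simply mirror what SetConfigSpecificDeviceNoThrow and SetGlobalConfigNoThrow assert above.

    from openvino.inference_engine import IECore   # assumed pre-2022.1 Python API

    ie = IECore()
    # Configure one specific GPU only (hypothetical GPU.1).
    ie.set_config({'PERF_COUNT': 'YES'}, 'GPU.1')
    print(ie.get_config('GPU.1', 'PERF_COUNT'))    # expected 'YES', as in SetConfigSpecificDeviceNoThrow

    # Configuring the bare device type afterwards acts as a global value for every id.
    ie.set_config({'PERF_COUNT': 'NO'}, 'GPU')
    print(ie.get_config('GPU.0', 'PERF_COUNT'))    # expected 'NO', as in SetGlobalConfigNoThrow
    print(ie.get_config('GPU.1', 'PERF_COUNT'))    # expected 'NO'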
@ -76,7 +76,8 @@ Options:
                        value is CPU. Use '-d HETERO:<comma separated devices
                        list>' format to specify HETERO plugin. Use '-d
                        MULTI:<comma separated devices list>' format to
                        specify MULTI plugin. The application looks for a
                        specify MULTI plugin. Use "-d GPU.X" format to specify
                        device id for GPU devices. The application looks for a
                        suitable plugin for the specified device.
  -l PATH_TO_EXTENSION, --path_to_extension PATH_TO_EXTENSION
                        Optional. Required for CPU custom layers. Absolute

@ -83,6 +83,22 @@ def run(args):

    # --------------------- 3. Setting device configuration --------------------------------------------------------
    next_step()
    def get_device_type_from_name(name) :
        new_name = str(name)
        new_name = new_name.split(".", 1)[0]
        new_name = new_name.split("(", 1)[0]
        return new_name

    ## Set default values from dumped config
    default_devices = set()
    for device in devices:
        device_type = get_device_type_from_name(device)
        if device_type in config and device not in config:
            config[device] = config[device_type].copy()
            default_devices.add(device_type)

    for def_device in default_devices:
        config.pop(def_device)

    perf_counts = False
    for device in devices:
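
For illustration, a standalone sketch of the default-config fan-out above, using a toy dumped config and two hypothetical GPU ids instead of a real -load_config file:

    def get_device_type_from_name(name):
        # "GPU.1" -> "GPU", "CPU(4)" -> "CPU"
        return str(name).split(".", 1)[0].split("(", 1)[0]

    devices = ['GPU.0', 'GPU.1']                     # hypothetical target list
    config = {'GPU': {'PERF_COUNT': 'YES'}}          # defaults dumped for the bare device type

    default_devices = set()
    for device in devices:
        device_type = get_device_type_from_name(device)
        if device_type in config and device not in config:
            config[device] = config[device_type].copy()
            default_devices.add(device_type)
    for def_device in default_devices:
        config.pop(def_device)

    print(config)  # {'GPU.0': {'PERF_COUNT': 'YES'}, 'GPU.1': {'PERF_COUNT': 'YES'}}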
@ -115,7 +131,7 @@ def run(args):
            config[device]['PERFORMANCE_HINT_NUM_REQUESTS'] = str(args.number_infer_requests)
        ## the rest are individual per-device settings (overriding the values the device will deduce from perf hint)
        def set_throughput_streams():
            key = device + "_THROUGHPUT_STREAMS"
            key = get_device_type_from_name(device) + "_THROUGHPUT_STREAMS"
            if device in device_number_streams.keys():
                ## set to user defined value
                supported_config_keys = benchmark.ie.get_metric(device, 'SUPPORTED_CONFIG_KEYS')
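
A quick check that the stream-count key is now derived from the device type rather than the full name (sketch reusing the same helper), so 'GPU.1' still resolves to the GPU plugin key:

    def get_device_type_from_name(name):
        return str(name).split(".", 1)[0].split("(", 1)[0]

    print(get_device_type_from_name('GPU.1') + '_THROUGHPUT_STREAMS')  # GPU_THROUGHPUT_STREAMS
    print(get_device_type_from_name('CPU') + '_THROUGHPUT_STREAMS')    # CPU_THROUGHPUT_STREAMS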
@ -129,11 +145,11 @@ def run(args):
                               "Although the automatic selection usually provides a reasonable performance, "
                               "but it still may be non-optimal for some cases, for more information look at README.")
                if device != MYRIAD_DEVICE_NAME: ## MYRIAD sets the default number of streams implicitly
                    config[device][key] = device + "_THROUGHPUT_AUTO"
                    config[device][key] = get_device_type_from_name(device) + "_THROUGHPUT_AUTO"
            if key in config[device].keys():
                device_number_streams[device] = config[device][key]

        if device == CPU_DEVICE_NAME: # CPU supports few special performance-oriented keys
        if CPU_DEVICE_NAME in device: # CPU supports few special performance-oriented keys
            # limit threading for CPU portion of inference
            if args.number_threads and is_flag_set_in_command_line("nthreads"):
                config[device]['CPU_THREADS_NUM'] = str(args.number_threads)
@ -152,7 +168,7 @@ def run(args):

            ## for CPU execution, more throughput-oriented execution via streams
            set_throughput_streams()
        elif device == GPU_DEVICE_NAME:
        elif GPU_DEVICE_NAME in device:
            ## for GPU execution, more throughput-oriented execution via streams
            set_throughput_streams()

@ -160,10 +176,10 @@ def run(args):
                logger.warning("Turn on GPU throttling. Multi-device execution with the CPU + GPU performs best with GPU throttling hint, " +
                               "which releases another CPU thread (that is otherwise used by the GPU driver for active polling)")
                config[device]['GPU_PLUGIN_THROTTLE'] = '1'
        elif device == MYRIAD_DEVICE_NAME:
        elif MYRIAD_DEVICE_NAME in device:
            set_throughput_streams()
            config[device]['LOG_LEVEL'] = 'LOG_INFO'
        elif device == GNA_DEVICE_NAME:
        elif GNA_DEVICE_NAME in device:
            if is_flag_set_in_command_line('qb'):
                if args.qb == 8:
                    config[device]['GNA_PRECISION'] = 'I8'
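
The equality checks become substring checks so that id-qualified names such as 'GPU.1' still reach the matching branch; a tiny sketch, with the constants assumed to be the plain type strings:

    CPU_DEVICE_NAME, GPU_DEVICE_NAME = 'CPU', 'GPU'   # assumed values of the constants

    for device in ['CPU', 'GPU', 'GPU.1']:
        if CPU_DEVICE_NAME in device:
            print(device, '-> CPU branch')
        elif GPU_DEVICE_NAME in device:
            print(device, '-> GPU branch')   # 'GPU' and 'GPU.1' both land here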
@ -303,8 +319,8 @@ def run(args):

    # Update number of streams
    for device in device_number_streams.keys():
        key = device + '_THROUGHPUT_STREAMS'
        device_number_streams[device] = exe_network.get_config(key)
        key = get_device_type_from_name(device) + '_THROUGHPUT_STREAMS'
        device_number_streams[device] = benchmark.ie.get_config(device, key)

    # Number of requests
    infer_requests = exe_network.requests
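
Streams are now read back per logical device from the Core rather than from the executable network; a hedged sketch of that lookup, assuming the pre-2022 Python API and a system exposing a hypothetical GPU.0:

    from openvino.inference_engine import IECore   # assumed pre-2022.1 Python API

    ie = IECore()
    device = 'GPU.0'                                            # hypothetical id-qualified device
    key = device.split('.', 1)[0] + '_THROUGHPUT_STREAMS'       # -> 'GPU_THROUGHPUT_STREAMS'
    print(ie.get_config(device, key))                           # stream count currently set for GPU.0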
@ -342,7 +358,7 @@ def run(args):

    # ------------------------------------ 10. Measuring performance -----------------------------------------------

    output_string = process_help_inference_string(benchmark, exe_network)
    output_string = process_help_inference_string(benchmark, device_number_streams)

    next_step(additional_info=output_string)
    progress_bar_total_count = 10000

@ -165,8 +165,7 @@ def parse_devices(device_string):
    devices = device_string
    if ':' in devices:
        devices = devices.partition(':')[2]
    return [d[:d.index('(')] if '(' in d else
            d[:d.index('.')] if '.' in d else d for d in devices.split(',')]
    return [d for d in devices.split(',')]


def parse_nstreams_value_per_device(devices, values_string):
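
With the '(...)' and '.' pruning removed, parse_devices keeps the full per-device names; a standalone sketch of the new behavior (the pre-change result is noted in the comments for contrast):

    def parse_devices(device_string):
        devices = device_string
        if ':' in devices:
            devices = devices.partition(':')[2]
        return [d for d in devices.split(',')]

    print(parse_devices('MULTI:GPU.0,GPU.1'))  # ['GPU.0', 'GPU.1'] (previously ['GPU', 'GPU'])
    print(parse_devices('CPU'))                # ['CPU']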
@ -194,19 +193,15 @@ def parse_nstreams_value_per_device(devices, values_string):
    return result


def process_help_inference_string(benchmark_app, exe_network):
def process_help_inference_string(benchmark_app, device_number_streams):
    output_string = f'Start inference {benchmark_app.api_type}hronously'
    if benchmark_app.api_type == 'async':
        output_string += f', {benchmark_app.nireq} inference requests'

    device_ss = ''
    if CPU_DEVICE_NAME in benchmark_app.device:
        device_ss += str(exe_network.get_config('CPU_THROUGHPUT_STREAMS'))
        device_ss += f' streams for {CPU_DEVICE_NAME}'
    if GPU_DEVICE_NAME in benchmark_app.device:
    for device, streams in device_number_streams.items():
        device_ss += ', ' if device_ss else ''
        device_ss += str(exe_network.get_config('GPU_THROUGHPUT_STREAMS'))
        device_ss += f' streams for {GPU_DEVICE_NAME}'
        device_ss += f'{streams} streams for {device}'

    if device_ss:
        output_string += ' using ' + device_ss
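
A minimal sketch of the reworked summary string, with a stub object standing in for the real benchmark instance and a hypothetical per-device streams dictionary:

    class _Bench:          # stub exposing just the fields the helper reads
        api_type = 'async'
        nireq = 4

    def process_help_inference_string(benchmark_app, device_number_streams):
        output_string = f'Start inference {benchmark_app.api_type}hronously'
        if benchmark_app.api_type == 'async':
            output_string += f', {benchmark_app.nireq} inference requests'
        device_ss = ''
        for device, streams in device_number_streams.items():
            device_ss += ', ' if device_ss else ''
            device_ss += f'{streams} streams for {device}'
        if device_ss:
            output_string += ' using ' + device_ss
        return output_string

    print(process_help_inference_string(_Bench(), {'CPU': '4', 'GPU.0': '2'}))
    # Start inference asynchronously, 4 inference requests using 4 streams for CPU, 2 streams for GPU.0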