[MULTI] Fixed devices comparison to respect default device id (#7311)

* [GPU] Fixed default device id

* [MULTI] Fixed devices comparison to respect default device id
This commit is contained in:
Vladimir Paramuzov 2021-09-24 12:32:38 +03:00 committed by GitHub
parent fdc125118e
commit 3eb1aa59de
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 105 additions and 27 deletions

View File

@ -30,7 +30,7 @@ struct Config {
tuningConfig(),
graph_dumps_dir(""),
sources_dumps_dir(""),
device_id(""),
device_id("0"),
kernels_cache_dir(""),
n_threads(std::max(static_cast<unsigned int>(1), std::thread::hardware_concurrency())),
enable_loop_unrolling(true) {

View File

@ -594,6 +594,15 @@ public:
return copyParameterValue(GetCPPPluginByName(parsed._deviceName).get_metric(name, parsed._config));
}
ie::Parameter GetConfig(const std::string& deviceName, const std::string& name) const override {
    // Split the device name into the actual plugin name plus any per-device config
    // (e.g. "GPU.1" -> plugin "GPU", DEVICE_ID "1").
    const auto parsed = parseDeviceNameIntoConfig(deviceName);
    // we need to return a copy of Parameter object which is created on Core side,
    // not in InferenceEngine plugin side, which can be unloaded from Core in a parallel thread
    // TODO: remove this WA after *-31417 is resolved
    const auto pluginValue = GetCPPPluginByName(parsed._deviceName).get_config(name, parsed._config);
    return copyParameterValue(pluginValue);
}
/**
* @brief Returns devices available for neural networks inference
*

View File

@ -9,6 +9,9 @@
#include <map>
#include "multi_device_async_infer_request.hpp"
#include <ie_icore.hpp>
#include <ie_metric_helpers.hpp>
#include <ie_plugin_config.hpp>
namespace MultiDevicePlugin {
using namespace InferenceEngine;
@ -35,31 +38,32 @@ MultiDeviceAsyncInferRequest::MultiDeviceAsyncInferRequest(
_pipeline = {
// if the request is coming with device-specific remote blobs make sure it is scheduled to the specific device only:
{ /*TaskExecutor*/ std::make_shared<ImmediateExecutor>(), /*task*/ [this] {
// by default, no preferred device:
_multiDeviceExecutableNetwork->_thisPreferredDeviceName = "";
// if any input is remote (e.g. was set with SetBlob), let' use the corresponding device
for (const auto &it : _multiDeviceExecutableNetwork->GetInputsInfo()) {
auto b = _inferRequest->GetBlob(it.first);
auto r = b->as<RemoteBlob>();
if (r) {
const auto name = r->getDeviceName();
const auto res = std::find_if(
_multiDeviceExecutableNetwork->_devicePrioritiesInitial.cbegin(),
_multiDeviceExecutableNetwork->_devicePrioritiesInitial.cend(),
[&name](const MultiDevicePlugin::DeviceInformation& d){
return d.deviceName == name; });
if (_multiDeviceExecutableNetwork->_devicePrioritiesInitial.cend() == res) {
IE_THROW() << "None of the devices (for which current MULTI-device configuration was "
"initialized) supports a remote blob created on the device named " << name;
// by default, no preferred device:
_multiDeviceExecutableNetwork->_thisPreferredDeviceName = "";
// if any input is remote (e.g. was set with SetBlob), let' use the corresponding device
for (const auto &it : _multiDeviceExecutableNetwork->GetInputsInfo()) {
auto b = _inferRequest->GetBlob(it.first);
auto r = b->as<RemoteBlob>();
if (r) {
const auto name = r->getDeviceName();
const auto res = std::find_if(
_multiDeviceExecutableNetwork->_devicePrioritiesInitial.cbegin(),
_multiDeviceExecutableNetwork->_devicePrioritiesInitial.cend(),
[&name](const MultiDevicePlugin::DeviceInformation& d) {
return (d.defaultDeviceID.empty() ? d.deviceName : (d.deviceName + "." + d.defaultDeviceID)) == name;
});
if (_multiDeviceExecutableNetwork->_devicePrioritiesInitial.cend() == res) {
IE_THROW() << "None of the devices (for which current MULTI-device configuration was "
"initialized) supports a remote blob created on the device named " << name;
} else {
} else {
// it is ok to take the c_str() here (as pointed in the multi_device_exec_network.hpp we need to use const char*)
// as the original strings are from the "persistent" vector (with the right lifetime)
_multiDeviceExecutableNetwork->_thisPreferredDeviceName = res->deviceName.c_str();
break;
}
}
}
_multiDeviceExecutableNetwork->_thisPreferredDeviceName = res->deviceName.c_str();
break;
}
}
}
}},
// as the scheduling algo may select any device, this stage accepts the scheduling decision (actual workerRequest)
// then sets the device-agnostic blobs to the actual (device-specific) request

View File

@ -372,6 +372,10 @@ std::shared_ptr<InferenceEngine::RemoteContext> MultiDeviceExecutableNetwork::Ge
<< " Current list of devices allowed via the DEVICE_PRIORITIES config: " << devices_names;
}
// Exposes the ICore instance of the owning MULTI plugin so that request-level
// code can query other plugins/devices through the Core.
// NOTE(review): assumes _plugin outlives this executable network — presumably
// guaranteed by the plugin/network ownership model; confirm against callers.
std::shared_ptr<InferenceEngine::ICore> MultiDeviceExecutableNetwork::GetCore() const {
return _plugin->GetCore();
}
InferenceEngine::IInferRequestInternal::Ptr MultiDeviceExecutableNetwork::CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
InferenceEngine::OutputsDataMap networkOutputs) {
auto num = _numRequestsCreated++;

View File

@ -36,6 +36,7 @@ struct DeviceInformation {
DeviceName deviceName;
std::map<std::string, std::string> config;
int numRequestsPerDevices;
std::string defaultDeviceID;
};
template<typename T>
@ -131,6 +132,7 @@ public:
InferenceEngine::IInferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
InferenceEngine::OutputsDataMap networkOutputs) override;
std::shared_ptr<InferenceEngine::RemoteContext> GetContext() const override;
std::shared_ptr<InferenceEngine::ICore> GetCore() const;
~MultiDeviceExecutableNetwork() override;
void ScheduleToWorkerInferRequest(InferenceEngine::Task, DeviceName preferred_device = "");

View File

@ -108,6 +108,19 @@ std::vector<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(cons
return GetSupportedConfig(tconfig, deviceName);
};
// Resolves the plugin's current default DEVICE_ID for a bare device name
// (e.g. "GPU" -> "0"), so MULTI can later match it against fully-qualified
// names like "GPU.0". Returns "" when the device does not expose DEVICE_ID.
auto getDefaultDeviceID = [this](std::string deviceName) -> std::string {
std::vector<std::string> supportedMetrics = GetCore()->GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS));
// Only query SUPPORTED_CONFIG_KEYS if the plugin advertises that metric at all.
if (std::find(supportedMetrics.begin(), supportedMetrics.end(), METRIC_KEY(SUPPORTED_CONFIG_KEYS)) != supportedMetrics.end()) {
std::vector<std::string> supportKeys = GetCore()->GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS));
// DEVICE_ID is optional: only plugins managing multiple devices support it.
if (std::find(supportKeys.begin(), supportKeys.end(), CONFIG_KEY(DEVICE_ID)) != supportKeys.end()) {
return GetCore()->GetConfig(deviceName, CONFIG_KEY(DEVICE_ID)).as<std::string>();
}
}
// No default-device notion for this plugin — caller uses the plain name.
return "";
};
for (auto && d : devicesWithRequests) {
auto openingBracket = d.find_first_of('(');
auto closingBracket = d.find_first_of(')', openingBracket);
@ -123,8 +136,13 @@ std::vector<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(cons
}
}
std::string defaultDeviceID = "";
DeviceIDParser parsed{deviceName};
if (parsed.getDeviceID().empty())
defaultDeviceID = getDefaultDeviceID(deviceName);
// create meta device
metaDevices.push_back({ deviceName, getDeviceConfig(deviceName), numRequests });
metaDevices.push_back({ deviceName, getDeviceConfig(deviceName), numRequests, defaultDeviceID });
}
return metaDevices;

View File

@ -118,6 +118,17 @@ public:
*/
virtual Parameter GetMetric(const std::string& deviceName, const std::string& name) const = 0;
/**
* @brief Gets configuration dedicated to device behaviour.
*
* The method is targeted to extract information which can be set via SetConfig method.
*
* @param deviceName - A name of a device to get a configuration value.
* @param name - config key.
* @return Value of config corresponding to config key.
*/
virtual Parameter GetConfig(const std::string& deviceName, const std::string& name) const = 0;
/**
* @brief Returns devices available for neural networks inference
*

View File

@ -72,7 +72,7 @@ struct PerfHintsConfig {
* @return configuration value
*/
static std::string CheckPerformanceHintValue(const std::string& val) {
if (val == PluginConfigParams::LATENCY || val == PluginConfigParams::THROUGHPUT)
if (val == PluginConfigParams::LATENCY || val == PluginConfigParams::THROUGHPUT || val == "")
return val;
else
IE_THROW() << "Wrong value for property key " << PluginConfigParams::KEY_PERFORMANCE_HINT
@ -88,7 +88,7 @@ struct PerfHintsConfig {
int val_i = -1;
try {
val_i = std::stoi(val);
if (val_i > 0)
if (val_i >= 0)
return val_i;
else
throw std::logic_error("wrong val");

View File

@ -132,6 +132,13 @@ namespace {
::testing::ValuesIn(conf)),
CorrectConfigAPITests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, DefaultValuesConfigTests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_GPU),
::testing::ValuesIn(conf)),
CorrectConfigAPITests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_GPU_BehaviorTests, CorrectConfigAPITests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),

View File

@ -10,6 +10,7 @@
const std::vector<DevicesNamesAndSupportPair> device_names_and_support_for_remote_blobs {
{{GPU}, true}, // GPU via MULTI,
{{"GPU.0"}, true}, // GPU.0 via MULTI,
#ifdef ENABLE_MKL_DNN
{{GPU, CPU}, true}, // GPU+CPU
{{CPU, GPU}, true}, // CPU+GPU

View File

@ -133,6 +133,27 @@ namespace BehaviorTestsDefinitions {
ASSERT_EQ(std::find(supportedOptions.cbegin(), supportedOptions.cend(), key), supportedOptions.cend());
}
using DefaultValuesConfigTests = BehaviorTestsUtils::BehaviorTestsBasic;
// Round-trip test: for every config key the device reports as supported,
// read its current (default) value via GetConfig and verify that SetConfig
// accepts that same value back without throwing. This guards plugins whose
// defaults fail their own SetConfig validation (e.g. an empty DEVICE_ID).
TEST_P(DefaultValuesConfigTests, CanSetDefaultValueBackToPlugin) {
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
InferenceEngine::CNNNetwork cnnNet(function);
InferenceEngine::Parameter metric;
ASSERT_NO_THROW(metric = ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> keys = metric;
for (auto& key : keys) {
InferenceEngine::Parameter configValue;
ASSERT_NO_THROW(configValue = ie->GetConfig(targetDevice, key));
// Echo the value back; the message below identifies the offending key on failure.
ASSERT_NO_THROW(ie->SetConfig({{ key, configValue.as<std::string>()}}, targetDevice))
<< "device=" << targetDevice << " "
<< "config key=" << key << " "
<< "value=" << configValue.as<std::string>();
}
}
using IncorrectConfigTests = BehaviorTestsUtils::BehaviorTestsBasic;
TEST_P(IncorrectConfigTests, SetConfigWithIncorrectKey) {

View File

@ -30,6 +30,7 @@ public:
const InferenceEngine::CNNNetwork&, const std::string&, const std::map<std::string, std::string>&));
MOCK_CONST_METHOD2(GetMetric, InferenceEngine::Parameter(const std::string&, const std::string&));
MOCK_CONST_METHOD2(GetConfig, InferenceEngine::Parameter(const std::string&, const std::string&));
MOCK_CONST_METHOD0(GetAvailableDevices, std::vector<std::string>());
MOCK_CONST_METHOD1(DeviceSupportsImportExport, bool(const std::string&)); // NOLINT not a cast to bool