Fix compile_model failure when setting a config with ov::device::properties (#11793)

* 1. Enable the IE Core filter to promote secondary properties to the first level for a hardware device.
2. Enable the IE Core filter to pass secondary properties through to the AUTO plugin.
3. Enable the AUTO plugin to parse secondary properties to the first level and pass them to the corresponding target hardware device (see the sketch after this entry).

Signed-off-by: Wang, Yang <yang4.wang@intel.com>
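
A minimal sketch of the user-facing pattern this series enables, assuming the OpenVINO 2.0 C++ API; the model path and property values are illustrative, not taken from the change itself:

#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    auto model = core.read_model("model.xml");  // illustrative path

    // "Secondary" properties are device-scoped maps nested under
    // ov::device::properties. Compiling for AUTO should forward the "CPU"
    // map to the CPU plugin instead of rejecting it as an unknown key.
    auto compiled = core.compile_model(
        model,
        "AUTO",
        ov::device::priorities("CPU"),
        ov::device::properties("CPU",
                               ov::enable_profiling(true),
                               ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)));
    return 0;
}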

* 1. Enable MULTI Plugin to support secondary properties.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* 1. Enable HETERO Plugin to support secondary priorities.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Update.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Add an EXPECT_CALL matching GetMetric calls with the AVAILABLE_DEVICES argument.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Revert the logic for handling secondary properties for the MULTI and HETERO devices.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Update.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Remove the secondary property flattening logic because it has already been implemented within the AUTO plugin.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* 1. Update the flattening logic used when secondary properties are specified.
2. Add a test case with secondary properties for CPU.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Add a test case with secondary properties for the GPU plugin.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Update.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Update.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Add a debug message to help diagnose the test case failure.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Add more debug info.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Update.
1. For IE Core, a 1st level property overrides the 2nd level property (see the sketch after this entry).
2. For the AUTO plugin, add an available device list to check whether the secondary properties are valid.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>
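
A small sketch of the precedence rule in point 1, assuming the flattening performed by flatten_sub_properties; the key and device names are examples only:

#include <openvino/openvino.hpp>

// If the same key appears both at the 1st level and inside a device-scoped map,
// the already-present 1st level value is kept after flattening.
ov::AnyMap example_config() {
    return {ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY),  // 1st level
            ov::device::properties("CPU",
                                   ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))};  // 2nd level
}
// Flattening this map for the CPU device keeps LATENCY: the nested THROUGHPUT
// entry is skipped because its key already exists at the 1st level.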

* Add CUDA and ARM.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

* Update device name for ARM Plugin and add device name for HPU plugin.

Signed-off-by: Wang, Yang <yang4.wang@intel.com>

Co-authored-by: Chen Peter <peter.chen@intel.com>
Author: Wang, Yang
Date: 2022-06-25 10:17:30 +08:00 (committed via GitHub)
Commit: bd04dc1ecf (parent: 23b0ba6898)
7 changed files with 172 additions and 17 deletions

@@ -127,14 +127,27 @@ void allowNotImplemented(F&& f) {
 ov::AnyMap flatten_sub_properties(const std::string& device, const ov::AnyMap& properties) {
     ov::AnyMap result = properties;
-    for (auto&& property : properties) {
-        auto parsed = parseDeviceNameIntoConfig(property.first);
+    for (auto item = result.begin(); item != result.end();) {
+        auto parsed = parseDeviceNameIntoConfig(item->first);
+        if (!item->second.is<ov::AnyMap>()) {
+            item++;
+            continue;
+        }
         if (device.find(parsed._deviceName) != std::string::npos) {
-            if (property.second.is<ov::AnyMap>()) {
-                for (auto&& sub_property : property.second.as<ov::AnyMap>()) {
-                    result[sub_property.first] = sub_property.second;
-                }
-            }
+            // 1. flatten the secondary properties for the target device
+            for (auto&& sub_property : item->second.as<ov::AnyMap>()) {
+                // 1.1 the 1st level property overrides the 2nd level property
+                if (result.find(sub_property.first) != result.end())
+                    continue;
+                result[sub_property.first] = sub_property.second;
+            }
+            item = result.erase(item);
+        } else if (device != "AUTO" && device != "MULTI" && device != "HETERO") {
+            // 2. remove the secondary property setting for other hardware devices
+            item = result.erase(item);
+        } else {
+            // 3. keep the secondary properties for the other virtual devices
+            item++;
         }
     }
     return result;
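
The three numbered branches above behave roughly as follows; a hedged sketch with illustrative device names and property values:

#include <openvino/openvino.hpp>

void flatten_behaviour_sketch() {
    ov::AnyMap props = {ov::device::properties("CPU", ov::enable_profiling(true)),
                        ov::device::properties("GPU", ov::enable_profiling(false))};
    (void)props;
    // flatten_sub_properties("CPU", props):
    //   the nested CPU map is promoted to the 1st level and erased (branch 1),
    //   the nested GPU map is dropped because CPU is a hardware target (branch 2)
    //   -> result: {ov::enable_profiling(true)}
    // flatten_sub_properties("AUTO", props):
    //   neither key matches "AUTO", and AUTO is a virtual device (branch 3)
    //   -> result: props unchanged, both nested maps are kept for the plugin to handle
}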

@@ -77,6 +77,8 @@ namespace {
 std::mutex MultiDeviceInferencePlugin::_mtx;
 std::map<unsigned int, std::list<std::string>> MultiDeviceInferencePlugin::_priorityMap;
+std::set<std::string> MultiDeviceInferencePlugin::_availableDevices =
+    std::set<std::string>{"CPU", "GPU", "GNA", "TEMPLATE", "MYRIAD", "HDDL", "VPUX", "MULTI", "HETERO", "CUDA", "HPU_GOYA"};

 std::vector<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(const std::string& priorities,
                                                                             const std::map<std::string, std::string> & config) const {
@@ -338,7 +340,6 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
     auto workMode = fullConfig.find(CONFIG_KEY_INTERNAL(MULTI_WORK_MODE_AS_AUTO));
     bool workModeAuto = workMode != fullConfig.end() && workMode->second == InferenceEngine::PluginConfigParams::YES;
     auto priorities = fullConfig.find(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES);
-
     // if workMode is AUTO
     if (workModeAuto) {
         // check the configure and check if need to set PerfCounters configure to device
@@ -347,9 +348,10 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
         OV_ITT_SCOPED_TASK(itt::domains::MULTIPlugin, "MultiDeviceInferencePlugin::LoadNetworkImpl::AutoMode");
         auto autoSContext = std::make_shared<AutoScheduleContext>();
         std::map<std::string, std::string> filterConfig;
+        auto strDevices = GetDeviceList(fullConfig);
+        // keep the secondary priorities when the config key is one of the available hardware devices
         CheckConfig(fullConfig, autoSContext, filterConfig);
         // filter the device that supports filter configure
-        auto strDevices = GetDeviceList(fullConfig);
         auto metaDevices = ParseMetaDevices(strDevices, fullConfig);
         auto supportDevicesByConfig = FilterDevice(metaDevices, filterConfig);
         if (supportDevicesByConfig.size() == 0) {
@@ -431,8 +433,8 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
             auto tmpiter = fullConfig.find(CONFIG_KEY(ALLOW_AUTO_BATCHING));
             if (tmpiter != fullConfig.end())
                 p.config.insert({tmpiter->first, tmpiter->second});
-            const auto &deviceName = p.deviceName;
-            const auto &deviceConfig = p.config;
+            const auto& deviceName = p.deviceName;
+            const auto& deviceConfig = p.config;
             SoExecutableNetworkInternal exec_net;
             if (modelPath.empty()) {
                 exec_net = GetCore()->LoadNetwork(network, deviceName, deviceConfig);
@@ -769,9 +771,9 @@ std::string MultiDeviceInferencePlugin::GetDeviceList(const std::map<std::string
 }

 void MultiDeviceInferencePlugin::CheckConfig(const std::map<std::string, std::string>& config,
-                                             AutoScheduleContext::Ptr& context, std::map<std::string, std::string>& filterConfig) {
+                                             AutoScheduleContext::Ptr& context,
+                                             std::map<std::string, std::string>& filterConfig) {
     // TODO need to optimize this code, too much duplicated code
     const auto perf_hints_configs = PerfHintsConfig::SupportedKeys();
     for (auto&& kvp : config) {
         if (kvp.first == ov::enable_profiling) {
@@ -840,7 +842,12 @@ void MultiDeviceInferencePlugin::CheckConfig(const std::map<std::string, std::st
             if (kvp.first == PluginConfigParams::KEY_PERFORMANCE_HINT) {
                 context->_performanceHint = kvp.second;
             }
-        } else if (supported_configKeys.end() == std::find(supported_configKeys.begin(), supported_configKeys.end(), kvp.first)) {
+        } else if (_availableDevices.end() !=
+                   std::find(_availableDevices.begin(), _availableDevices.end(), kvp.first)) {
+            // keep secondary properties for HW or virtual device
+            continue;
+        } else if (supported_configKeys.end() ==
+                   std::find(supported_configKeys.begin(), supported_configKeys.end(), kvp.first)) {
             IE_THROW() << "Unsupported config key: " << kvp.first;
         } else if (kvp.first.find("AUTO_") == 0) {
             continue;
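
For illustration, the kind of key that now reaches CheckConfig and is skipped instead of rejected; a hedged sketch in which the key set and the serialized value format are assumptions, not taken from the diff:

#include <map>
#include <string>

// AUTO/MULTI receives a flat std::map<std::string, std::string> config. After
// the core-side handling of secondary properties, a key may be a plain device
// name (here "CPU") carrying that device's own settings; the new branch keeps
// such keys rather than throwing "Unsupported config key".
std::map<std::string, std::string> example_full_config = {
    {"MULTI_DEVICE_PRIORITIES", "CPU"},
    {"CPU", "{ENABLE_PROFILING:YES}"}  // serialized secondary properties, format illustrative
};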

@@ -65,7 +65,8 @@ private:
                                           InferenceEngine::CNNNetwork network,
                                           const std::map<std::string, std::string>& config,
                                           const std::string &networkPrecision = METRIC_VALUE(FP32));
-    static void CheckConfig(const std::map<std::string, std::string>& config, AutoScheduleContext::Ptr& context,
+    static void CheckConfig(const std::map<std::string, std::string>& config,
+                            AutoScheduleContext::Ptr& context,
                             std::map<std::string, std::string>& filterConfig);
     std::vector<DeviceInformation> FilterDevice(const std::vector<DeviceInformation>& metaDevices,
                                                 const std::map<std::string, std::string>& config);
@@ -73,6 +74,7 @@ private:
                                              InferenceEngine::CNNNetwork network);
     static std::mutex _mtx;
     static std::map<unsigned int, std::list<std::string>> _priorityMap;
+    static std::set<std::string> _availableDevices;
 };

 } // namespace MultiDevicePlugin

@@ -615,7 +615,7 @@ Parameter Plugin::GetConfig(const std::string& name, const std::map<std::string,
             return val;
         }
     } else {
-        IE_THROW() << "Unsupported config key : " << name;
+        IE_THROW() << "3-Unsupported config key : " << name;
     }
 }

@@ -85,9 +85,60 @@ INSTANTIATE_TEST_SUITE_P(
         ::testing::Values("MULTI", "AUTO"));

 const std::vector<ov::AnyMap> multiConfigs = {
     {ov::device::priorities(CommonTestUtils::DEVICE_CPU)}
 };

+const std::vector<ov::AnyMap> configsWithSecondaryProperties = {
+    {ov::device::properties("CPU",
+                            ov::enable_profiling(true),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))},
+    {ov::device::properties("CPU",
+                            ov::enable_profiling(true),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)),
+     ov::device::properties("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY))}};
+
+const std::vector<ov::AnyMap> multiConfigsWithSecondaryProperties = {
+    {ov::device::priorities(CommonTestUtils::DEVICE_CPU),
+     ov::device::properties("CPU",
+                            ov::enable_profiling(true),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))},
+    {ov::device::priorities(CommonTestUtils::DEVICE_CPU),
+     ov::device::properties("CPU",
+                            ov::enable_profiling(true),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)),
+     ov::device::properties("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY))}};
+
+const std::vector<ov::AnyMap> autoConfigsWithSecondaryProperties = {
+    {ov::device::priorities(CommonTestUtils::DEVICE_CPU),
+     ov::device::properties("AUTO",
+                            ov::enable_profiling(true),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))},
+    {ov::device::priorities(CommonTestUtils::DEVICE_CPU),
+     ov::device::properties("CPU",
+                            ov::enable_profiling(true),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))},
+    {ov::device::priorities(CommonTestUtils::DEVICE_CPU),
+     ov::device::properties("CPU",
+                            ov::enable_profiling(true),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)),
+     ov::device::properties("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY))},
+    {ov::device::priorities(CommonTestUtils::DEVICE_CPU),
+     ov::device::properties("AUTO",
+                            ov::enable_profiling(false),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)),
+     ov::device::properties("CPU",
+                            ov::enable_profiling(true),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))},
+    {ov::device::priorities(CommonTestUtils::DEVICE_CPU),
+     ov::device::properties("AUTO",
+                            ov::enable_profiling(false),
+                            ov::device::priorities(CommonTestUtils::DEVICE_GPU),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)),
+     ov::device::properties("CPU",
+                            ov::enable_profiling(true),
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)),
+     ov::device::properties("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY))}};
+
 INSTANTIATE_TEST_SUITE_P(
     smoke_OVClassSetDevicePriorityConfigTest, OVClassSetDevicePriorityConfigTest,
     ::testing::Combine(::testing::Values("MULTI", "AUTO"),
@@ -204,9 +255,22 @@ INSTANTIATE_TEST_SUITE_P(
         ::testing::Values("CPU"));

 // IE Class Load network

+INSTANTIATE_TEST_SUITE_P(smoke_CPU_OVClassLoadNetworkWithCorrectSecondaryPropertiesTest,
+                         OVClassLoadNetworkWithCorrectPropertiesTest,
+                         ::testing::Combine(::testing::Values("CPU", "AUTO:CPU", "MULTI:CPU"),
+                                            ::testing::ValuesIn(configsWithSecondaryProperties)));
+
+INSTANTIATE_TEST_SUITE_P(smoke_Multi_OVClassLoadNetworkWithSecondaryPropertiesTest,
+                         OVClassLoadNetworkWithCorrectPropertiesTest,
+                         ::testing::Combine(::testing::Values("MULTI"),
+                                            ::testing::ValuesIn(multiConfigsWithSecondaryProperties)));
+
+INSTANTIATE_TEST_SUITE_P(smoke_AUTO_OVClassLoadNetworkWithSecondaryPropertiesTest,
+                         OVClassLoadNetworkWithCorrectPropertiesTest,
+                         ::testing::Combine(::testing::Values("AUTO"),
+                                            ::testing::ValuesIn(autoConfigsWithSecondaryProperties)));
+
 INSTANTIATE_TEST_SUITE_P(
     smoke_OVClassLoadNetworkTest, OVClassLoadNetworkTest,
     ::testing::Values("CPU"));

 } // namespace

@@ -692,10 +692,31 @@ const std::vector<ov::AnyMap> gpuCorrectConfigs = {
     }
 };

+const std::vector<ov::AnyMap> gpuCorrectConfigsWithSecondaryProperties = {
+    {ov::device::properties(CommonTestUtils::DEVICE_GPU,
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
+                            ov::hint::allow_auto_batching(false))},
+    {ov::device::properties(CommonTestUtils::DEVICE_GPU,
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
+                            ov::hint::allow_auto_batching(false)),
+     ov::device::properties(CommonTestUtils::DEVICE_CPU,
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY),
+                            ov::hint::allow_auto_batching(false))}};
+
 INSTANTIATE_TEST_SUITE_P(smoke_OVClassLoadNetworkWithCorrectPropertiesAutoBatchingTest, OVClassLoadNetworkWithCorrectPropertiesTest,
         ::testing::Combine(::testing::Values(CommonTestUtils::DEVICE_GPU),
                            ::testing::ValuesIn(gpuCorrectConfigs)));

+INSTANTIATE_TEST_SUITE_P(smoke_OVClassLoadNetworkWithCorrectSecondaryPropertiesTest,
+                         OVClassLoadNetworkWithCorrectPropertiesTest,
+                         ::testing::Combine(::testing::Values(CommonTestUtils::DEVICE_GPU),
+                                            ::testing::ValuesIn(gpuCorrectConfigsWithSecondaryProperties)));
+
+INSTANTIATE_TEST_SUITE_P(smoke_AUTO_OVClassLoadNetworkWithCorrectSecondaryPropertiesTest,
+                         OVClassLoadNetworkWithCorrectPropertiesTest,
+                         ::testing::Combine(::testing::Values("AUTO:GPU", "MULTI:GPU"),
+                                            ::testing::ValuesIn(gpuCorrectConfigsWithSecondaryProperties)));
+
 const std::vector<ov::AnyMap> autoCorrectConfigs = {
     {
         ov::device::priorities(CommonTestUtils::DEVICE_GPU),
@@ -709,10 +730,33 @@ const std::vector<ov::AnyMap> autoCorrectConfigs = {
     }
 };

+const std::vector<ov::AnyMap> autoCorrectConfigsWithSecondaryProperties = {
+    {ov::device::priorities(CommonTestUtils::DEVICE_GPU),
+     ov::device::properties(CommonTestUtils::DEVICE_AUTO,
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
+                            ov::hint::allow_auto_batching(false))},
+    {ov::device::priorities(CommonTestUtils::DEVICE_GPU),
+     ov::device::properties(CommonTestUtils::DEVICE_GPU,
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
+                            ov::hint::allow_auto_batching(false))},
+    {ov::device::priorities(CommonTestUtils::DEVICE_GPU),
+     ov::device::properties(CommonTestUtils::DEVICE_GPU,
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
+                            ov::hint::allow_auto_batching(false)),
+     ov::device::properties(CommonTestUtils::DEVICE_CPU,
+                            ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY),
+                            ov::hint::allow_auto_batching(false))}};
+
 INSTANTIATE_TEST_SUITE_P(smoke_Auto_OVClassLoadNetworkWithCorrectPropertiesAutoBatchingTest, OVClassLoadNetworkWithCorrectPropertiesTest,
         ::testing::Combine(::testing::Values(CommonTestUtils::DEVICE_MULTI, CommonTestUtils::DEVICE_AUTO),
                            ::testing::ValuesIn(autoCorrectConfigs)));

+INSTANTIATE_TEST_SUITE_P(smoke_Auto_OVClassLoadNetworkWithCorrectSecondaryPropertiesTest,
+                         OVClassLoadNetworkWithCorrectPropertiesTest,
+                         ::testing::Combine(::testing::Values(CommonTestUtils::DEVICE_MULTI,
+                                                              CommonTestUtils::DEVICE_AUTO),
+                                            ::testing::ValuesIn(autoCorrectConfigsWithSecondaryProperties)));
+
 const std::vector<ov::AnyMap> batchCorrectConfigs = {
     {}
 };

@@ -1063,6 +1063,31 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) {
     }
 }

+//
+// LoadNetwork with AUTO on MULTI combinations particular device
+//
+
+TEST_P(OVClassLoadNetworkTest, LoadNetworkMULTIwithAUTONoThrow) {
+    ov::Core ie = createCoreWithTemplate();
+    if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+        std::string devices;
+        auto availableDevices = ie.get_property(deviceName, ov::available_devices);
+        for (auto&& device : availableDevices) {
+            devices += deviceName + '.' + device;
+            if (&device != &(availableDevices.back())) {
+                devices += ',';
+            }
+        }
+        OV_ASSERT_NO_THROW(
+            ie.compile_model(actualNetwork,
+                             CommonTestUtils::DEVICE_MULTI,
+                             ov::device::properties(CommonTestUtils::DEVICE_AUTO, ov::device::priorities(devices)),
+                             ov::device::properties(CommonTestUtils::DEVICE_MULTI,
+                                                    ov::device::priorities(CommonTestUtils::DEVICE_AUTO, deviceName))));
+    } else {
+        GTEST_SKIP();
+    }
+}
+
 //
 // LoadNetwork with HETERO on MULTI combinations particular device
 //