From b968c7b813a2e89ccd944f76d02b0ddd709fbc02 Mon Sep 17 00:00:00 2001
From: Yuan Hu
Date: Sat, 25 Sep 2021 17:30:21 +0800
Subject: [PATCH] AutoPlugin: support performance Hint (#7621)

Signed-off-by: Hu, Yuan2
---
 .../samples/benchmark_app/main.cpp           |  3 -
 .../src/multi_device/multi_device_plugin.cpp |  6 +-
 .../behavior/config.cpp                      | 12 ++--
 .../behavior/config.cpp                      | 68 +++++++++++++++++--
 4 files changed, 70 insertions(+), 19 deletions(-)

diff --git a/inference-engine/samples/benchmark_app/main.cpp b/inference-engine/samples/benchmark_app/main.cpp
index 2f08f790881..52b974368c8 100644
--- a/inference-engine/samples/benchmark_app/main.cpp
+++ b/inference-engine/samples/benchmark_app/main.cpp
@@ -221,9 +221,6 @@ int main(int argc, char* argv[]) {
         bool perf_counts = false;
         // Update config per device according to command line parameters
         for (auto& device : devices) {
-            if (device == "AUTO") {
-                continue;
-            }
             if (!config.count(device))
                 config[device] = {};
             std::map<std::string, std::string>& device_config = config.at(device);
diff --git a/inference-engine/src/multi_device/multi_device_plugin.cpp b/inference-engine/src/multi_device/multi_device_plugin.cpp
index 8017d1e07de..d3e0dc303c1 100644
--- a/inference-engine/src/multi_device/multi_device_plugin.cpp
+++ b/inference-engine/src/multi_device/multi_device_plugin.cpp
@@ -253,11 +253,9 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
     }
     // replace the configure with configure that auto want to pass to device
     // and reset the strDevices to support devices
-    std::vector<std::string> validConfigKey;
+    auto validConfigKey = PerfHintsConfig::SupportedKeys();
     validConfigKey.push_back(PluginConfigParams::KEY_PERF_COUNT);
     validConfigKey.push_back(PluginConfigParams::KEY_EXCLUSIVE_ASYNC_REQUESTS);
-    validConfigKey.push_back(PluginConfigParams::KEY_PERFORMANCE_HINT);
-    validConfigKey.push_back(PluginConfigParams::KEY_PERFORMANCE_HINT_NUM_REQUESTS);
     strDevices = "";
     for (auto iter = supportDevices.begin(); iter != supportDevices.end(); iter++) {
         std::map<std::string, std::string> deviceConfig;
@@ -268,7 +266,7 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
             }
         }
         iter->config = deviceConfig;
-        strDevices = iter->deviceName;
+        strDevices += iter->deviceName;
         strDevices += ((iter + 1) == supportDevices.end()) ? "" : ",";
     }

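For context, the hunk above changes two things in LoadNetworkImpl: the whitelist of config keys now starts from PerfHintsConfig::SupportedKeys(), and the device string is rebuilt with += so no device name is dropped. Below is a minimal standalone sketch of that filtering and joining logic, with made-up key names and no Inference Engine dependencies; it is illustrative only, not the plugin source.

// Illustrative sketch only (plain C++); the real whitelist starts from
// PerfHintsConfig::SupportedKeys() as shown in the hunk above.
#include <algorithm>
#include <iostream>
#include <map>
#include <string>
#include <vector>

int main() {
    // Keys the plugin forwards to the devices it manages (assumed values).
    const std::vector<std::string> validConfigKey = {"PERFORMANCE_HINT",
                                                     "PERFORMANCE_HINT_NUM_REQUESTS",
                                                     "PERF_COUNT",
                                                     "EXCLUSIVE_ASYNC_REQUESTS"};
    // Config as received from the application.
    const std::map<std::string, std::string> fullConfig = {
        {"PERFORMANCE_HINT", "THROUGHPUT"},
        {"UNRELATED_KEY", "VALUE"}};
    const std::vector<std::string> supportDevices = {"GPU", "CPU"};

    // Keep only whitelisted keys, as the patched LoadNetworkImpl does.
    std::map<std::string, std::string> deviceConfig;
    for (const auto& kv : fullConfig) {
        if (std::find(validConfigKey.begin(), validConfigKey.end(), kv.first) != validConfigKey.end())
            deviceConfig.insert(kv);
    }

    // Rebuild the device list with '+=' so every device is kept, not only the last one.
    std::string strDevices;
    for (auto iter = supportDevices.begin(); iter != supportDevices.end(); ++iter) {
        strDevices += *iter;
        strDevices += ((iter + 1) == supportDevices.end()) ? "" : ",";
    }

    std::cout << strDevices << " receives " << deviceConfig.size() << " config entries\n";
    return 0;
}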
"" : ","; } diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/behavior/config.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/behavior/config.cpp index 0d8cafc651e..48fcab8a20f 100644 --- a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/behavior/config.cpp +++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/behavior/config.cpp @@ -162,10 +162,10 @@ namespace { IncorrectConfigAPITests::getTestCaseName); INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests, IncorrectConfigAPITests, - ::testing::Combine( - ::testing::ValuesIn(netPrecisions), - ::testing::Values(CommonTestUtils::DEVICE_AUTO), - ::testing::ValuesIn(multiinconfigs)), - IncorrectConfigAPITests::getTestCaseName); + ::testing::Combine( + ::testing::ValuesIn(netPrecisions), + ::testing::Values(CommonTestUtils::DEVICE_AUTO), + ::testing::ValuesIn(multiinconfigs)), + IncorrectConfigAPITests::getTestCaseName); -} // namespace \ No newline at end of file +} // namespace diff --git a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/behavior/config.cpp b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/behavior/config.cpp index 882572fead2..468ca89a537 100644 --- a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/behavior/config.cpp +++ b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/behavior/config.cpp @@ -45,6 +45,47 @@ namespace { {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}, {InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, "DEVICE_UNKNOWN"}} }; + + + const std::vector> autoinconfigs = { + {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}, + {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, "DOESN'T EXIST"}}, + {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}, + {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY}, + {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT_NUM_REQUESTS, "-1"}}, + {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}, + {InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, "ON"}}, + {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}, + {InferenceEngine::PluginConfigParams::KEY_CONFIG_FILE, "unknown_file"}}, + {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}, + {InferenceEngine::PluginConfigParams::KEY_DUMP_KERNELS, "ON"}}, + {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}, + {InferenceEngine::PluginConfigParams::KEY_TUNING_MODE, "TUNING_UNKNOWN_MODE"}}, + {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}, + {InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, "DEVICE_UNKNOWN"}}, + {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , + CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU}, + {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, "DOESN'T EXIST"}}, + {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , + CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU}, + {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, 
diff --git a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/behavior/config.cpp b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/behavior/config.cpp
index 882572fead2..468ca89a537 100644
--- a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/behavior/config.cpp
+++ b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/behavior/config.cpp
@@ -45,6 +45,47 @@ namespace {
             {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
                 {InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, "DEVICE_UNKNOWN"}}
     };
+
+
+    const std::vector<std::map<std::string, std::string>> autoinconfigs = {
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, "DOESN'T EXIST"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT_NUM_REQUESTS, "-1"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, "ON"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_CONFIG_FILE, "unknown_file"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_DUMP_KERNELS, "ON"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_TUNING_MODE, "TUNING_UNKNOWN_MODE"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, "DEVICE_UNKNOWN"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, "DOESN'T EXIST"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT_NUM_REQUESTS, "-1"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, "ON"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_CONFIG_FILE, "unknown_file"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_DUMP_KERNELS, "ON"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_TUNING_MODE, "TUNING_UNKNOWN_MODE"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, "DEVICE_UNKNOWN"}}
+    };
     IE_SUPPRESS_DEPRECATED_END

     INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, IncorrectConfigTests,
@@ -65,7 +106,7 @@ namespace {
                             ::testing::Combine(
                                     ::testing::ValuesIn(netPrecisions),
                                     ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                                    ::testing::ValuesIn(multiinconfigs)),
+                                    ::testing::ValuesIn(autoinconfigs)),
                             IncorrectConfigTests::getTestCaseName);


@@ -119,9 +160,24 @@ namespace {
     };

     const std::vector<std::map<std::string, std::string>> autoConfigs = {
-            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
-            {InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES,
-                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU}}
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::THROUGHPUT}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT_NUM_REQUESTS, "1"}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::THROUGHPUT}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY}},
+            {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
+                CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY},
+                {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT_NUM_REQUESTS, "1"}}
     };

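The positive autoConfigs entries above correspond to application-level usage roughly like the sketch below: load a model on AUTO with a device priority list and a performance hint, which this patch now forwards to the selected device. This is a hedged example, not code from the repository; the model path is a placeholder, and the priority key is written as its string literal ("MULTI_DEVICE_PRIORITIES", the value behind MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES) to avoid assuming a particular header layout.

// Hedged usage sketch for a 2021.4-era Inference Engine build; not repository code.
#include <inference_engine.hpp>
#include <map>
#include <string>

int main() {
    InferenceEngine::Core ie;
    auto network = ie.ReadNetwork("model.xml");  // placeholder model

    const std::map<std::string, std::string> config = {
        {"MULTI_DEVICE_PRIORITIES", "GPU,CPU"},
        {InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT,
         InferenceEngine::PluginConfigParams::THROUGHPUT}};

    // With this patch, the hint reaches the device AUTO picks instead of being dropped.
    auto execNet = ie.LoadNetwork(network, "AUTO", config);
    auto request = execNet.CreateInferRequest();
    (void)request;  // a real application would set inputs and call Infer()/StartAsync()
    return 0;
}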
@@ -153,7 +209,7 @@ namespace {
                             ::testing::ValuesIn(multiconf)),
                     CorrectConfigAPITests::getTestCaseName);

-    INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests, CorrectConfigTests,
+    INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests, CorrectConfigAPITests,
                             ::testing::Combine(
                                     ::testing::ValuesIn(netPrecisions),
                                     ::testing::Values(CommonTestUtils::DEVICE_AUTO),
@@ -178,7 +234,7 @@ namespace {
                             ::testing::Combine(
                                     ::testing::ValuesIn(netPrecisions),
                                     ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                                    ::testing::ValuesIn(multiinconfigs)),
+                                    ::testing::ValuesIn(autoinconfigs)),
                             IncorrectConfigAPITests::getTestCaseName);
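Together with the benchmark_app hunk at the top of the patch, the practical effect is that AUTO is no longer skipped when command-line options are spread into per-device configs. The loop below is a trimmed, illustrative sketch of that behavior after the change, with a stand-in option value instead of the sample's real argument parsing.

// Trimmed illustration of the benchmark_app change; not the sample source.
#include <iostream>
#include <map>
#include <string>
#include <vector>

int main() {
    const std::vector<std::string> devices = {"AUTO"};
    std::map<std::string, std::map<std::string, std::string>> config;

    for (const auto& device : devices) {
        // Before the patch: if (device == "AUTO") continue;  // AUTO received no config
        if (!config.count(device))
            config[device] = {};
        auto& device_config = config.at(device);
        device_config["PERF_COUNT"] = "NO";  // stand-in for an option parsed from argv
    }

    std::cout << "AUTO config entries: " << config["AUTO"].size() << "\n";
    return 0;
}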