[AUTO plugin] AUTO plugin will ignore other plugins' configuration (#5979)

* AUTO plugin will ignore other plugins' configuration

Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>

* Update tests

Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>

* Support PERF_COUNT config, which is needed in benchmark_app

Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>

* Address reviewer comments: check the config and throw an exception for unsupported keys; keys that begin with "AUTO_" are ignored

Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>

* Fix CI tests issue

Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>
Shoujiang Ma, 2021-06-08 17:11:58 +08:00, committed by GitHub
commit dcf36565b0 (parent eadf2c4ce0)
20 changed files with 140 additions and 190 deletions
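Taken together, the change narrows what AUTO accepts as configuration: its own "AUTO_"-prefixed keys plus KEY_PERF_COUNT, with everything else rejected instead of being forwarded to the underlying device plugin. A minimal sketch of the resulting contract from the application side (illustrative only; it assumes the 2021-era InferenceEngine::Core API and the literal key spellings shown):

#include <ie_core.hpp>

int main() {
    InferenceEngine::Core ie;
    // Accepted: AUTO's own device list, and PERF_COUNT with a YES/NO value.
    ie.SetConfig({{"AUTO_DEVICE_LIST", "CPU,GPU"}}, "AUTO");
    ie.SetConfig({{"PERF_COUNT", "YES"}}, "AUTO");
    // Rejected after this commit: device-specific keys such as
    // CPU_THROUGHPUT_STREAMS throw instead of being passed through.
    try {
        ie.SetConfig({{"CPU_THROUGHPUT_STREAMS", "8"}}, "AUTO");
    } catch (const InferenceEngine::Exception&) {
        // expected: "Unsupported config key: CPU_THROUGHPUT_STREAMS"
    }
    return 0;
}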


@@ -3,10 +3,8 @@
//
#include <string>
#include <vector>
#include <memory>
#include <map>
#include <unordered_map>
#include "ie_metric_helpers.hpp"
#include "auto_exec_network.hpp"
@@ -15,8 +13,8 @@
namespace AutoPlugin {
using namespace InferenceEngine;
AutoExecutableNetwork::AutoExecutableNetwork(const SoExecutableNetworkInternal& network) :
_network(network) {
AutoExecutableNetwork::AutoExecutableNetwork(const SoExecutableNetworkInternal& network, bool enablePerfCount) :
_network(network), _enablePerfCount(enablePerfCount) {
}
AutoExecutableNetwork::~AutoExecutableNetwork() = default;
@@ -24,7 +22,7 @@ AutoExecutableNetwork::~AutoExecutableNetwork() = default;
InferenceEngine::IInferRequestInternal::Ptr AutoExecutableNetwork::CreateInferRequestImpl(InputsDataMap networkInputs,
OutputsDataMap networkOutputs) {
SoIInferRequestInternal inferRequest = {_network, _network->CreateInferRequest()};
return std::make_shared<AutoInferRequest>(_networkInputs, _networkOutputs, inferRequest);
return std::make_shared<AutoInferRequest>(_networkInputs, _networkOutputs, inferRequest, _enablePerfCount);
}
void AutoExecutableNetwork::Export(std::ostream& networkModel) {


@@ -19,16 +19,11 @@ namespace AutoPlugin {
using DeviceName = std::string;
struct DeviceInformation {
DeviceName deviceName;
std::map<std::string, std::string> config;
};
class AutoExecutableNetwork : public InferenceEngine::IExecutableNetworkInternal {
public:
using Ptr = std::shared_ptr<AutoExecutableNetwork>;
explicit AutoExecutableNetwork(const InferenceEngine::SoExecutableNetworkInternal& network);
explicit AutoExecutableNetwork(const InferenceEngine::SoExecutableNetworkInternal& network, bool enablePerfCount);
void Export(std::ostream& networkModel) override;
InferenceEngine::RemoteContext::Ptr GetContext() const override;
@@ -43,6 +38,7 @@ public:
private:
InferenceEngine::SoExecutableNetworkInternal _network;
bool _enablePerfCount;
};
} // namespace AutoPlugin


@@ -11,13 +11,23 @@ namespace AutoPlugin {
AutoInferRequest::AutoInferRequest(const InputsDataMap& networkInputs,
const OutputsDataMap& networkOutputs,
const SoIInferRequestInternal& inferRequest)
const SoIInferRequestInternal& inferRequest,
bool enablePerfCount)
: IInferRequestInternal(networkInputs, networkOutputs)
, _inferRequest(inferRequest) {
, _inferRequest(inferRequest)
, _enablePerfCount(enablePerfCount) {
}
std::map<std::string, InferenceEngine::InferenceEngineProfileInfo> AutoInferRequest::GetPerformanceCounts() const {
if (_enablePerfCount) {
try {
return _inferRequest->GetPerformanceCounts();
} catch (...) {
return {};
}
} else {
return {};
}
}
void AutoInferRequest::InferImpl() {
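With the flag plumbed through, GetPerformanceCounts only queries the underlying request when the network was loaded with perf counting enabled, and it degrades to an empty map instead of propagating a device exception. A usage sketch (fragment; assumes execNet is an ExecutableNetwork loaded on "AUTO" with PERF_COUNT=YES):

// Fragment: execNet loaded on "AUTO" with PERF_COUNT=YES is assumed.
auto request = execNet.CreateInferRequest();
request.Infer();
auto counts = request.GetPerformanceCounts();  // empty map when perf counting is off
for (const auto& kv : counts) {
    // realTime_uSec holds the measured layer execution time in microseconds.
    std::cout << kv.first << ": " << kv.second.realTime_uSec << " us" << std::endl;
}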


@@ -24,7 +24,8 @@ public:
using Ptr = std::shared_ptr<AutoInferRequest>;
explicit AutoInferRequest(const InferenceEngine::InputsDataMap& networkInputs,
const InferenceEngine::OutputsDataMap& networkOutputs,
const InferenceEngine::SoIInferRequestInternal& inferRequest);
const InferenceEngine::SoIInferRequestInternal& inferRequest,
bool enablePerfCount);
std::map<std::string, InferenceEngine::InferenceEngineProfileInfo> GetPerformanceCounts() const override;
void InferImpl() override;
void SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr& data) override;
@@ -37,6 +38,7 @@ public:
private:
InferenceEngine::SoIInferRequestInternal _inferRequest;
bool _enablePerfCount;
};
} // namespace AutoPlugin


@@ -75,11 +75,11 @@ IE::QueryNetworkResult AutoInferencePlugin::QueryNetwork(const IE::CNNNetwork& n
}
auto fullConfig = mergeConfigs(_config, config);
auto metaDevices = GetDeviceChoice(fullConfig);
auto metaDevices = GetDeviceList(fullConfig);
std::unordered_set<std::string> supportedLayers;
for (auto&& value : metaDevices) {
try {
auto deviceQr = GetCore()->QueryNetwork(network, value.deviceName, value.config);
auto deviceQr = GetCore()->QueryNetwork(network, value, {});
std::unordered_set<std::string> deviceSupportedLayers;
for (auto &&layerQr : deviceQr.supportedLayersMap) {
deviceSupportedLayers.emplace(layerQr.first);
@@ -111,7 +111,19 @@ IE::Parameter AutoInferencePlugin::GetConfig(const std::string& name,
void AutoInferencePlugin::SetConfig(const ConfigType& config) {
for (auto && kvp : config) {
if (kvp.first.find("AUTO_") == 0) {
_config[kvp.first] = kvp.second;
} else if (kvp.first == IE::PluginConfigParams::KEY_PERF_COUNT) {
if (kvp.second == IE::PluginConfigParams::YES ||
kvp.second == IE::PluginConfigParams::NO) {
_config[kvp.first] = kvp.second;
} else {
IE_THROW() << "Unsupported config value: " << kvp.second
<< " for key: " << kvp.first;
}
} else {
IE_THROW() << "Unsupported config key: " << kvp.first;
}
}
}
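SetConfig therefore stores PERF_COUNT only when its value is exactly YES or NO, stores any "AUTO_"-prefixed key as-is, and throws for everything else. For example (sketch; string literals stand in for the IE::PluginConfigParams constants):

ie.SetConfig({{"PERF_COUNT", "YES"}}, "AUTO");        // stored
ie.SetConfig({{"AUTO_DEVICE_LIST", "CPU"}}, "AUTO");  // "AUTO_" prefix: stored as-is
// ie.SetConfig({{"PERF_COUNT", "ON"}}, "AUTO");      // throws: unsupported config value
// ie.SetConfig({{"ENFORCE_BF16", "YES"}}, "AUTO");   // throws: unsupported config key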
@@ -128,7 +140,10 @@ IE::Parameter AutoInferencePlugin::GetMetric(const std::string& name,
std::string device_name = {"Inference Engine AUTO device"};
IE_SET_METRIC_RETURN(FULL_DEVICE_NAME, device_name);
} else if (name == METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
std::vector<std::string> configKeys;
std::vector<std::string> configKeys = {
IE::KEY_AUTO_DEVICE_LIST,
IE::PluginConfigParams::KEY_PERF_COUNT
};
IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, configKeys);
} else if (name == METRIC_KEY(OPTIMIZATION_CAPABILITIES)) {
std::vector<std::string> capabilities = GetOptimizationCapabilities(options);
@@ -139,42 +154,21 @@
}
//////////////////////////////////// private & protected functions ///////////////////
std::vector<AutoPlugin::DeviceInformation> AutoInferencePlugin::GetDeviceChoice(const ConfigType& config) const {
std::vector<DeviceInformation> metaDevices;
std::vector<std::string> availableDevices;
std::vector<DeviceName> AutoInferencePlugin::GetDeviceList(const ConfigType& config) const {
std::vector<DeviceName> deviceList;
auto deviceListConfig = config.find(IE::KEY_AUTO_DEVICE_LIST);
if (deviceListConfig == config.end()) {
availableDevices = GetCore()->GetAvailableDevices();
deviceList = GetCore()->GetAvailableDevices();
} else {
availableDevices = IE::DeviceIDParser::getHeteroDevices(deviceListConfig->second);
deviceList = IE::DeviceIDParser::getHeteroDevices(deviceListConfig->second);
}
auto getDeviceConfig = [&] (const DeviceName & deviceWithID) {
IE::DeviceIDParser deviceParser(deviceWithID);
std::string deviceName = deviceParser.getDeviceName();
ConfigType tconfig = config;
// set device ID if any
std::string deviceIDLocal = deviceParser.getDeviceID();
if (!deviceIDLocal.empty()) {
tconfig[IE::PluginConfigParams::KEY_DEVICE_ID] = deviceIDLocal;
}
return GetSupportedConfig(tconfig, deviceName);
};
for (auto && d : availableDevices) {
if (d != _pluginName) {
metaDevices.push_back({ d, getDeviceConfig(d)});
}
}
if (metaDevices.empty()) {
if (deviceList.empty()) {
IE_THROW() << "Please, check environment due to no supported devices can be used";
}
return metaDevices;
return deviceList;
}
std::vector<std::string> AutoInferencePlugin::GetOptimizationCapabilities(const std::map<std::string, IE::Parameter> & options) const {
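GetDeviceList now returns bare device names: the value of KEY_AUTO_DEVICE_LIST split like a HETERO priority string, or Core::GetAvailableDevices() when the key is absent. A small sketch of the parsing half (getHeteroDevices is the existing IE::DeviceIDParser helper; the input string is an assumed example):

std::vector<std::string> deviceList =
    InferenceEngine::DeviceIDParser::getHeteroDevices("CPU,GPU.1");
// deviceList == {"CPU", "GPU.1"}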
@@ -215,7 +209,21 @@ ConfigType AutoInferencePlugin::GetSupportedConfig(const ConfigType& config,
return supportedConfig;
}
DeviceInformation AutoInferencePlugin::SelectDevice(const std::vector<DeviceInformation>& metaDevices, const std::string& networkPrecision) {
void AutoInferencePlugin::CheckConfig(const ConfigType& config) {
std::vector<std::string> supportedConfigKeys = GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), {});
for (auto&& c : config) {
auto itKey = std::find(supportedConfigKeys.begin(), supportedConfigKeys.end(), c.first);
if (supportedConfigKeys.end() == itKey) {
// CVS-57233
if (c.first.find("AUTO_") == 0) {
continue;
}
IE_THROW() << "AUTO plugin doesn't support config key " << c.first;
}
}
}
DeviceName AutoInferencePlugin::SelectDevice(const std::vector<DeviceName>& metaDevices, const std::string& networkPrecision) {
if (metaDevices.empty()) {
IE_THROW(NotFound) << "No available device to select in AUTO plugin";
}
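Because CheckConfig runs at the top of LoadNetwork (see the plugin header below), unsupported keys now fail fast even when passed per-load rather than through SetConfig, while "AUTO_"-prefixed keys are skipped (tracked as CVS-57233). A sketch, assuming cnnNet is a CNNNetwork prepared by the caller:

auto execNet = ie.LoadNetwork(cnnNet, "AUTO",
    {{"AUTO_DEVICE_LIST", "CPU"}});          // "AUTO_" prefix: passes CheckConfig
// ie.LoadNetwork(cnnNet, "AUTO",
//     {{"CPU_THROUGHPUT_STREAMS", "8"}});   // would throw: unsupported config key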
@@ -223,15 +231,15 @@ DeviceInformation AutoInferencePlugin::SelectDevice(const std::vector<DeviceInfo
return metaDevices.at(0);
}
std::vector<DeviceInformation> CPU;
std::vector<DeviceInformation> GPU;
std::vector<DeviceName> CPU;
std::vector<DeviceName> GPU;
for (auto& item : metaDevices) {
if (item.deviceName.find("CPU") == 0) {
if (item.find("CPU") == 0) {
CPU.push_back(item);
continue;
}
if (item.deviceName.find("GPU") == 0) {
if (item.find("GPU") == 0) {
GPU.push_back(item);
continue;
}
@@ -242,10 +250,10 @@ DeviceInformation AutoInferencePlugin::SelectDevice(const std::vector<DeviceInfo
}
// Sort GPU by name: GPU.2 > GPU.1 > GPU.0 > GPU, so we always choose GPU[0] as the best device
std::sort(GPU.begin(), GPU.end(), [](const DeviceInformation& a, const DeviceInformation& b)->bool{return b.deviceName < a.deviceName;});
std::sort(GPU.begin(), GPU.end(), [](const DeviceName& a, const DeviceName& b)->bool{return b < a;});
for (auto&& item : GPU) {
std::vector<std::string> capability = GetCore()->GetMetric(item.deviceName, METRIC_KEY(OPTIMIZATION_CAPABILITIES));
std::vector<std::string> capability = GetCore()->GetMetric(item, METRIC_KEY(OPTIMIZATION_CAPABILITIES));
auto res = std::find(capability.begin(), capability.end(), networkPrecision);
if (res != capability.end()) {
return item;
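The descending lexicographic sort is what makes numbered discrete devices win over the bare "GPU" name, since "GPU.2" > "GPU.1" > "GPU.0" > "GPU" as strings. A self-contained check of the same comparator:

#include <algorithm>
#include <string>
#include <vector>

int main() {
    std::vector<std::string> GPU = {"GPU", "GPU.2", "GPU.0", "GPU.1"};
    // Descending sort, matching the lambda used in SelectDevice above.
    std::sort(GPU.begin(), GPU.end(),
              [](const std::string& a, const std::string& b) { return b < a; });
    // GPU is now {"GPU.2", "GPU.1", "GPU.0", "GPU"}; GPU[0] is tried first.
    return 0;
}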


@@ -30,10 +30,11 @@ public:
void SetConfig(const ConfigType& config) override;
private:
std::vector<AutoPlugin::DeviceInformation> GetDeviceChoice(const ConfigType& config) const;
std::vector<DeviceName> GetDeviceList(const ConfigType& config) const;
std::vector<std::string> GetOptimizationCapabilities(const std::map<std::string, IE::Parameter>& options) const;
DeviceInformation SelectDevice(const std::vector<DeviceInformation>& metaDevices, const std::string& networkPrecision = METRIC_VALUE(FP32));
ConfigType GetSupportedConfig(const ConfigType& config, const AutoPlugin::DeviceName & deviceName) const;
DeviceName SelectDevice(const std::vector<DeviceName>& metaDevices, const std::string& networkPrecision = METRIC_VALUE(FP32));
ConfigType GetSupportedConfig(const ConfigType& config, const DeviceName & deviceName) const;
void CheckConfig(const ConfigType& config);
static ConfigType mergeConfigs(ConfigType config, const ConfigType& local);
template <typename T>
@@ -41,18 +42,21 @@ private:
if (GetCore() == nullptr) {
IE_THROW() << "Please, work with AUTO device via InferencEngine::Core object";
}
CheckConfig(config);
auto fullConfig = mergeConfigs(_config, config);
auto metaDevices = GetDeviceChoice(fullConfig);
DeviceInformation selectedDevice;
auto metaDevices = GetDeviceList(fullConfig);
DeviceName selectedDevice;
IE::SoExecutableNetworkInternal executableNetwork;
while (!metaDevices.empty()) {
selectedDevice = SelectDevice(metaDevices, networkPrecision);
try {
executableNetwork = GetCore()->LoadNetwork(param, selectedDevice.deviceName, selectedDevice.config);
executableNetwork = GetCore()->LoadNetwork(param, selectedDevice, {});
break;
} catch (...) {
auto eraseDevice = std::find_if(metaDevices.begin(), metaDevices.end(),
[=](const DeviceInformation& d)->bool{return d.deviceName == selectedDevice.deviceName;});
[=](const DeviceName& d)->bool{return d == selectedDevice;});
if (eraseDevice == metaDevices.end()) {
IE_THROW() << "Didn't find the selected device name";
}
@@ -63,7 +67,10 @@ private:
if (!executableNetwork) {
IE_THROW() << "Failed to load network by AUTO plugin";
}
auto impl = std::make_shared<AutoExecutableNetwork>(executableNetwork);
bool enablePerfCount = fullConfig.find(IE::PluginConfigParams::KEY_PERF_COUNT) != fullConfig.end();
auto impl = std::make_shared<AutoExecutableNetwork>(executableNetwork, enablePerfCount);
if (std::is_same<std::string, T>::value) {
SetExeNetworkInfo(impl, executableNetwork->GetInputsInfo(),


@@ -42,18 +42,7 @@ namespace {
};
const std::vector<std::map<std::string, std::string>> AutoConfigs = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_NUMA}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, "8"}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, InferenceEngine::PluginConfigParams::NO}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, InferenceEngine::PluginConfigParams::YES}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "10"}}
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, CorrectConfigTests,
@@ -94,21 +83,13 @@ namespace {
const std::vector<std::map<std::string, std::string>> autoinconfigs = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, "OFF"}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, "OFF"}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "NAN"}}
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, "OFF"}}
};
const std::vector<std::map<std::string, std::string>> multiconf = {
{{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_CPU}}
};
const std::vector<std::map<std::string, std::string>> autoconf = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, CorrectConfigAPITests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
@@ -127,7 +108,7 @@ namespace {
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(autoconf)),
::testing::ValuesIn(AutoConfigs)),
CorrectConfigAPITests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigTests,
@@ -144,13 +125,6 @@ namespace {
::testing::ValuesIn(multiinconfigs)),
IncorrectConfigTests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, IncorrectConfigTests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(autoinconfigs)),
IncorrectConfigTests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigAPITests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),


@@ -51,20 +51,7 @@ namespace {
};
const std::vector<std::map<std::string, std::string>> AutoConfigs = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS,
InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS,
InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_NUMA}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, "8"}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, InferenceEngine::PluginConfigParams::NO}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, InferenceEngine::PluginConfigParams::YES}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "10"}}
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferConfigTests,


@@ -26,9 +26,7 @@ namespace {
};
const std::vector<std::map<std::string, std::string>> autoConfigs = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}}
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestInputTests,


@@ -22,9 +22,7 @@ namespace {
};
const std::vector<std::map<std::string, std::string>> autoConfigs = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}}
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestOutputTests,


@@ -14,10 +14,6 @@ namespace {
{{ MULTI_CONFIG_KEY(DEVICE_PRIORITIES) , CommonTestUtils::DEVICE_CPU}}
};
const std::vector<std::map<std::string, std::string>> Autoconfigs = {
{{ AUTO_CONFIG_KEY(DEVICE_LIST) , CommonTestUtils::DEVICE_CPU}}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PerfCountersTest,
::testing::Combine(
::testing::Values(InferenceEngine::Precision::FP32),
@@ -32,11 +28,4 @@ namespace {
::testing::ValuesIn(Multiconfigs)),
PerfCountersTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, PerfCountersTest,
::testing::Combine(
::testing::Values(InferenceEngine::Precision::FP32),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(Autoconfigs)),
PerfCountersTest::getTestCaseName);
} // namespace


@@ -37,9 +37,7 @@ namespace {
};
const std::vector<std::map<std::string, std::string>> AutoConfigsInputOutput = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_CPU}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_CPU},
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}}
{{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_CPU}}
};
const std::vector<std::map<std::string, std::string>> configsOutput = {


@@ -67,6 +67,8 @@ std::vector<std::string> disabledTestPatterns() {
// TODO: 55656 AUTO plugin and QueryNetwork
R"(.*CoreThreading.*smoke_QueryNetwork.*targetDevice=AUTO_config.*)",
// Unsupported config KEY_ENFORCE_BF16 for AUTO plugin
R"(.*smoke_SetBlobOfKindAUTO.*SetBlobOfKindTest.CompareWithRefs.*)",
// reference doesn't cover I8, U8 cases. Issue: 55842
R"(.*Gather7LayerTest.*netPRC=I8.*)",
};


@@ -106,6 +106,13 @@ namespace {
::testing::ValuesIn(autoconf)),
CorrectConfigAPITests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, CorrectConfigAPITests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(auto_cpu_gpu_conf)),
CorrectConfigAPITests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigAPITests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
@@ -124,14 +131,14 @@ namespace {
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(autoconf)),
::testing::ValuesIn(autoinconfigs)),
IncorrectConfigAPITests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, IncorrectConfigAPITests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(auto_cpu_gpu_conf)),
::testing::ValuesIn(autoinconfigs)),
IncorrectConfigAPITests::getTestCaseName);


@@ -26,9 +26,7 @@ namespace {
};
const std::vector<std::map<std::string, std::string>> autoConfigs = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_GPU},
{InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS,
InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_GPU}}
};
const std::vector<std::map<std::string, std::string>> auto_cpu_gpu_conf = {


@@ -22,8 +22,7 @@ namespace {
};
const std::vector<std::map<std::string, std::string>> autoConfigs = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_GPU},
{InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_GPU}}
};
const std::vector<std::map<std::string, std::string>> auto_cpu_gpu_conf = {


@@ -14,14 +14,6 @@ namespace {
{{ MULTI_CONFIG_KEY(DEVICE_PRIORITIES) , CommonTestUtils::DEVICE_GPU}}
};
const std::vector<std::map<std::string, std::string>> Autoconfigs = {
{{ AUTO_CONFIG_KEY(DEVICE_LIST) , CommonTestUtils::DEVICE_GPU}}
};
const std::vector<std::map<std::string, std::string>> auto_cpu_gpu_conf = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST , std::string(CommonTestUtils::DEVICE_CPU) + "," + CommonTestUtils::DEVICE_GPU}}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PerfCountersTest,
::testing::Combine(
::testing::Values(InferenceEngine::Precision::FP32),
@@ -36,18 +28,4 @@ namespace {
::testing::ValuesIn(Multiconfigs)),
PerfCountersTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, PerfCountersTest,
::testing::Combine(
::testing::Values(InferenceEngine::Precision::FP32),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(Autoconfigs)),
PerfCountersTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, PerfCountersTest,
::testing::Combine(
::testing::Values(InferenceEngine::Precision::FP32),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(auto_cpu_gpu_conf)),
PerfCountersTest::getTestCaseName);
} // namespace


@@ -42,18 +42,6 @@ namespace {
{InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
};
const std::vector<std::map<std::string, std::string>> AutoConfigsInputOutput = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_GPU}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_GPU},
{InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
};
const std::vector<std::map<std::string, std::string>> AutoCGConfigsInputOutput = {
{{InferenceEngine::KEY_AUTO_DEVICE_LIST, std::string(CommonTestUtils::DEVICE_CPU) + "," + CommonTestUtils::DEVICE_GPU}},
{{InferenceEngine::KEY_AUTO_DEVICE_LIST, std::string(CommonTestUtils::DEVICE_CPU) + "," + CommonTestUtils::DEVICE_GPU},
{InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
};
const std::vector<std::map<std::string, std::string>> configsOutput = {
{},
{{InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
@@ -77,14 +65,14 @@ namespace {
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(AutoConfigsInputOutput)),
::testing::ValuesIn(AutoConfigs)),
BehaviorTestOutput::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, BehaviorTestOutput,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(AutoCGConfigsInputOutput)),
::testing::ValuesIn(auto_cpu_gpu_conf)),
BehaviorTestOutput::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, BehaviorTests,
@@ -133,14 +121,14 @@ namespace {
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(AutoConfigsInputOutput)),
::testing::ValuesIn(AutoConfigs)),
BehaviorTestInput::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, BehaviorTestInput,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(AutoCGConfigsInputOutput)),
::testing::ValuesIn(auto_cpu_gpu_conf)),
BehaviorTestInput::getTestCaseName);
} // namespace


@@ -57,8 +57,7 @@ namespace BehaviorTestsDefinitions {
// Create CNNNetwork from ngraph::Function
InferenceEngine::CNNNetwork cnnNet(function);
if (targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
ASSERT_THROW(ie->SetConfig(configuration, targetDevice),
InferenceEngine::Exception);
@@ -73,9 +72,13 @@ namespace BehaviorTestsDefinitions {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
// Create CNNNetwork from ngraph::Function
InferenceEngine::CNNNetwork cnnNet(function);
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) != std::string::npos) {
GTEST_SKIP();
} else {
ASSERT_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration),
InferenceEngine::Exception);
}
}
using IncorrectConfigAPITests = BehaviorTestsUtils::BehaviorTestsBasic;
@@ -110,8 +113,10 @@ namespace BehaviorTestsDefinitions {
ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
}
// Load CNNNetwork to target plugins
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
execNet.CreateInferRequest();
}
if ((targetDevice == CommonTestUtils::DEVICE_HDDL) || (targetDevice == CommonTestUtils::DEVICE_GNA)) {
ASSERT_EQ(0u, InferenceEngine::ExecutorManager::getInstance()->getExecutorsNumber());
@@ -139,8 +144,10 @@ namespace BehaviorTestsDefinitions {
ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
}
// Load CNNNetwork to target plugins
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
execNet.CreateInferRequest();
}
if ((targetDevice == CommonTestUtils::DEVICE_MYRIAD) ||
(targetDevice == CommonTestUtils::DEVICE_KEEMBAY)) {
@@ -170,8 +177,10 @@ namespace BehaviorTestsDefinitions {
ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
}
// Load CNNNetwork to target plugins
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
execNet.CreateInferRequest();
}
if ((targetDevice == CommonTestUtils::DEVICE_MYRIAD) ||
(targetDevice == CommonTestUtils::DEVICE_KEEMBAY)) {


@@ -42,8 +42,10 @@ TEST_P(InferConfigTests, canSetExclusiveAsyncRequests) {
ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
}
// Load CNNNetwork to target plugins
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
execNet.CreateInferRequest();
}
if ((targetDevice == CommonTestUtils::DEVICE_HDDL) || (targetDevice == CommonTestUtils::DEVICE_GNA)) {
ASSERT_EQ(0u, InferenceEngine::ExecutorManager::getInstance()->getExecutorsNumber());
@@ -71,8 +73,10 @@ TEST_P(InferConfigTests, withoutExclusiveAsyncRequests) {
ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
}
// Load CNNNetwork to target plugins
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
execNet.CreateInferRequest();
}
if ((targetDevice == CommonTestUtils::DEVICE_GNA) || (targetDevice == CommonTestUtils::DEVICE_HDDL)) {
ASSERT_EQ(0u, InferenceEngine::ExecutorManager::getInstance()->getExecutorsNumber());