[AUTO plugin] AUTO plugin will ignore other plugins' configuration (#5979)
* AUTO plugin will ignore other plugins' configuration. Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>
* Update tests. Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>
* Support the PERF_COUNT config key, which benchmark_app needs. Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>
* Address reviewer comments: check the config and throw an exception for unsupported keys, but keys that begin with "AUTO_" are accepted as-is. Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>
* Fix CI test issues. Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>
parent eadf2c4ce0, commit dcf36565b0
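For orientation, a minimal sketch of what the new rules mean for application code. It assumes the 2021-era InferenceEngine C++ API; the device list "CPU,GPU" and the standalone use of SetConfig are illustrative only, not part of this commit.

#include <inference_engine.hpp>
#include <map>
#include <string>

int main() {
    InferenceEngine::Core ie;

    // Only "AUTO_"-prefixed keys and PERF_COUNT=YES/NO are accepted by the AUTO plugin now.
    std::map<std::string, std::string> config = {
        {InferenceEngine::KEY_AUTO_DEVICE_LIST, "CPU,GPU"},
        {InferenceEngine::PluginConfigParams::KEY_PERF_COUNT,
         InferenceEngine::PluginConfigParams::YES}};
    ie.SetConfig(config, "AUTO");

    // Device-specific keys are no longer silently forwarded; they are rejected.
    try {
        ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS,
                       InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}},
                     "AUTO");
    } catch (const InferenceEngine::Exception&) {
        // "Unsupported config key: CPU_THROUGHPUT_STREAMS"
    }
    return 0;
}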
@@ -3,10 +3,8 @@
 //
 
 #include <string>
-#include <vector>
 #include <memory>
 #include <map>
-#include <unordered_map>
 
 #include "ie_metric_helpers.hpp"
 #include "auto_exec_network.hpp"
@@ -15,8 +13,8 @@
 namespace AutoPlugin {
 using namespace InferenceEngine;
 
-AutoExecutableNetwork::AutoExecutableNetwork(const SoExecutableNetworkInternal& network) :
-    _network(network) {
+AutoExecutableNetwork::AutoExecutableNetwork(const SoExecutableNetworkInternal& network, bool enablePerfCount) :
+    _network(network), _enablePerfCount(enablePerfCount) {
 }
 
 AutoExecutableNetwork::~AutoExecutableNetwork() = default;
@@ -24,7 +22,7 @@ AutoExecutableNetwork::~AutoExecutableNetwork() = default;
 InferenceEngine::IInferRequestInternal::Ptr AutoExecutableNetwork::CreateInferRequestImpl(InputsDataMap networkInputs,
                                                                                           OutputsDataMap networkOutputs) {
     SoIInferRequestInternal inferRequest = {_network, _network->CreateInferRequest()};
-    return std::make_shared<AutoInferRequest>(_networkInputs, _networkOutputs, inferRequest);
+    return std::make_shared<AutoInferRequest>(_networkInputs, _networkOutputs, inferRequest, _enablePerfCount);
 }
 
 void AutoExecutableNetwork::Export(std::ostream& networkModel) {
@@ -19,16 +19,11 @@ namespace AutoPlugin {
 
 using DeviceName = std::string;
 
-struct DeviceInformation {
-    DeviceName deviceName;
-    std::map<std::string, std::string> config;
-};
-
 class AutoExecutableNetwork : public InferenceEngine::IExecutableNetworkInternal {
 public:
     using Ptr = std::shared_ptr<AutoExecutableNetwork>;
 
-    explicit AutoExecutableNetwork(const InferenceEngine::SoExecutableNetworkInternal& network);
+    explicit AutoExecutableNetwork(const InferenceEngine::SoExecutableNetworkInternal& network, bool enablePerfCount);
 
     void Export(std::ostream& networkModel) override;
     InferenceEngine::RemoteContext::Ptr GetContext() const override;
@@ -43,6 +38,7 @@ public:
 
 private:
     InferenceEngine::SoExecutableNetworkInternal _network;
+    bool _enablePerfCount;
 };
 
 } // namespace AutoPlugin
@@ -11,13 +11,23 @@ namespace AutoPlugin {
 
 AutoInferRequest::AutoInferRequest(const InputsDataMap& networkInputs,
                                    const OutputsDataMap& networkOutputs,
-                                   const SoIInferRequestInternal& inferRequest)
+                                   const SoIInferRequestInternal& inferRequest,
+                                   bool enablePerfCount)
     : IInferRequestInternal(networkInputs, networkOutputs)
-    , _inferRequest(inferRequest) {
+    , _inferRequest(inferRequest)
+    , _enablePerfCount(enablePerfCount) {
 }
 
 std::map<std::string, InferenceEngine::InferenceEngineProfileInfo> AutoInferRequest::GetPerformanceCounts() const {
-    return _inferRequest->GetPerformanceCounts();
+    if (_enablePerfCount) {
+        try {
+            return _inferRequest->GetPerformanceCounts();
+        } catch (...) {
+            return {};
+        }
+    } else {
+        return {};
+    }
 }
 
 void AutoInferRequest::InferImpl() {
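A usage sketch of the gated perf-count path: once a network has been loaded through AUTO with PERF_COUNT enabled, the counters come straight from the selected device; otherwise the request now returns an empty map instead of throwing. DumpPerfCounts is a hypothetical helper, not part of this commit.

#include <inference_engine.hpp>
#include <iostream>

void DumpPerfCounts(InferenceEngine::InferRequest& request) {
    // Empty when PERF_COUNT was not set, or when the underlying device threw.
    auto counts = request.GetPerformanceCounts();
    for (const auto& entry : counts) {
        const auto& info = entry.second;
        std::cout << entry.first << ": " << info.realTime_uSec << " us ("
                  << info.exec_type << ")" << std::endl;
    }
}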
@@ -24,7 +24,8 @@ public:
     using Ptr = std::shared_ptr<AutoInferRequest>;
     explicit AutoInferRequest(const InferenceEngine::InputsDataMap& networkInputs,
                               const InferenceEngine::OutputsDataMap& networkOutputs,
-                              const InferenceEngine::SoIInferRequestInternal& inferRequest);
+                              const InferenceEngine::SoIInferRequestInternal& inferRequest,
+                              bool enablePerfCount);
     std::map<std::string, InferenceEngine::InferenceEngineProfileInfo> GetPerformanceCounts() const override;
     void InferImpl() override;
     void SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr& data) override;
@@ -37,6 +38,7 @@ public:
 
 private:
     InferenceEngine::SoIInferRequestInternal _inferRequest;
+    bool _enablePerfCount;
 };
 
 } // namespace AutoPlugin
@@ -75,11 +75,11 @@ IE::QueryNetworkResult AutoInferencePlugin::QueryNetwork(const IE::CNNNetwork& n
     }
 
     auto fullConfig = mergeConfigs(_config, config);
-    auto metaDevices = GetDeviceChoice(fullConfig);
+    auto metaDevices = GetDeviceList(fullConfig);
     std::unordered_set<std::string> supportedLayers;
     for (auto&& value : metaDevices) {
        try {
-            auto deviceQr = GetCore()->QueryNetwork(network, value.deviceName, value.config);
+            auto deviceQr = GetCore()->QueryNetwork(network, value, {});
             std::unordered_set<std::string> deviceSupportedLayers;
             for (auto &&layerQr : deviceQr.supportedLayersMap) {
                 deviceSupportedLayers.emplace(layerQr.first);
@@ -111,7 +111,19 @@ IE::Parameter AutoInferencePlugin::GetConfig(const std::string& name,
 
 void AutoInferencePlugin::SetConfig(const ConfigType& config) {
     for (auto && kvp : config) {
-        _config[kvp.first] = kvp.second;
+        if (kvp.first.find("AUTO_") == 0) {
+            _config[kvp.first] = kvp.second;
+        } else if (kvp.first == IE::PluginConfigParams::KEY_PERF_COUNT) {
+            if (kvp.second == IE::PluginConfigParams::YES ||
+                kvp.second == IE::PluginConfigParams::NO) {
+                _config[kvp.first] = kvp.second;
+            } else {
+                IE_THROW() << "Unsupported config value: " << kvp.second
+                           << " for key: " << kvp.first;
+            }
+        } else {
+            IE_THROW() << "Unsupported config key: " << kvp.first;
+        }
     }
 }
 
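The acceptance rule above, restated as a self-contained sketch with plain standard-library types. ValidateAutoConfig and the literal key strings are illustrative; the plugin itself compares against IE::PluginConfigParams constants and reports errors with IE_THROW().

#include <map>
#include <stdexcept>
#include <string>

void ValidateAutoConfig(const std::map<std::string, std::string>& config) {
    for (const auto& kvp : config) {
        if (kvp.first.rfind("AUTO_", 0) == 0) {
            continue;  // e.g. AUTO_DEVICE_LIST is stored as-is
        }
        if (kvp.first == "PERF_COUNT") {
            if (kvp.second == "YES" || kvp.second == "NO") {
                continue;  // the only values the plugin stores
            }
            throw std::invalid_argument("Unsupported config value: " + kvp.second);
        }
        throw std::invalid_argument("Unsupported config key: " + kvp.first);
    }
}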
@@ -128,7 +140,10 @@ IE::Parameter AutoInferencePlugin::GetMetric(const std::string& name,
         std::string device_name = {"Inference Engine AUTO device"};
         IE_SET_METRIC_RETURN(FULL_DEVICE_NAME, device_name);
     } else if (name == METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
-        std::vector<std::string> configKeys;
+        std::vector<std::string> configKeys = {
+            IE::KEY_AUTO_DEVICE_LIST,
+            IE::PluginConfigParams::KEY_PERF_COUNT
+        };
         IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, configKeys);
     } else if (name == METRIC_KEY(OPTIMIZATION_CAPABILITIES)) {
         std::vector<std::string> capabilities = GetOptimizationCapabilities(options);
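From the application side the advertised keys can be read back through the same metric; a rough sketch, with error handling omitted:

#include <inference_engine.hpp>
#include <iostream>
#include <string>
#include <vector>

int main() {
    InferenceEngine::Core ie;
    // After this change the list is expected to contain AUTO_DEVICE_LIST and PERF_COUNT.
    auto keys = ie.GetMetric("AUTO", METRIC_KEY(SUPPORTED_CONFIG_KEYS))
                    .as<std::vector<std::string>>();
    for (const auto& key : keys) {
        std::cout << key << std::endl;
    }
    return 0;
}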
@@ -139,42 +154,21 @@ IE::Parameter AutoInferencePlugin::GetMetric(const std::string& name,
 }
 
 //////////////////////////////////// private & protected functions ///////////////////
-std::vector<AutoPlugin::DeviceInformation> AutoInferencePlugin::GetDeviceChoice(const ConfigType& config) const {
-    std::vector<DeviceInformation> metaDevices;
-    std::vector<std::string> availableDevices;
+std::vector<DeviceName> AutoInferencePlugin::GetDeviceList(const ConfigType& config) const {
+    std::vector<DeviceName> deviceList;
 
     auto deviceListConfig = config.find(IE::KEY_AUTO_DEVICE_LIST);
     if (deviceListConfig == config.end()) {
-        availableDevices = GetCore()->GetAvailableDevices();
+        deviceList = GetCore()->GetAvailableDevices();
     } else {
-        availableDevices = IE::DeviceIDParser::getHeteroDevices(deviceListConfig->second);
+        deviceList = IE::DeviceIDParser::getHeteroDevices(deviceListConfig->second);
     }
 
-    auto getDeviceConfig = [&] (const DeviceName & deviceWithID) {
-        IE::DeviceIDParser deviceParser(deviceWithID);
-        std::string deviceName = deviceParser.getDeviceName();
-        ConfigType tconfig = config;
-
-        // set device ID if any
-        std::string deviceIDLocal = deviceParser.getDeviceID();
-        if (!deviceIDLocal.empty()) {
-            tconfig[IE::PluginConfigParams::KEY_DEVICE_ID] = deviceIDLocal;
-        }
-
-        return GetSupportedConfig(tconfig, deviceName);
-    };
-
-    for (auto && d : availableDevices) {
-        if (d != _pluginName) {
-            metaDevices.push_back({ d, getDeviceConfig(d)});
-        }
-    }
-
-    if (metaDevices.empty()) {
+    if (deviceList.empty()) {
         IE_THROW() << "Please, check environment due to no supported devices can be used";
     }
 
-    return metaDevices;
+    return deviceList;
 }
 
 std::vector<std::string> AutoInferencePlugin::GetOptimizationCapabilities(const std::map<std::string, IE::Parameter> & options) const {
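GetDeviceList now returns bare device names: either everything GetCore()->GetAvailableDevices() reports, or the comma-separated value of AUTO_DEVICE_LIST. A minimal stand-in for that parsing step (SplitDeviceList is a hypothetical helper; the plugin itself relies on IE::DeviceIDParser::getHeteroDevices):

#include <sstream>
#include <string>
#include <vector>

// Splits "CPU,GPU.0" into {"CPU", "GPU.0"}; device-ID suffixes stay attached to the name.
std::vector<std::string> SplitDeviceList(const std::string& priorities) {
    std::vector<std::string> devices;
    std::stringstream ss(priorities);
    std::string device;
    while (std::getline(ss, device, ',')) {
        if (!device.empty()) {
            devices.push_back(device);
        }
    }
    return devices;
}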
@@ -215,7 +209,21 @@ ConfigType AutoInferencePlugin::GetSupportedConfig(const ConfigType& config,
     return supportedConfig;
 }
 
-DeviceInformation AutoInferencePlugin::SelectDevice(const std::vector<DeviceInformation>& metaDevices, const std::string& networkPrecision) {
+void AutoInferencePlugin::CheckConfig(const ConfigType& config) {
+    std::vector<std::string> supportedConfigKeys = GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS), {});
+    for (auto&& c : config) {
+        auto itKey = std::find(supportedConfigKeys.begin(), supportedConfigKeys.end(), c.first);
+        if (supportedConfigKeys.end() == itKey) {
+            // CVS-57233
+            if (c.first.find("AUTO_") == 0) {
+                continue;
+            }
+            IE_THROW() << "AUTO plugin doesn't support config key " << c.first;
+        }
+    }
+}
+
+DeviceName AutoInferencePlugin::SelectDevice(const std::vector<DeviceName>& metaDevices, const std::string& networkPrecision) {
     if (metaDevices.empty()) {
         IE_THROW(NotFound) << "No available device to select in AUTO plugin";
     }
@@ -223,15 +231,15 @@ DeviceInformation AutoInferencePlugin::SelectDevice(const std::vector<DeviceInfo
         return metaDevices.at(0);
     }
 
-    std::vector<DeviceInformation> CPU;
-    std::vector<DeviceInformation> GPU;
+    std::vector<DeviceName> CPU;
+    std::vector<DeviceName> GPU;
 
     for (auto& item : metaDevices) {
-        if (item.deviceName.find("CPU") == 0) {
+        if (item.find("CPU") == 0) {
             CPU.push_back(item);
             continue;
         }
-        if (item.deviceName.find("GPU") == 0) {
+        if (item.find("GPU") == 0) {
             GPU.push_back(item);
             continue;
         }
@@ -242,10 +250,10 @@ DeviceInformation AutoInferencePlugin::SelectDevice(const std::vector<DeviceInfo
     }
 
     // Sort GPU by name: GPU.2 > GPU.1 > GPU.0 > GPU, so we always choose the GPU[0] as best device
-    std::sort(GPU.begin(), GPU.end(), [](const DeviceInformation& a, const DeviceInformation& b)->bool{return b.deviceName < a.deviceName;});
+    std::sort(GPU.begin(), GPU.end(), [](const DeviceName& a, const DeviceName& b)->bool{return b < a;});
 
     for (auto&& item : GPU) {
-        std::vector<std::string> capability = GetCore()->GetMetric(item.deviceName, METRIC_KEY(OPTIMIZATION_CAPABILITIES));
+        std::vector<std::string> capability = GetCore()->GetMetric(item, METRIC_KEY(OPTIMIZATION_CAPABILITIES));
         auto res = std::find(capability.begin(), capability.end(), networkPrecision);
         if (res != capability.end()) {
             return item;
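The comparator above is a plain descending string comparison, which is what pushes the highest-numbered GPU name to GPU[0]; a self-contained sketch of the resulting order:

#include <algorithm>
#include <string>
#include <vector>

int main() {
    std::vector<std::string> gpus = {"GPU", "GPU.1", "GPU.0", "GPU.2"};
    std::sort(gpus.begin(), gpus.end(),
              [](const std::string& a, const std::string& b) { return b < a; });
    // gpus is now {"GPU.2", "GPU.1", "GPU.0", "GPU"}
    return 0;
}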
@@ -30,10 +30,11 @@ public:
     void SetConfig(const ConfigType& config) override;
 
 private:
-    std::vector<AutoPlugin::DeviceInformation> GetDeviceChoice(const ConfigType& config) const;
+    std::vector<DeviceName> GetDeviceList(const ConfigType& config) const;
     std::vector<std::string> GetOptimizationCapabilities(const std::map<std::string, IE::Parameter>& options) const;
-    DeviceInformation SelectDevice(const std::vector<DeviceInformation>& metaDevices, const std::string& networkPrecision = METRIC_VALUE(FP32));
-    ConfigType GetSupportedConfig(const ConfigType& config, const AutoPlugin::DeviceName & deviceName) const;
+    DeviceName SelectDevice(const std::vector<DeviceName>& metaDevices, const std::string& networkPrecision = METRIC_VALUE(FP32));
+    ConfigType GetSupportedConfig(const ConfigType& config, const DeviceName & deviceName) const;
+    void CheckConfig(const ConfigType& config);
     static ConfigType mergeConfigs(ConfigType config, const ConfigType& local);
 
     template <typename T>
@@ -41,18 +42,21 @@ private:
         if (GetCore() == nullptr) {
             IE_THROW() << "Please, work with AUTO device via InferencEngine::Core object";
         }
+
+        CheckConfig(config);
+
         auto fullConfig = mergeConfigs(_config, config);
-        auto metaDevices = GetDeviceChoice(fullConfig);
-        DeviceInformation selectedDevice;
+        auto metaDevices = GetDeviceList(fullConfig);
+        DeviceName selectedDevice;
         IE::SoExecutableNetworkInternal executableNetwork;
         while (!metaDevices.empty()) {
             selectedDevice = SelectDevice(metaDevices, networkPrecision);
             try {
-                executableNetwork = GetCore()->LoadNetwork(param, selectedDevice.deviceName, selectedDevice.config);
+                executableNetwork = GetCore()->LoadNetwork(param, selectedDevice, {});
                 break;
             } catch (...) {
                 auto eraseDevice = std::find_if(metaDevices.begin(), metaDevices.end(),
-                    [=](const DeviceInformation& d)->bool{return d.deviceName == selectedDevice.deviceName;});
+                    [=](const DeviceName& d)->bool{return d == selectedDevice;});
                 if (eraseDevice == metaDevices.end()) {
                     IE_THROW() << "Didn't find the selected device name";
                 }
@@ -63,7 +67,10 @@ private:
         if (!executableNetwork) {
             IE_THROW() << "Failed to load network by AUTO plugin";
         }
-        auto impl = std::make_shared<AutoExecutableNetwork>(executableNetwork);
+
+        bool enablePerfCount = fullConfig.find(IE::PluginConfigParams::KEY_PERF_COUNT) != fullConfig.end();
+
+        auto impl = std::make_shared<AutoExecutableNetwork>(executableNetwork, enablePerfCount);
 
         if (std::is_same<std::string, T>::value) {
             SetExeNetworkInfo(impl, executableNetwork->GetInputsInfo(),
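The LoadNetworkImpl change above keeps the existing try-and-fall-back shape, just over plain device names. A standalone sketch of that loop; LoadWithFallback, LoadOnDevice and SelectBest are hypothetical placeholders for the real GetCore()->LoadNetwork() and SelectDevice() calls:

#include <algorithm>
#include <stdexcept>
#include <string>
#include <vector>

template <typename LoadFn, typename SelectFn>
std::string LoadWithFallback(std::vector<std::string> candidates,
                             LoadFn LoadOnDevice, SelectFn SelectBest) {
    while (!candidates.empty()) {
        const std::string device = SelectBest(candidates);  // always returns one of `candidates`
        try {
            LoadOnDevice(device);
            return device;  // loaded successfully, stop here
        } catch (...) {
            // Drop the failed device and try the next best one.
            candidates.erase(std::find(candidates.begin(), candidates.end(), device));
        }
    }
    throw std::runtime_error("Failed to load network by AUTO plugin");
}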
@@ -42,18 +42,7 @@ namespace {
     };
 
     const std::vector<std::map<std::string, std::string>> AutoConfigs = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_NUMA}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, "8"}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, InferenceEngine::PluginConfigParams::NO}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, InferenceEngine::PluginConfigParams::YES}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "10"}}
+            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
     };
 
     INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, CorrectConfigTests,
@@ -93,22 +82,14 @@ namespace {
     };
 
     const std::vector<std::map<std::string, std::string>> autoinconfigs = {
             {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, "OFF"}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, "OFF"}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "NAN"}}
+             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, "OFF"}}
     };
 
     const std::vector<std::map<std::string, std::string>> multiconf = {
             {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_CPU}}
     };
 
-    const std::vector<std::map<std::string, std::string>> autoconf = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
-    };
-
     INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, CorrectConfigAPITests,
             ::testing::Combine(
                     ::testing::ValuesIn(netPrecisions),
|
|||||||
::testing::Combine(
|
::testing::Combine(
|
||||||
::testing::ValuesIn(netPrecisions),
|
::testing::ValuesIn(netPrecisions),
|
||||||
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
||||||
::testing::ValuesIn(autoconf)),
|
::testing::ValuesIn(AutoConfigs)),
|
||||||
CorrectConfigAPITests::getTestCaseName);
|
CorrectConfigAPITests::getTestCaseName);
|
||||||
|
|
||||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigTests,
|
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigTests,
|
||||||
@ -144,13 +125,6 @@ namespace {
|
|||||||
::testing::ValuesIn(multiinconfigs)),
|
::testing::ValuesIn(multiinconfigs)),
|
||||||
IncorrectConfigTests::getTestCaseName);
|
IncorrectConfigTests::getTestCaseName);
|
||||||
|
|
||||||
INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, IncorrectConfigTests,
|
|
||||||
::testing::Combine(
|
|
||||||
::testing::ValuesIn(netPrecisions),
|
|
||||||
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
|
||||||
::testing::ValuesIn(autoinconfigs)),
|
|
||||||
IncorrectConfigTests::getTestCaseName);
|
|
||||||
|
|
||||||
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigAPITests,
|
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigAPITests,
|
||||||
::testing::Combine(
|
::testing::Combine(
|
||||||
::testing::ValuesIn(netPrecisions),
|
::testing::ValuesIn(netPrecisions),
|
||||||
@ -166,10 +140,10 @@ namespace {
|
|||||||
IncorrectConfigAPITests::getTestCaseName);
|
IncorrectConfigAPITests::getTestCaseName);
|
||||||
|
|
||||||
INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, IncorrectConfigAPITests,
|
INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, IncorrectConfigAPITests,
|
||||||
::testing::Combine(
|
::testing::Combine(
|
||||||
::testing::ValuesIn(netPrecisions),
|
::testing::ValuesIn(netPrecisions),
|
||||||
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
||||||
::testing::ValuesIn(autoinconfigs)),
|
::testing::ValuesIn(autoinconfigs)),
|
||||||
IncorrectConfigAPITests::getTestCaseName);
|
IncorrectConfigAPITests::getTestCaseName);
|
||||||
|
|
||||||
} // namespace
|
} // namespace
|
@@ -51,20 +51,7 @@ namespace {
     };
 
     const std::vector<std::map<std::string, std::string>> AutoConfigs = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS,
-              InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS,
-              InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_NUMA}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, "8"}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, InferenceEngine::PluginConfigParams::NO}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD, InferenceEngine::PluginConfigParams::YES}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "10"}}
+            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
     };
 
     INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferConfigTests,
@@ -26,9 +26,7 @@ namespace {
     };
 
     const std::vector<std::map<std::string, std::string>> autoConfigs = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}}
+            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
     };
 
     INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestInputTests,
@@ -22,9 +22,7 @@ namespace {
     };
 
     const std::vector<std::map<std::string, std::string>> autoConfigs = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}}
+            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_CPU}}
     };
 
     INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestOutputTests,
@@ -14,10 +14,6 @@ namespace {
             {{ MULTI_CONFIG_KEY(DEVICE_PRIORITIES) , CommonTestUtils::DEVICE_CPU}}
     };
 
-    const std::vector<std::map<std::string, std::string>> Autoconfigs = {
-            {{ AUTO_CONFIG_KEY(DEVICE_LIST) , CommonTestUtils::DEVICE_CPU}}
-    };
-
     INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PerfCountersTest,
             ::testing::Combine(
                     ::testing::Values(InferenceEngine::Precision::FP32),
|
|||||||
::testing::ValuesIn(Multiconfigs)),
|
::testing::ValuesIn(Multiconfigs)),
|
||||||
PerfCountersTest::getTestCaseName);
|
PerfCountersTest::getTestCaseName);
|
||||||
|
|
||||||
INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, PerfCountersTest,
|
|
||||||
::testing::Combine(
|
|
||||||
::testing::Values(InferenceEngine::Precision::FP32),
|
|
||||||
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
|
||||||
::testing::ValuesIn(Autoconfigs)),
|
|
||||||
PerfCountersTest::getTestCaseName);
|
|
||||||
|
|
||||||
} // namespace
|
} // namespace
|
||||||
|
@@ -37,9 +37,7 @@ namespace {
     };
 
    const std::vector<std::map<std::string, std::string>> AutoConfigsInputOutput = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_CPU}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_CPU},
-             {InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}}
+            {{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_CPU}}
     };
 
     const std::vector<std::map<std::string, std::string>> configsOutput = {
@@ -67,6 +67,8 @@ std::vector<std::string> disabledTestPatterns() {
 
         // TODO: 55656 AUTO plugin and QueryNetwork
         R"(.*CoreThreading.*smoke_QueryNetwork.*targetDevice=AUTO_config.*)",
+        // Unsupported config KEY_ENFORCE_BF16 for AUTO plugin
+        R"(.*smoke_SetBlobOfKindAUTO.*SetBlobOfKindTest.CompareWithRefs.*)",
         // reference doesn't cover I8, U8 cases. Issue: 55842
         R"(.*Gather7LayerTest.*netPRC=I8.*)",
     };
@@ -106,6 +106,13 @@ namespace {
                     ::testing::ValuesIn(autoconf)),
             CorrectConfigAPITests::getTestCaseName);
 
+    INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, CorrectConfigAPITests,
+            ::testing::Combine(
+                    ::testing::ValuesIn(netPrecisions),
+                    ::testing::Values(CommonTestUtils::DEVICE_AUTO),
+                    ::testing::ValuesIn(auto_cpu_gpu_conf)),
+            CorrectConfigAPITests::getTestCaseName);
+
     INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigAPITests,
             ::testing::Combine(
                     ::testing::ValuesIn(netPrecisions),
|
|||||||
::testing::Combine(
|
::testing::Combine(
|
||||||
::testing::ValuesIn(netPrecisions),
|
::testing::ValuesIn(netPrecisions),
|
||||||
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
||||||
::testing::ValuesIn(autoconf)),
|
::testing::ValuesIn(autoinconfigs)),
|
||||||
IncorrectConfigAPITests::getTestCaseName);
|
IncorrectConfigAPITests::getTestCaseName);
|
||||||
|
|
||||||
INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, IncorrectConfigAPITests,
|
INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, IncorrectConfigAPITests,
|
||||||
::testing::Combine(
|
::testing::Combine(
|
||||||
::testing::ValuesIn(netPrecisions),
|
::testing::ValuesIn(netPrecisions),
|
||||||
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
||||||
::testing::ValuesIn(auto_cpu_gpu_conf)),
|
::testing::ValuesIn(autoinconfigs)),
|
||||||
IncorrectConfigAPITests::getTestCaseName);
|
IncorrectConfigAPITests::getTestCaseName);
|
||||||
|
|
||||||
|
|
||||||
|
@@ -26,9 +26,7 @@ namespace {
     };
 
     const std::vector<std::map<std::string, std::string>> autoConfigs = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_GPU},
-             {InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS,
-              InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
+            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_GPU}}
     };
 
     const std::vector<std::map<std::string, std::string>> auto_cpu_gpu_conf = {
@@ -22,8 +22,7 @@ namespace {
     };
 
     const std::vector<std::map<std::string, std::string>> autoConfigs = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_GPU},
-             {InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
+            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , CommonTestUtils::DEVICE_GPU}}
     };
 
     const std::vector<std::map<std::string, std::string>> auto_cpu_gpu_conf = {
@@ -14,14 +14,6 @@ namespace {
             {{ MULTI_CONFIG_KEY(DEVICE_PRIORITIES) , CommonTestUtils::DEVICE_GPU}}
     };
 
-    const std::vector<std::map<std::string, std::string>> Autoconfigs = {
-            {{ AUTO_CONFIG_KEY(DEVICE_LIST) , CommonTestUtils::DEVICE_GPU}}
-    };
-
-    const std::vector<std::map<std::string, std::string>> auto_cpu_gpu_conf = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , std::string(CommonTestUtils::DEVICE_CPU) + "," + CommonTestUtils::DEVICE_GPU}}
-    };
-
     INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PerfCountersTest,
             ::testing::Combine(
                     ::testing::Values(InferenceEngine::Precision::FP32),
|
|||||||
::testing::ValuesIn(Multiconfigs)),
|
::testing::ValuesIn(Multiconfigs)),
|
||||||
PerfCountersTest::getTestCaseName);
|
PerfCountersTest::getTestCaseName);
|
||||||
|
|
||||||
INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, PerfCountersTest,
|
|
||||||
::testing::Combine(
|
|
||||||
::testing::Values(InferenceEngine::Precision::FP32),
|
|
||||||
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
|
||||||
::testing::ValuesIn(Autoconfigs)),
|
|
||||||
PerfCountersTest::getTestCaseName);
|
|
||||||
|
|
||||||
INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, PerfCountersTest,
|
|
||||||
::testing::Combine(
|
|
||||||
::testing::Values(InferenceEngine::Precision::FP32),
|
|
||||||
::testing::Values(CommonTestUtils::DEVICE_AUTO),
|
|
||||||
::testing::ValuesIn(auto_cpu_gpu_conf)),
|
|
||||||
PerfCountersTest::getTestCaseName);
|
|
||||||
|
|
||||||
} // namespace
|
} // namespace
|
||||||
|
@@ -28,7 +28,7 @@ namespace {
     };
 
     const std::vector<std::map<std::string, std::string>> auto_cpu_gpu_conf = {
            {{InferenceEngine::KEY_AUTO_DEVICE_LIST , std::string(CommonTestUtils::DEVICE_CPU) + "," + CommonTestUtils::DEVICE_GPU}}
     };
 
     const std::vector<std::map<std::string, std::string>> configsInput = {
@@ -42,18 +42,6 @@ namespace {
             {InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
     };
 
-    const std::vector<std::map<std::string, std::string>> AutoConfigsInputOutput = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_GPU}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST, CommonTestUtils::DEVICE_GPU},
-             {InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
-    };
-
-    const std::vector<std::map<std::string, std::string>> AutoCGConfigsInputOutput = {
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST, std::string(CommonTestUtils::DEVICE_CPU) + "," + CommonTestUtils::DEVICE_GPU}},
-            {{InferenceEngine::KEY_AUTO_DEVICE_LIST, std::string(CommonTestUtils::DEVICE_CPU) + "," + CommonTestUtils::DEVICE_GPU},
-             {InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
-    };
-
     const std::vector<std::map<std::string, std::string>> configsOutput = {
             {},
             {{InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO}}
@@ -77,14 +65,14 @@ namespace {
             ::testing::Combine(
                     ::testing::ValuesIn(netPrecisions),
                     ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                    ::testing::ValuesIn(AutoConfigsInputOutput)),
+                    ::testing::ValuesIn(AutoConfigs)),
             BehaviorTestOutput::getTestCaseName);
 
     INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, BehaviorTestOutput,
             ::testing::Combine(
                     ::testing::ValuesIn(netPrecisions),
                     ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                    ::testing::ValuesIn(AutoCGConfigsInputOutput)),
+                    ::testing::ValuesIn(auto_cpu_gpu_conf)),
             BehaviorTestOutput::getTestCaseName);
 
     INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, BehaviorTests,
@@ -133,14 +121,14 @@ namespace {
             ::testing::Combine(
                     ::testing::ValuesIn(netPrecisions),
                     ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                    ::testing::ValuesIn(AutoConfigsInputOutput)),
+                    ::testing::ValuesIn(AutoConfigs)),
             BehaviorTestInput::getTestCaseName);
 
     INSTANTIATE_TEST_CASE_P(smoke_AutoCG_BehaviorTests, BehaviorTestInput,
             ::testing::Combine(
                     ::testing::ValuesIn(netPrecisions),
                     ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                    ::testing::ValuesIn(AutoCGConfigsInputOutput)),
+                    ::testing::ValuesIn(auto_cpu_gpu_conf)),
             BehaviorTestInput::getTestCaseName);
 
 } // namespace
@@ -57,8 +57,7 @@ namespace BehaviorTestsDefinitions {
         // Create CNNNetwork from ngrpah::Function
         InferenceEngine::CNNNetwork cnnNet(function);
         if (targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
-            targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
-            targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
+            targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
             ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
             ASSERT_THROW(ie->SetConfig(configuration, targetDevice),
                          InferenceEngine::Exception);
@@ -73,8 +72,12 @@ namespace BehaviorTestsDefinitions {
         SKIP_IF_CURRENT_TEST_IS_DISABLED()
         // Create CNNNetwork from ngrpah::Function
         InferenceEngine::CNNNetwork cnnNet(function);
-        ASSERT_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration),
-                     InferenceEngine::Exception);
+        if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) != std::string::npos) {
+            GTEST_SKIP();
+        } else {
+            ASSERT_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration),
+                         InferenceEngine::Exception);
+        }
     }
 
     using IncorrectConfigAPITests = BehaviorTestsUtils::BehaviorTestsBasic;
@@ -110,8 +113,10 @@ namespace BehaviorTestsDefinitions {
             ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
         }
         // Load CNNNetwork to target plugins
-        auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
-        execNet.CreateInferRequest();
+        if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
+            auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
+            execNet.CreateInferRequest();
+        }
 
         if ((targetDevice == CommonTestUtils::DEVICE_HDDL) || (targetDevice == CommonTestUtils::DEVICE_GNA)) {
             ASSERT_EQ(0u, InferenceEngine::ExecutorManager::getInstance()->getExecutorsNumber());
@@ -139,8 +144,10 @@ namespace BehaviorTestsDefinitions {
             ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
         }
         // Load CNNNetwork to target plugins
-        auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
-        execNet.CreateInferRequest();
+        if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
+            auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
+            execNet.CreateInferRequest();
+        }
 
         if ((targetDevice == CommonTestUtils::DEVICE_MYRIAD) ||
             (targetDevice == CommonTestUtils::DEVICE_KEEMBAY)) {
@@ -170,8 +177,10 @@ namespace BehaviorTestsDefinitions {
             ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
         }
         // Load CNNNetwork to target plugins
-        auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
-        execNet.CreateInferRequest();
+        if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
+            auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
+            execNet.CreateInferRequest();
+        }
 
         if ((targetDevice == CommonTestUtils::DEVICE_MYRIAD) ||
             (targetDevice == CommonTestUtils::DEVICE_KEEMBAY)) {
|
@ -42,8 +42,10 @@ TEST_P(InferConfigTests, canSetExclusiveAsyncRequests) {
|
|||||||
ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
|
ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
|
||||||
}
|
}
|
||||||
// Load CNNNetwork to target plugins
|
// Load CNNNetwork to target plugins
|
||||||
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
|
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
|
||||||
execNet.CreateInferRequest();
|
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
|
||||||
|
execNet.CreateInferRequest();
|
||||||
|
}
|
||||||
|
|
||||||
if ((targetDevice == CommonTestUtils::DEVICE_HDDL) || (targetDevice == CommonTestUtils::DEVICE_GNA)) {
|
if ((targetDevice == CommonTestUtils::DEVICE_HDDL) || (targetDevice == CommonTestUtils::DEVICE_GNA)) {
|
||||||
ASSERT_EQ(0u, InferenceEngine::ExecutorManager::getInstance()->getExecutorsNumber());
|
ASSERT_EQ(0u, InferenceEngine::ExecutorManager::getInstance()->getExecutorsNumber());
|
||||||
@@ -71,8 +73,10 @@ TEST_P(InferConfigTests, withoutExclusiveAsyncRequests) {
        ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
    }
    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
-    execNet.CreateInferRequest();
+    if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos) {
+        auto execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
+        execNet.CreateInferRequest();
+    }
 
     if ((targetDevice == CommonTestUtils::DEVICE_GNA) || (targetDevice == CommonTestUtils::DEVICE_HDDL)) {
         ASSERT_EQ(0u, InferenceEngine::ExecutorManager::getInstance()->getExecutorsNumber());