Enable cache support for Myriad plugin (#4868)
* Enabling cache support for Myriad plugin. Remaining issues:
  1) 49309 - support DEVICE_ARCHITECTURE
  2) Fix 51472 for import/export issue with batch size > 1
* Added 'options' to DeviceArchitecture and more clarifications
Parent: 86eebbdfac
Commit: 5caa706334
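
This commit wires the MYRIAD plugin into the Inference Engine model-caching flow. As a quick orientation (not part of the diff), this is roughly how an application would turn the feature on, assuming the standard CACHE_DIR config key; the model path and cache directory below are placeholders:

// Hedged sketch of enabling model caching for the MYRIAD device.
#include <inference_engine.hpp>

int main() {
    InferenceEngine::Core core;
    // Point the runtime at a cache directory; repeated LoadNetwork calls for the
    // same model and device architecture can then be served from the cache.
    core.SetConfig({{CONFIG_KEY(CACHE_DIR), "./model_cache"}}, "MYRIAD");
    auto network = core.ReadNetwork("model.xml");
    auto execNetwork = core.LoadNetwork(network, "MYRIAD");
    return 0;
}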
@@ -26,6 +26,8 @@ MyriadMetrics::MyriadMetrics() {
         METRIC_KEY(OPTIMIZATION_CAPABILITIES),
         METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS),
         METRIC_KEY(DEVICE_THERMAL),
+        METRIC_KEY(DEVICE_ARCHITECTURE),
+        METRIC_KEY(IMPORT_EXPORT_SUPPORT),
     };
 
 IE_SUPPRESS_DEPRECATED_START
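
With the two metric keys above registered, the architecture and import/export capability become visible through the public Core API. A minimal sketch of querying them, not specific to this patch (device name "MYRIAD" is the usual alias):

#include <inference_engine.hpp>
#include <iostream>
#include <string>

int main() {
    InferenceEngine::Core core;
    // DEVICE_ARCHITECTURE groups devices that may share a single model cache.
    auto arch = core.GetMetric("MYRIAD", METRIC_KEY(DEVICE_ARCHITECTURE)).as<std::string>();
    // IMPORT_EXPORT_SUPPORT signals that compiled networks can be exported and re-imported.
    auto canImportExport = core.GetMetric("MYRIAD", METRIC_KEY(IMPORT_EXPORT_SUPPORT)).as<bool>();
    std::cout << "architecture: " << arch
              << ", import/export: " << std::boolalpha << canImportExport << std::endl;
    return 0;
}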
@@ -114,6 +116,14 @@ const std::unordered_set<std::string>& MyriadMetrics::OptimizationCapabilities()
     return _optimizationCapabilities;
 }
 
+std::string MyriadMetrics::DeviceArchitecture(const std::map<std::string, InferenceEngine::Parameter> & options) const {
+    // TODO: Task 49309. Return the same architecture string for devices that can share the same cache.
+    // E.g. when device "MYRIAD.ma2480-1" is loaded, options.at("DEVICE_ID") will be "ma2480-1".
+    // For DEVICE_ID="ma2480-0" and DEVICE_ID="ma2480-1" this method shall return the same string, e.g. "ma2480".
+    // In that case the Inference Engine can reuse the cached model and reduce the total LoadNetwork time.
+    return "MYRIAD";
+}
+
 RangeType MyriadMetrics::RangeForAsyncInferRequests(
     const std::map<std::string, std::string>& config) const {
 
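
The TODO above (Task 49309) is left open in this commit: DeviceArchitecture() returns a constant for now. A hypothetical follow-up could derive the shared architecture string from the DEVICE_ID option; the helper name and parsing rule below are assumptions, simplified to a plain string map:

#include <map>
#include <string>

// Hypothetical sketch for Task 49309 (not part of this commit): map "ma2480-0"
// and "ma2480-1" to the same architecture string so they share one cache entry.
std::string DeviceArchitectureSketch(const std::map<std::string, std::string>& options) {
    auto it = options.find("DEVICE_ID");
    if (it == options.end() || it->second.empty()) {
        return "MYRIAD";                      // no specific device requested
    }
    const std::string& id = it->second;       // e.g. "ma2480-1"
    return id.substr(0, id.rfind('-'));       // e.g. "ma2480" for both "ma2480-0" and "ma2480-1"
}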
@@ -36,6 +36,7 @@ public:
                         const std::vector<DevicePtr> &devicePool) const;
 
     std::string FullName(std::string deviceName) const;
+    std::string DeviceArchitecture(const std::map<std::string, InferenceEngine::Parameter> & options) const;
     float DevicesThermal(const DevicePtr& device) const;
     const std::unordered_set<std::string>& SupportedMetrics() const;
     const std::unordered_set<std::string>& SupportedConfigKeys() const;
@@ -207,6 +207,10 @@ InferenceEngine::Parameter Engine::GetMetric(const std::string& name,
         IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, std::vector<std::string>{optimizationCapabilities.cbegin(), optimizationCapabilities.cend()});
     } else if (name == METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS)) {
         IE_SET_METRIC_RETURN(RANGE_FOR_ASYNC_INFER_REQUESTS, _metrics->RangeForAsyncInferRequests(_config));
+    } else if (name == METRIC_KEY(DEVICE_ARCHITECTURE)) {
+        IE_SET_METRIC_RETURN(DEVICE_ARCHITECTURE, _metrics->DeviceArchitecture(options));
+    } else if (name == METRIC_KEY(IMPORT_EXPORT_SUPPORT)) {
+        IE_SET_METRIC_RETURN(IMPORT_EXPORT_SUPPORT, true);
     } else if (name == METRIC_KEY(DEVICE_THERMAL)) {
         const auto& device = getDeviceByName(getSpecifiedDeviceName());
         if (device != nullptr) {
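
These two branches are what the common caching code keys on when deciding whether a compiled network for this plugin can be stored and re-imported. A simplified, hypothetical sketch of that decision follows; the function name is illustrative and the real logic lives in the Inference Engine core:

#include <inference_engine.hpp>
#include <algorithm>
#include <string>
#include <vector>

// Illustrative only: true when the device advertises both metrics used by the
// caching flow and reports import/export as supported.
bool DeviceSupportsModelCaching(InferenceEngine::Core& core, const std::string& deviceName) {
    auto metrics = core.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
    auto has = [&](const std::string& key) {
        return std::find(metrics.begin(), metrics.end(), key) != metrics.end();
    };
    if (!has(METRIC_KEY(IMPORT_EXPORT_SUPPORT)) || !has(METRIC_KEY(DEVICE_ARCHITECTURE))) {
        return false;
    }
    return core.GetMetric(deviceName, METRIC_KEY(IMPORT_EXPORT_SUPPORT)).as<bool>();
}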
@@ -7,7 +7,7 @@
 using namespace LayerTestsDefinitions;
 
 namespace {
-static const std::vector<ngraph::element::Type> precisionsMyriad = {
+static const std::vector<ngraph::element::Type> nightly_precisionsMyriad = {
         ngraph::element::f32,
         ngraph::element::f16,
         ngraph::element::i32,
@@ -15,14 +15,34 @@ namespace {
         ngraph::element::u8,
 };
 
+static const std::vector<ngraph::element::Type> smoke_precisionsMyriad = {
+        ngraph::element::f32,
+};
+
 static const std::vector<std::size_t> batchSizesMyriad = {
         1, 2
 };
 
+static std::vector<nGraphFunctionWithName> smoke_functions() {
+    auto funcs = LoadNetworkCacheTestBase::getStandardFunctions();
+    if (funcs.size() > 1) {
+        funcs.erase(funcs.begin() + 1, funcs.end());
+    }
+    return funcs;
+}
+
 INSTANTIATE_TEST_CASE_P(smoke_CachingSupportCase_Myriad, LoadNetworkCacheTestBase,
+                        ::testing::Combine(
+                                ::testing::ValuesIn(smoke_functions()),
+                                ::testing::ValuesIn(smoke_precisionsMyriad),
+                                ::testing::ValuesIn(batchSizesMyriad),
+                                ::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
+                        LoadNetworkCacheTestBase::getTestCaseName);
+
+INSTANTIATE_TEST_CASE_P(nightly_CachingSupportCase_Myriad, LoadNetworkCacheTestBase,
                         ::testing::Combine(
                                 ::testing::ValuesIn(LoadNetworkCacheTestBase::getStandardFunctions()),
-                                ::testing::ValuesIn(precisionsMyriad),
+                                ::testing::ValuesIn(nightly_precisionsMyriad),
                                 ::testing::ValuesIn(batchSizesMyriad),
                                 ::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
                         LoadNetworkCacheTestBase::getTestCaseName);
@@ -35,5 +35,7 @@ std::vector<std::string> disabledTestPatterns() {
         ".*ProposalLayerTest.*",
         // TODO: Issue 48183
         R"(.*CTCGreedyDecoderSeqLen.*?\(1.1.1\).*)",
+        // TODO: Issue 51472
+        ".*CachingSupportCase.*_batch2_.*",
     };
 }