From 560dba22cb761fb9817d13093a70b38898e045de Mon Sep 17 00:00:00 2001
From: "Wang, Yang"
Date: Mon, 7 Nov 2022 10:36:25 +0800
Subject: [PATCH] Fix the logic issue of handling the ov::device::properties in the ie core. (#13482)

* Update the logic to handle the ov::device::properties in the ie core.
Signed-off-by: Wang, Yang
* Update test cases.
Signed-off-by: Wang, Yang
* 1. Remove the config filter within AUTO plugin when passing the configs to the target hardware device through the AUTO plugin.
2. Add test cases to check if the secondary properties have been passed to the target device through the virtual device.
Signed-off-by: Wang, Yang
* Add mock tests to check if device properties work through the AUTO or MULTI plugin.
Signed-off-by: Wang, Yang
* Update.
Signed-off-by: Wang, Yang
* Add mock header file for LoadNetwork with device properties testing.
Signed-off-by: Wang, Yang
* Update.
Signed-off-by: Wang, Yang
* Update gtest commit to fix the warning issue that appeared in the macro MATCHER_P.
Signed-off-by: Wang, Yang
Signed-off-by: Wang, Yang
---
 src/inference/src/ie_core.cpp | 14 +-
 src/plugins/auto/plugin.cpp | 35 +--
 src/plugins/auto/plugin.hpp | 2 +-
 .../behavior/ov_plugin/core_integration.cpp | 27 +-
 .../behavior/ov_plugin/core_integration.hpp | 13 +
 .../auto_load_network_properties_test.cpp | 273 ++++++++++++++++++
 .../auto/plugin/mock_auto_device_plugin.hpp | 2 +-
 .../plugin/mock_load_network_properties.hpp | 32 ++
 thirdparty/gtest/gtest | 2 +-
 9 files changed, 360 insertions(+), 40 deletions(-)
 create mode 100644 src/tests/unit/auto/auto_load_network_properties_test.cpp
 create mode 100644 src/tests/unit/auto/plugin/mock_load_network_properties.hpp

diff --git a/src/inference/src/ie_core.cpp b/src/inference/src/ie_core.cpp index 1a052496264..750c53bf02f 100644 --- a/src/inference/src/ie_core.cpp +++ b/src/inference/src/ie_core.cpp @@ -182,13 +182,15 @@ void allowNotImplemented(F&& f) { ov::AnyMap flatten_sub_properties(const std::string& device, const ov::AnyMap& properties) { ov::AnyMap result = properties; + bool isVirtualDev = device.find("AUTO") != std::string::npos || device.find("MULTI") != std::string::npos || + device.find("HETERO") != std::string::npos; for (auto item = result.begin(); item != result.end();) { auto parsed = parseDeviceNameIntoConfig(item->first); if (!item->second.is()) { item++; continue; } - if (device.find(parsed._deviceName) != std::string::npos) { + if (device == parsed._deviceName) { // 1. flatten the scondary property for target device for (auto&& sub_property : item->second.as()) { // 1.1 1st level property overides 2nd level property @@ -197,12 +199,12 @@ ov::AnyMap flatten_sub_properties(const std::string& device, const ov::AnyMap& p result[sub_property.first] = sub_property.second; } item = result.erase(item); - } else if (device != "AUTO" && device != "MULTI" && device != "HETERO") { - // 2. remove the secondary property setting for other hard ware device - item = result.erase(item); - } else { - // 3. keep the secondary property for the other virtual devices + } else if (isVirtualDev) { + // 2. keep the secondary property for the other virtual devices + item++; + } else { + // 3. 
remove the secondary property setting for other hardware device + item = result.erase(item); } } return result; diff --git a/src/plugins/auto/plugin.cpp b/src/plugins/auto/plugin.cpp index 88943462339..6374f29c8ff 100644 --- a/src/plugins/auto/plugin.cpp +++ b/src/plugins/auto/plugin.cpp @@ -443,29 +443,22 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons LOG_INFO_TAG("load with model path"); } } - // replace the configure with configure that auto want to pass to device - // and reset the strDevices to support devices - auto validConfigKey = PerfHintsConfig::SupportedKeys(); - validConfigKey.push_back(PluginConfigParams::KEY_PERF_COUNT); - validConfigKey.push_back(PluginConfigParams::KEY_EXCLUSIVE_ASYNC_REQUESTS); + // reset the strDevices to support devices strDevices = ""; for (auto iter = supportDevices.begin(); iter != supportDevices.end(); iter++) { - std::map deviceConfig; - auto& configs = iter->config; - for (auto& config : configs) { - if (std::find(validConfigKey.begin(), validConfigKey.end(), config.first) != validConfigKey.end()) { - deviceConfig.insert({config.first, config.second}); - LOG_INFO_TAG("device:%s, config:%s=%s", iter->deviceName.c_str(), - config.first.c_str(), config.second.c_str()); - } - } - insertPropToConfig(CONFIG_KEY(ALLOW_AUTO_BATCHING), iter->deviceName, deviceConfig); - insertPropToConfig(CONFIG_KEY(AUTO_BATCH_TIMEOUT), iter->deviceName, deviceConfig); - insertPropToConfig(CONFIG_KEY(CACHE_DIR), iter->deviceName, deviceConfig); - iter->config = deviceConfig; - strDevices += iter->deviceName; - strDevices += ((iter + 1) == supportDevices.end()) ? "" : ","; - LOG_INFO_TAG("device:%s, priority:%ld", iter->deviceName.c_str(), iter->devicePriority); + auto& configs = iter->config; + for (auto& config : configs) { + LOG_INFO_TAG("device:%s, config:%s=%s", + iter->deviceName.c_str(), + config.first.c_str(), + config.second.c_str()); + } + insertPropToConfig(CONFIG_KEY(ALLOW_AUTO_BATCHING), iter->deviceName, configs); + insertPropToConfig(CONFIG_KEY(AUTO_BATCH_TIMEOUT), iter->deviceName, configs); + insertPropToConfig(CONFIG_KEY(CACHE_DIR), iter->deviceName, configs); + strDevices += iter->deviceName; + strDevices += ((iter + 1) == supportDevices.end()) ? 
"" : ","; + LOG_INFO_TAG("device:%s, priority:%ld", iter->deviceName.c_str(), iter->devicePriority); } autoSContext->_modelPath = clonedModelPath; // clone the network, in case of reshape conflict diff --git a/src/plugins/auto/plugin.hpp b/src/plugins/auto/plugin.hpp index e7d66829717..385f9c756b8 100644 --- a/src/plugins/auto/plugin.hpp +++ b/src/plugins/auto/plugin.hpp @@ -48,7 +48,7 @@ public: MOCKTESTMACRO std::string GetDeviceList(const std::map& config) const; - std::list GetValidDevice(const std::vector& metaDevices, + MOCKTESTMACRO std::list GetValidDevice(const std::vector& metaDevices, const std::string& networkPrecision = METRIC_VALUE(FP32)); MOCKTESTMACRO DeviceInformation SelectDevice(const std::vector& metaDevices, diff --git a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/behavior/ov_plugin/core_integration.cpp b/src/plugins/intel_cpu/tests/functional/shared_tests_instances/behavior/ov_plugin/core_integration.cpp index ddd2761a622..fefcb97138b 100644 --- a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/behavior/ov_plugin/core_integration.cpp +++ b/src/plugins/intel_cpu/tests/functional/shared_tests_instances/behavior/ov_plugin/core_integration.cpp @@ -87,39 +87,40 @@ INSTANTIATE_TEST_SUITE_P( const std::vector multiConfigs = { {ov::device::priorities(CommonTestUtils::DEVICE_CPU)} }; - +const std::vector configsDeviceProperties = {{ov::device::properties("CPU", ov::num_streams(3))}}; const std::vector configsWithSecondaryProperties = { + {ov::device::properties("CPU", ov::num_streams(4))}, {ov::device::properties("CPU", - ov::enable_profiling(true), + ov::num_streams(4), ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))}, {ov::device::properties("CPU", - ov::enable_profiling(true), + ov::num_streams(4), ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)), ov::device::properties("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY))}}; const std::vector multiConfigsWithSecondaryProperties = { {ov::device::priorities(CommonTestUtils::DEVICE_CPU), ov::device::properties("CPU", - ov::enable_profiling(true), + ov::num_streams(4), ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))}, {ov::device::priorities(CommonTestUtils::DEVICE_CPU), ov::device::properties("CPU", - ov::enable_profiling(true), + ov::num_streams(4), ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)), ov::device::properties("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY))}}; const std::vector autoConfigsWithSecondaryProperties = { {ov::device::priorities(CommonTestUtils::DEVICE_CPU), ov::device::properties("AUTO", - ov::enable_profiling(true), + ov::enable_profiling(false), ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))}, {ov::device::priorities(CommonTestUtils::DEVICE_CPU), ov::device::properties("CPU", - ov::enable_profiling(true), + ov::num_streams(4), ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))}, {ov::device::priorities(CommonTestUtils::DEVICE_CPU), ov::device::properties("CPU", - ov::enable_profiling(true), + ov::num_streams(4), ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)), ov::device::properties("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY))}, {ov::device::priorities(CommonTestUtils::DEVICE_CPU), @@ -127,7 +128,7 @@ const std::vector autoConfigsWithSecondaryProperties = { ov::enable_profiling(false), ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)), 
ov::device::properties("CPU", - ov::enable_profiling(true), + ov::num_streams(4), ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT))}, {ov::device::priorities(CommonTestUtils::DEVICE_CPU), ov::device::properties("AUTO", @@ -135,7 +136,7 @@ const std::vector autoConfigsWithSecondaryProperties = { ov::device::priorities(CommonTestUtils::DEVICE_GPU), ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)), ov::device::properties("CPU", - ov::enable_profiling(true), + ov::num_streams(4), ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)), ov::device::properties("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY))}}; @@ -270,6 +271,12 @@ INSTANTIATE_TEST_SUITE_P(smoke_AUTO_OVClassLoadNetworkWithSecondaryPropertiesTes ::testing::Combine(::testing::Values("AUTO"), ::testing::ValuesIn(autoConfigsWithSecondaryProperties))); +// IE Class load and check network with ov::device::properties +INSTANTIATE_TEST_SUITE_P(smoke_CPU_OVClassLoadNetworkAndCheckWithSecondaryPropertiesTest, + OVClassLoadNetworkAndCheckSecondaryPropertiesTest, + ::testing::Combine(::testing::Values("CPU", "AUTO:CPU", "MULTI:CPU"), + ::testing::ValuesIn(configsDeviceProperties))); + INSTANTIATE_TEST_SUITE_P( smoke_OVClassLoadNetworkTest, OVClassLoadNetworkTest, ::testing::Values("CPU")); diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/core_integration.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/core_integration.hpp index d0a8b138a76..4b0b68d04fd 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/core_integration.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/core_integration.hpp @@ -114,6 +114,7 @@ using OVClassSpecificDeviceTestGetConfig = OVClassBaseTestP; using OVClassLoadNetworkWithCorrectPropertiesTest = OVClassSetDevicePriorityConfigTest; using OVClassLoadNetworkWithDefaultPropertiesTest = OVClassSetDevicePriorityConfigTest; using OVClassLoadNetworkWithDefaultIncorrectPropertiesTest = OVClassSetDevicePriorityConfigTest; +using OVClassLoadNetworkAndCheckSecondaryPropertiesTest = OVClassSetDevicePriorityConfigTest; class OVClassSeveralDevicesTest : public OVPluginTestBase, public OVClassNetworkTest, @@ -1096,6 +1097,18 @@ TEST_P(OVClassLoadNetworkWithCorrectPropertiesTest, LoadNetworkWithCorrectProper OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, target_device, configuration)); } +TEST_P(OVClassLoadNetworkAndCheckSecondaryPropertiesTest, LoadNetworkAndCheckSecondaryPropertiesTest) { + ov::Core ie = createCoreWithTemplate(); + ov::CompiledModel model; + OV_ASSERT_NO_THROW(model = ie.compile_model(actualNetwork, target_device, configuration)); + auto property = configuration.begin()->second.as(); + auto actual = property.begin()->second.as(); + ov::Any value; + OV_ASSERT_NO_THROW(value = model.get_property(ov::num_streams.name())); + int32_t expect = value.as(); + ASSERT_EQ(actual, expect); +} + TEST_P(OVClassLoadNetworkWithDefaultPropertiesTest, LoadNetworkWithDefaultPropertiesTest) { ov::Core ie = createCoreWithTemplate(); ov::CompiledModel model; diff --git a/src/tests/unit/auto/auto_load_network_properties_test.cpp b/src/tests/unit/auto/auto_load_network_properties_test.cpp new file mode 100644 index 00000000000..a8999d43b3b --- /dev/null +++ b/src/tests/unit/auto/auto_load_network_properties_test.cpp @@ -0,0 +1,273 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include +#include + +#include +#include +#include 
+#include +#include +#include + +#include "cpp/ie_plugin.hpp" +#include "mock_common.hpp" +#include "plugin/mock_load_network_properties.hpp" +#include "unit_test_utils/mocks/cpp_interfaces/impl/mock_inference_plugin_internal.hpp" +#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp" +#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_iexecutable_network_internal.hpp" +#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp" +#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_ivariable_state_internal.hpp" +#include "unit_test_utils/mocks/mock_iinfer_request.hpp" + +using ::testing::_; +using ::testing::MatcherCast; +using ::testing::Matches; +using ::testing::NiceMock; +using ::testing::Return; +using ::testing::ReturnRef; +using ::testing::StrEq; +using ::testing::Throw; +using Config = std::map; + +// define a matcher if all the elements of subMap are contained in the map. +MATCHER_P(MapContains, subMap, "Check if all the elements of the subMap are contained in the map.") { + if (subMap.empty()) + return true; + for (auto& item : subMap) { + auto key = item.first; + auto value = item.second; + auto dest = arg.find(key); + if (dest == arg.end()) { + return false; + } else if (dest->second != value) { + return false; + } + } + return true; +} +using namespace MockMultiDevice; + +using ConfigParams = std::tuple, // hardware device name to expect loading network on + Config>; // secondary property setting to device + +static std::vector testConfigs; + +class LoadNetworkWithSecondaryConfigsMockTest : public ::testing::TestWithParam { +public: + std::shared_ptr> core; + std::shared_ptr> plugin; + InferenceEngine::CNNNetwork simpleCnnNetwork; + // mock cpu exeNetwork + std::shared_ptr> cpuMockIExeNet; + ov::SoPtr cpuMockExeNetwork; + NiceMock* cpuMockIPlugin; + InferenceEngine::InferencePlugin cpuMockPlugin; + + // mock gpu exeNetwork + std::shared_ptr> gpuMockIExeNet; + ov::SoPtr gpuMockExeNetwork; + NiceMock* gpuMockIPlugin; + InferenceEngine::InferencePlugin gpuMockPlugin; + std::shared_ptr> inferReqInternal; + +public: + static std::string getTestCaseName(testing::TestParamInfo obj) { + std::string deviceName; + std::vector targetDevices; + Config deviceConfigs; + std::tie(deviceName, targetDevices, deviceConfigs) = obj.param; + std::ostringstream result; + result << "_virtual_device_" << deviceName; + result << "_loadnetwork_to_device_"; + for (auto& device : targetDevices) { + result << device << "_"; + } + auto cpuConfig = deviceConfigs.find("CPU"); + auto gpuConfig = deviceConfigs.find("GPU"); + result << "device_properties_"; + if (cpuConfig != deviceConfigs.end()) + result << "CPU_" << cpuConfig->second << "_"; + if (gpuConfig != deviceConfigs.end()) + result << "GPU_" << gpuConfig->second; + return result.str(); + } + + static std::vector CreateConfigs() { + testConfigs.clear(); + testConfigs.push_back( + ConfigParams{"AUTO", {"CPU"}, {{"CPU", "NUM_STREAMS 3"}, {"MULTI_DEVICE_PRIORITIES", "CPU,GPU"}}}); + testConfigs.push_back( + ConfigParams{"AUTO", {"CPU", "GPU"}, {{"GPU", "NUM_STREAMS 3"}, {"MULTI_DEVICE_PRIORITIES", "GPU,CPU"}}}); + testConfigs.push_back( + ConfigParams{"AUTO:CPU", {"CPU"}, {{"CPU", "NUM_STREAMS 3"}, {"MULTI_DEVICE_PRIORITIES", "CPU"}}}); + testConfigs.push_back( + ConfigParams{"AUTO:CPU,GPU", {"CPU"}, {{"CPU", "NUM_STREAMS 3"}, {"MULTI_DEVICE_PRIORITIES", "CPU,GPU"}}}); + testConfigs.push_back( + ConfigParams{"AUTO:GPU", {"GPU"}, {{"GPU", "NUM_STREAMS 5"}, {"MULTI_DEVICE_PRIORITIES", 
"GPU"}}}); + testConfigs.push_back(ConfigParams{"AUTO:GPU,CPU", + {"CPU", "GPU"}, + {{"GPU", "NUM_STREAMS 5"}, {"MULTI_DEVICE_PRIORITIES", "GPU,CPU"}}}); + + testConfigs.push_back( + ConfigParams{"MULTI:CPU", {"CPU"}, {{"CPU", "NUM_STREAMS 3"}, {"MULTI_DEVICE_PRIORITIES", "CPU"}}}); + testConfigs.push_back(ConfigParams{"MULTI:CPU,GPU", + {"CPU", "GPU"}, + {{"CPU", "NUM_STREAMS 3"}, {"MULTI_DEVICE_PRIORITIES", "CPU,GPU"}}}); + testConfigs.push_back( + ConfigParams{"MULTI:GPU", {"GPU"}, {{"GPU", "NUM_STREAMS 5"}, {"MULTI_DEVICE_PRIORITIES", "GPU"}}}); + testConfigs.push_back(ConfigParams{"MULTI:GPU,CPU", + {"CPU", "GPU"}, + {{"GPU", "NUM_STREAMS 5"}, {"MULTI_DEVICE_PRIORITIES", "GPU,CPU"}}}); + return testConfigs; + } + + void TearDown() override { + core.reset(); + plugin.reset(); + } + + void SetUp() override { + // prepare cpuMockExeNetwork + cpuMockIExeNet = std::make_shared>(); + auto cpuMockIPluginPtr = std::make_shared>(); + ON_CALL(*cpuMockIPluginPtr, LoadNetwork(MatcherCast(_), _)) + .WillByDefault(Return(cpuMockIExeNet)); + cpuMockPlugin = InferenceEngine::InferencePlugin{cpuMockIPluginPtr, {}}; + // remove annoying ON CALL message + EXPECT_CALL(*cpuMockIPluginPtr, LoadNetwork(MatcherCast(_), _)).Times(1); + cpuMockExeNetwork = cpuMockPlugin.LoadNetwork(CNNNetwork{}, {}); + + // prepare gpuMockExeNetwork + gpuMockIExeNet = std::make_shared>(); + auto gpuMockIPluginPtr = std::make_shared>(); + ON_CALL(*gpuMockIPluginPtr, LoadNetwork(MatcherCast(_), _)) + .WillByDefault(Return(gpuMockIExeNet)); + gpuMockPlugin = InferenceEngine::InferencePlugin{gpuMockIPluginPtr, {}}; + // remove annoying ON CALL message + EXPECT_CALL(*gpuMockIPluginPtr, LoadNetwork(MatcherCast(_), _)).Times(1); + gpuMockExeNetwork = gpuMockPlugin.LoadNetwork(CNNNetwork{}, {}); + + // prepare mockicore and cnnNetwork for loading + core = std::shared_ptr>(new NiceMock()); + auto* origin_plugin = new NiceMock(); + plugin = std::shared_ptr>(origin_plugin); + // replace core with mock Icore + plugin->SetCore(core); + inferReqInternal = std::make_shared>(); + ON_CALL(*cpuMockIExeNet.get(), CreateInferRequest()).WillByDefault(Return(inferReqInternal)); + ON_CALL(*gpuMockIExeNet.get(), CreateInferRequest()).WillByDefault(Return(inferReqInternal)); + + ON_CALL(*cpuMockIExeNet.get(), GetMetric(StrEq(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)))) + .WillByDefault(Return("0")); + ON_CALL(*gpuMockIExeNet.get(), GetMetric(StrEq(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)))) + .WillByDefault(Return("0")); + + std::vector availableDevs = {"CPU", "GPU"}; + ON_CALL(*core, GetAvailableDevices()).WillByDefault(Return(availableDevs)); + + std::vector metrics = {METRIC_KEY(SUPPORTED_CONFIG_KEYS)}; + ON_CALL(*core, GetMetric(_, StrEq(METRIC_KEY(SUPPORTED_METRICS)), _)).WillByDefault(Return(metrics)); + + std::vector configKeys = {"SUPPORTED_CONFIG_KEYS", "NUM_STREAMS"}; + ON_CALL(*core, GetMetric(_, StrEq(METRIC_KEY(SUPPORTED_CONFIG_KEYS)), _)).WillByDefault(Return(configKeys)); + + ON_CALL(*plugin, ParseMetaDevices) + .WillByDefault( + [this](const std::string& priorityDevices, const std::map& config) { + return plugin->MultiDeviceInferencePlugin::ParseMetaDevices(priorityDevices, config); + }); + + ON_CALL(*plugin, SelectDevice) + .WillByDefault([this](const std::vector& metaDevices, + const std::string& netPrecision, + unsigned int priority) { + return plugin->MultiDeviceInferencePlugin::SelectDevice(metaDevices, netPrecision, priority); + }); + + ON_CALL(*plugin, GetValidDevice) + .WillByDefault([this](const std::vector& 
metaDevices, const std::string& netPrecision) { + std::list devices(metaDevices.begin(), metaDevices.end()); + return devices; + }); + + ON_CALL(*core, GetSupportedConfig) + .WillByDefault([](const std::string& device, const std::map& fullConfigs) { + auto item = fullConfigs.find(device); + Config deviceConfigs; + if (item != fullConfigs.end()) { + std::stringstream strConfigs(item->second); + ov::util::Read{}(strConfigs, deviceConfigs); + } + return deviceConfigs; + }); + + ON_CALL(*plugin, GetDeviceList).WillByDefault([this](const std::map& config) { + return plugin->MultiDeviceInferencePlugin::GetDeviceList(config); + }); + ON_CALL(*plugin, SelectDevice) + .WillByDefault([this](const std::vector& metaDevices, + const std::string& netPrecision, + unsigned int Priority) { + return plugin->MultiDeviceInferencePlugin::SelectDevice(metaDevices, netPrecision, Priority); + }); + std::vector cpuCability{"FP32", "FP16", "INT8", "BIN"}; + std::vector gpuCability{"FP32", "FP16", "BATCHED_BLOB", "BIN", "INT8"}; + ON_CALL(*core, GetMetric(StrEq(CommonTestUtils::DEVICE_CPU), StrEq(METRIC_KEY(OPTIMIZATION_CAPABILITIES)), _)) + .WillByDefault(Return(cpuCability)); + ON_CALL(*core, GetMetric(StrEq(CommonTestUtils::DEVICE_GPU), StrEq(METRIC_KEY(OPTIMIZATION_CAPABILITIES)), _)) + .WillByDefault(Return(gpuCability)); + + ON_CALL(*core, + LoadNetwork(::testing::Matcher(_), + ::testing::Matcher(StrEq("CPU")), + ::testing::Matcher&>(_))) + .WillByDefault(Return(cpuMockExeNetwork)); + + ON_CALL(*core, + LoadNetwork(::testing::Matcher(_), + ::testing::Matcher(StrEq("GPU")), + ::testing::Matcher&>(_))) + .WillByDefault(Return(gpuMockExeNetwork)); + + std::shared_ptr simpleNetwork = ngraph::builder::subgraph::makeSingleConv(); + ASSERT_NO_THROW(simpleCnnNetwork = InferenceEngine::CNNNetwork(simpleNetwork)); + } +}; + +TEST_P(LoadNetworkWithSecondaryConfigsMockTest, LoadNetworkWithSecondaryConfigsTest) { + std::string device; + std::vector targetDevices; + Config config; + std::tie(device, targetDevices, config) = this->GetParam(); + if (device.find("AUTO") != std::string::npos) + plugin->SetName("AUTO"); + if (device.find("MULTI") != std::string::npos) + plugin->SetName("MULTI"); + + for (auto& deviceName : targetDevices) { + auto item = config.find(deviceName); + Config deviceConfigs; + if (item != config.end()) { + std::stringstream strConfigs(item->second); + // Parse the device properties to common property into deviceConfigs. 
+ ov::util::Read{}(strConfigs, deviceConfigs); + } + EXPECT_CALL( + *core, + LoadNetwork(::testing::Matcher(_), + ::testing::Matcher(deviceName), + ::testing::Matcher&>(MapContains(deviceConfigs)))) + .Times(1); + } + + ASSERT_NO_THROW(plugin->LoadExeNetworkImpl(simpleCnnNetwork, config)); +} + +INSTANTIATE_TEST_SUITE_P(smoke_AutoMock_LoadNetworkWithSecondaryConfigs, + LoadNetworkWithSecondaryConfigsMockTest, + ::testing::ValuesIn(LoadNetworkWithSecondaryConfigsMockTest::CreateConfigs()), + LoadNetworkWithSecondaryConfigsMockTest::getTestCaseName); \ No newline at end of file diff --git a/src/tests/unit/auto/plugin/mock_auto_device_plugin.hpp b/src/tests/unit/auto/plugin/mock_auto_device_plugin.hpp index 731ce78ad3e..b19add9182e 100644 --- a/src/tests/unit/auto/plugin/mock_auto_device_plugin.hpp +++ b/src/tests/unit/auto/plugin/mock_auto_device_plugin.hpp @@ -19,4 +19,4 @@ public: MOCK_METHOD((std::vector), ParseMetaDevices, (const std::string&, (const std::map&)), (const, override)); }; -}// namespace MockMultiDevice +}// namespace MockMultiDevice \ No newline at end of file diff --git a/src/tests/unit/auto/plugin/mock_load_network_properties.hpp b/src/tests/unit/auto/plugin/mock_load_network_properties.hpp new file mode 100644 index 00000000000..6aaa53c05bd --- /dev/null +++ b/src/tests/unit/auto/plugin/mock_load_network_properties.hpp @@ -0,0 +1,32 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once +#include + +#include + +#include "ie_icore.hpp" +#include "plugin.hpp" + +using namespace MockMultiDevicePlugin; +namespace MockMultiDevice { + +class MockMultiPluginForLoadNetworkWithPropertiesTest : public MultiDeviceInferencePlugin { +public: + MOCK_METHOD((std::string), GetDeviceList, ((const std::map&)), (const, override)); + MOCK_METHOD(DeviceInformation, + SelectDevice, + ((const std::vector&), const std::string&, unsigned int), + (override)); + MOCK_METHOD((std::list), + GetValidDevice, + ((const std::vector&), const std::string&), + (override)); + MOCK_METHOD((std::vector), + ParseMetaDevices, + (const std::string&, (const std::map&)), + (const, override)); +}; +} // namespace MockMultiDevice \ No newline at end of file diff --git a/thirdparty/gtest/gtest b/thirdparty/gtest/gtest index f8bd7cebfb3..d269d902e4c 160000 --- a/thirdparty/gtest/gtest +++ b/thirdparty/gtest/gtest @@ -1 +1 @@ -Subproject commit f8bd7cebfb3e6aeefb1d1d93e46a461471e15f57 +Subproject commit d269d902e4c3cd02f3e731e1e2ff8307352817a4
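For illustration, a minimal usage sketch of the intended behavior, assuming an OpenVINO build with both CPU and GPU plugins available and "model.xml" as a placeholder model path: properties nested under ov::device::properties are flattened into first-level properties when the target device is named directly, and are kept and forwarded by a virtual device such as AUTO or MULTI to the selected hardware plugin, which is what the new OVClassLoadNetworkAndCheckSecondaryPropertiesTest and the AUTO/MULTI mock test assert.

#include <openvino/openvino.hpp>

#include <cstdint>
#include <iostream>

int main() {
    ov::Core core;
    // "model.xml" is a placeholder path; any readable model works here.
    auto model = core.read_model("model.xml");

    // Case 1: the target device is named directly, so the nested CPU map is
    // expected to be flattened into first-level properties for the CPU plugin.
    auto cpu_model = core.compile_model(model, "CPU",
                                        ov::device::properties("CPU", ov::num_streams(4)));

    // Case 2: a virtual device is used, so the nested per-device maps are
    // expected to be kept and forwarded by AUTO to the selected hardware plugin.
    auto auto_model = core.compile_model(
        model,
        "AUTO:CPU,GPU",
        ov::device::properties("CPU", ov::num_streams(4)),
        ov::device::properties("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)));

    // Reading the property back should reflect the forwarded secondary value,
    // mirroring the check done in OVClassLoadNetworkAndCheckSecondaryPropertiesTest.
    auto streams = cpu_model.get_property(ov::num_streams.name()).as<int32_t>();
    std::cout << "CPU num_streams: " << streams << std::endl;
    return 0;
}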