diff --git a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_infer_request/batched_tensors.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_infer_request/batched_tensors.cpp
index 3ac4c5340eb..c8217eb9e11 100644
--- a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_infer_request/batched_tensors.cpp
+++ b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_infer_request/batched_tensors.cpp
@@ -17,7 +17,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Batch_Non_0) {
     auto batch_shape = Shape{batch, 3, 3, 3};
     auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "CNHW");
     const std::string tensor_name = "tensor_input0";
-    auto execNet = ie->compile_model(model, targetDevice);
+    auto execNet = ie->compile_model(model, target_device);
     ov::InferRequest req;
     req = execNet.create_infer_request();
     std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@@ -31,7 +31,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_remote_tensor_default) {
     auto batch_shape = Shape{batch, 4, 4, 4};
     auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
     const std::string tensor_name = "tensor_input0";
-    auto execNet = ie->compile_model(model, targetDevice);
+    auto execNet = ie->compile_model(model, target_device);
     ov::InferRequest req;
     req = execNet.create_infer_request();
     std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape));
@@ -49,7 +49,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Strides) {
     auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW");
     std::vector<float> buffer1(one_shape_size_stride, 10);
     std::vector<float> buffer2(one_shape_size_stride, 20);
-    auto execNet = ie->compile_model(model, targetDevice);
+    auto execNet = ie->compile_model(model, target_device);
     // Create InferRequest
     ov::InferRequest req;
     req = execNet.create_infer_request();
diff --git a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/core_integration.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/core_integration.cpp
index 19322f492ba..c31ca73b8e8 100644
--- a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/core_integration.cpp
+++ b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/core_integration.cpp
@@ -69,7 +69,13 @@ INSTANTIATE_TEST_SUITE_P(
 //
 // IE Class SetConfig
 //
-using IEClassSetConfigTestHETERO = BehaviorTestsUtils::IEClassNetworkTest;
+class IEClassSetConfigTestHETERO : public BehaviorTestsUtils::IEClassNetworkTest,
+                                   public BehaviorTestsUtils::IEPluginTestBase {
+    void SetUp() override {
+        IEClassNetworkTest::SetUp();
+        IEPluginTestBase::SetUp();
+    }
+};
 
 TEST_F(IEClassSetConfigTestHETERO, smoke_SetConfigNoThrow) {
     {
@@ -115,7 +121,13 @@ INSTANTIATE_TEST_SUITE_P(
         smoke_IEClassGetConfigTest, IEClassGetConfigTest,
         ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
 
-using IEClassGetConfigTestTEMPLATE = BehaviorTestsUtils::IEClassNetworkTest;
+class IEClassGetConfigTestTEMPLATE : public BehaviorTestsUtils::IEClassNetworkTest,
+                                     public BehaviorTestsUtils::IEPluginTestBase {
+    void SetUp() override {
+        IEClassNetworkTest::SetUp();
+        IEPluginTestBase::SetUp();
+    }
+};
 
 TEST_F(IEClassGetConfigTestTEMPLATE, smoke_GetConfigNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
diff --git a/src/tests/functional/plugin/conformance/subgraphs_dumper/include/matchers/base_matcher.hpp b/src/tests/functional/plugin/conformance/subgraphs_dumper/include/matchers/base_matcher.hpp
index 5954319880e..615656ae6fe 100644
--- a/src/tests/functional/plugin/conformance/subgraphs_dumper/include/matchers/base_matcher.hpp
+++ b/src/tests/functional/plugin/conformance/subgraphs_dumper/include/matchers/base_matcher.hpp
@@ -9,7 +9,7 @@
 #include "ngraph/node.hpp"
 #include "pugixml.hpp"
 
-#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"
 
 namespace SubgraphsDumper {
diff --git a/src/tests/functional/plugin/conformance/subgraphs_dumper/include/ops_cache.hpp b/src/tests/functional/plugin/conformance/subgraphs_dumper/include/ops_cache.hpp
index 35d9dcc0247..2f3d8ad89eb 100644
--- a/src/tests/functional/plugin/conformance/subgraphs_dumper/include/ops_cache.hpp
+++ b/src/tests/functional/plugin/conformance/subgraphs_dumper/include/ops_cache.hpp
@@ -10,7 +10,7 @@
 #include
 #include
 #include "matchers/matchers_manager.hpp"
-#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"
 
 namespace SubgraphsDumper {
diff --git a/src/tests/functional/plugin/conformance/subgraphs_dumper/tests/matchers/convolutions_matcher.cpp b/src/tests/functional/plugin/conformance/subgraphs_dumper/tests/matchers/convolutions_matcher.cpp
index f8b1cafb907..633ec5194f7 100644
--- a/src/tests/functional/plugin/conformance/subgraphs_dumper/tests/matchers/convolutions_matcher.cpp
+++ b/src/tests/functional/plugin/conformance/subgraphs_dumper/tests/matchers/convolutions_matcher.cpp
@@ -5,7 +5,7 @@
 #include "gtest/gtest.h"
 #include "matchers/convolutions.hpp"
 #include "ngraph/ops.hpp"
-#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"
 
 class ConvolutionMatcherTest : public ::testing::Test {
 protected:
diff --git a/src/tests/functional/plugin/conformance/subgraphs_dumper/tests/matchers/generic_single_op.cpp b/src/tests/functional/plugin/conformance/subgraphs_dumper/tests/matchers/generic_single_op.cpp
index c40a030309d..f6b8d8739aa 100644
--- a/src/tests/functional/plugin/conformance/subgraphs_dumper/tests/matchers/generic_single_op.cpp
+++ b/src/tests/functional/plugin/conformance/subgraphs_dumper/tests/matchers/generic_single_op.cpp
@@ -5,7 +5,7 @@
 #include "gtest/gtest.h"
 #include "matchers/single_op.hpp"
 #include "ngraph/ops.hpp"
-#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"
 
 class SingleOpMatcherTest : public ::testing::Test {
 protected:
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/include/api_conformance_helpers.hpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/include/api_conformance_helpers.hpp
index 3007170dedc..8589d60d223 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/include/api_conformance_helpers.hpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/include/api_conformance_helpers.hpp
@@ -71,7 +71,7 @@ inline const std::vector<std::map<std::string, std::string>> generate_configs(co
     return resultConfig;
 }
 
-inline const std::string generate_complex_device_name(const std::string& deviceName) {
+inline const std::string generate_complex_device_name(const std::string deviceName) {
     return deviceName + ":" + ov::test::conformance::targetDevice;
 }
 
@@ -85,9 +85,27 @@ inline const std::vector<std::string> return_all_possible_device_combination() {
     return res;
 }
 
-const std::vector<std::map<std::string, std::string>> empty_config = {
-    {},
-};
+inline std::vector<std::pair<std::string, std::string>> generate_pairs_plugin_name_by_device() {
+    std::vector<std::pair<std::string, std::string>> res;
+    for (const auto& device : return_all_possible_device_combination()) {
+        std::string real_device = device.substr(0, device.find(':'));
+        res.push_back(std::make_pair(get_plugin_lib_name_by_device(ov::test::conformance::targetDevice),
+                                     real_device));
+    }
+    return res;
+}
+
+inline std::map<std::string, std::string> AnyMap2StringMap(const AnyMap& config) {
+    if (config.empty())
+        return {};
+    std::map<std::string, std::string> result;
+    for (const auto& configItem : config) {
+        result.insert({configItem.first, configItem.second.as<std::string>()});
+    }
+    return result;
+}
+
+const std::map<std::string, std::string> ie_config = AnyMap2StringMap(ov::test::conformance::pluginConfig);
 
 } // namespace conformance
 } // namespace test
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/include/ov_api_conformance_helpers.hpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/include/ov_api_conformance_helpers.hpp
index 4a650054e37..650483ed306 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/include/ov_api_conformance_helpers.hpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/include/ov_api_conformance_helpers.hpp
@@ -34,9 +34,7 @@ inline const std::vector<ov::AnyMap> generate_ov_configs(const std::string& targ
     return resultConfig;
 }
 
-const std::vector<ov::AnyMap> empty_ov_config = {
-    {},
-};
+const ov::AnyMap ov_config = ov::test::conformance::pluginConfig;
 
 } // namespace conformance
 } // namespace test
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/exec_graph_info.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/exec_graph_info.cpp
index 2c3f4de843a..956ef70109b 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/exec_graph_info.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/exec_graph_info.cpp
@@ -11,7 +11,7 @@ namespace {
 using namespace ExecutionGraphTests;
 
 INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecGraphSerializationTest,
-                                ::testing::Values(ov::test::conformance::targetDevice),
+                                ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination()),
                                 ExecGraphSerializationTest::getTestCaseName);
 
 const std::vector<InferenceEngine::Precision> execGraphInfoElemTypes = {
@@ -22,7 +22,7 @@ INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecGraphUniqueNodeNames,
                         ::testing::Combine(
                                 ::testing::ValuesIn(execGraphInfoElemTypes),
                                 ::testing::Values(InferenceEngine::SizeVector({1, 2, 5, 5})),
-                                ::testing::Values(ov::test::conformance::targetDevice)),
+                                ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination())),
                         ExecGraphUniqueNodeNames::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/exec_network_base.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/exec_network_base.cpp
index 8d05ce90ab5..434a2fde1a4 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/exec_network_base.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/exec_network_base.cpp
@@ -14,7 +14,7 @@ namespace {
     INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecutableNetworkBaseTest,
                             ::testing::Combine(
                                     ::testing::ValuesIn(return_all_possible_device_combination()),
-                                    ::testing::ValuesIn(empty_config)),
+                                    ::testing::Values(ie_config)),
                             ExecutableNetworkBaseTest::getTestCaseName);
 
     const std::vector<InferenceEngine::Precision> execNetBaseElemTypes = {
@@ -28,6 +28,6 @@ namespace {
                             ::testing::Combine(
                                     ::testing::ValuesIn(execNetBaseElemTypes),
                                     ::testing::ValuesIn(return_all_possible_device_combination()),
-                                    ::testing::ValuesIn(empty_config)),
+                                    ::testing::Values(ie_config)),
                             ExecNetSetPrecision::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/get_metric.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/get_metric.cpp
index 33e9166af56..ce66f413517 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/get_metric.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/executable_network/get_metric.cpp
@@ -61,7 +61,7 @@ INSTANTIATE_TEST_SUITE_P(
 
 INSTANTIATE_TEST_SUITE_P(
         smoke_IEClassHeteroExecutableNetworkGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS,
-        ::testing::Values(ov::test::conformance::targetDevice));
+        ::testing::ValuesIn(return_all_possible_device_combination()));
 
 INSTANTIATE_TEST_SUITE_P(
         ie_executable_network, IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME,
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/callback.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/callback.cpp
index b441237c375..5cbc4bcf0da 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/callback.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/callback.cpp
@@ -12,6 +12,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestCallbackTests,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_config)),
+                                ::testing::Values(ie_config)),
                         InferRequestCallbackTests::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/cancellation.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/cancellation.cpp
index c1156b3f05d..4578c38d927 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/cancellation.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/cancellation.cpp
@@ -12,6 +12,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestCancellationTests,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_config)),
+                                ::testing::Values(ie_config)),
                         InferRequestCancellationTests::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/io_blob.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/io_blob.cpp
index db14d827ffe..57772c62a79 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/io_blob.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/io_blob.cpp
@@ -15,6 +15,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestIOBBlobTest,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_config)),
+                                ::testing::Values(ie_config)),
                         InferRequestIOBBlobTest::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/multitheading.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/multitheading.cpp
index 03c183d13e9..55274a446ed 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/multitheading.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/multitheading.cpp
@@ -16,7 +16,7 @@ using namespace BehaviorTestsDefinitions;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestMultithreadingTests,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_config)),
+                                ::testing::Values(ie_config)),
                         InferRequestMultithreadingTests::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/perf_counters.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/perf_counters.cpp
index 9120fdf33d3..04c74ed74a6 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/perf_counters.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/perf_counters.cpp
@@ -12,7 +12,7 @@ using namespace BehaviorTestsDefinitions;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestPerfCountersTest,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_config)),
+                                ::testing::Values(ie_config)),
                         InferRequestPerfCountersTest::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/set_blob_by_type.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/set_blob_by_type.cpp
index a4215ba7b7e..52f9bbec207 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/set_blob_by_type.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/set_blob_by_type.cpp
@@ -13,16 +13,14 @@ const std::vector<FuncTestUtils::BlobType> setBlobTypes = {
     FuncTestUtils::BlobType::Compound,
     FuncTestUtils::BlobType::Batched,
     FuncTestUtils::BlobType::Memory,
-//    FuncTestUtils::BlobType::Remote,
+    FuncTestUtils::BlobType::Remote,
     FuncTestUtils::BlobType::I420,
     FuncTestUtils::BlobType::NV12
 };
 
-const std::map<std::string, std::string> ConfigBlobType{}; //nothing special
-
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestSetBlobByType,
                         ::testing::Combine(::testing::ValuesIn(setBlobTypes),
                                            ::testing::ValuesIn(return_all_possible_device_combination()),
-                                           ::testing::ValuesIn(empty_config)),
+                                           ::testing::Values(ie_config)),
                         InferRequestSetBlobByType::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/wait.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/wait.cpp
index 850416599af..67be3dfc06a 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/wait.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/infer_request/wait.cpp
@@ -15,6 +15,6 @@ using namespace BehaviorTestsDefinitions;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestWaitTests,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_config)),
+                                ::testing::Values(ie_config)),
                         InferRequestWaitTests::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/exec_graph_info.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/exec_graph_info.cpp
index c8d62404ce8..4399c256842 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/exec_graph_info.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/exec_graph_info.cpp
@@ -30,6 +30,6 @@ INSTANTIATE_TEST_SUITE_P(ov_compiled_model,
                         ::testing::Combine(
                                 ::testing::ValuesIn(ovExecGraphInfoElemTypes),
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVExecGraphImportExportTest::getTestCaseName);
 } // namespace
\ No newline at end of file
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/exec_network_base.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/exec_network_base.cpp
index 1fd71a83224..cedce958825 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/exec_network_base.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/exec_network_base.cpp
@@ -14,6 +14,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVExecutableNetworkBaseTest,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVExecutableNetworkBaseTest::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/get_metric.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/get_metric.cpp
index 7d374130620..bc380409d0f 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/get_metric.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/get_metric.cpp
@@ -18,7 +18,7 @@ using namespace InferenceEngine::PluginConfigParams;
 
 INSTANTIATE_TEST_SUITE_P(
-        ov_compiled_model, OVClassImportExportTestP,
+        ov_compiled_model, OVClassExecutableNetworkImportExportTestP,
         ::testing::ValuesIn(return_all_possible_device_combination()));
 
 //
@@ -55,7 +55,7 @@ INSTANTIATE_TEST_SUITE_P(
 
 INSTANTIATE_TEST_SUITE_P(
         ov_compiled_model, OVClassExecutableNetworkSetConfigTest,
-        ::testing::Values(ov::test::conformance::targetDevice));
+        ::testing::ValuesIn(return_all_possible_device_combination()));
 
 ////
 //// Hetero Executable Network GetMetric
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/properties.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/properties.cpp
index caa88412fd7..2cc9d2be717 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/properties.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_executable_network/properties.cpp
@@ -16,34 +16,15 @@ const std::vector<ov::AnyMap> inproperties = {
 };
 
 const std::vector<ov::AnyMap> auto_batch_inproperties = {
-    {{ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}},
     {{ov::auto_batch_timeout(-1)}},
 };
 
 INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesIncorrectTests,
                         ::testing::Combine(
-                                ::testing::Values(ov::test::conformance::targetDevice),
+                                ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination()),
                                 ::testing::ValuesIn(inproperties)),
                         OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
 
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Hetero, OVCompiledModelPropertiesIncorrectTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_HETERO),
-                                ::testing::ValuesIn(generate_ov_configs(CommonTestUtils::DEVICE_HETERO, inproperties))),
-                        OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Multi, OVCompiledModelPropertiesIncorrectTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_MULTI),
-                                ::testing::ValuesIn(generate_ov_configs(CommonTestUtils::DEVICE_MULTI, inproperties))),
-                        OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Auto, OVCompiledModelPropertiesIncorrectTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                                ::testing::ValuesIn(generate_ov_configs(CommonTestUtils::DEVICE_AUTO, inproperties))),
-                        OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
-
 INSTANTIATE_TEST_SUITE_P(ov_compiled_model_AutoBatch, OVCompiledModelPropertiesIncorrectTests,
                         ::testing::Combine(
                                 ::testing::Values(CommonTestUtils::DEVICE_BATCH),
@@ -63,35 +44,16 @@ INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesDefaultTest
                         OVCompiledModelPropertiesDefaultTests::getTestCaseName);
 
 const std::vector<ov::AnyMap> auto_batch_properties = {
-    {},
     {{CONFIG_KEY(AUTO_BATCH_TIMEOUT) , "1"}},
     {{ov::auto_batch_timeout(10)}},
 };
 
 INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesTests,
                         ::testing::Combine(
-                                ::testing::Values(ov::test::conformance::targetDevice),
+                                ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination()),
                                 ::testing::ValuesIn(default_properties)),
                         OVCompiledModelPropertiesTests::getTestCaseName);
 
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Hetero, OVCompiledModelPropertiesTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_HETERO),
-                                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_HETERO, default_properties))),
-                        OVCompiledModelPropertiesTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Multi, OVCompiledModelPropertiesTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_MULTI),
-                                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_MULTI, default_properties))),
-                        OVCompiledModelPropertiesTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Auto, OVCompiledModelPropertiesTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_AUTO, default_properties))),
-                        OVCompiledModelPropertiesTests::getTestCaseName);
-
 INSTANTIATE_TEST_SUITE_P(ov_compiled_model_AutoBatch, OVCompiledModelPropertiesTests,
                         ::testing::Combine(
                                 ::testing::Values(CommonTestUtils::DEVICE_BATCH),
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/callback.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/callback.cpp
index ad65393d2fb..2f3949dc35e 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/callback.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/callback.cpp
@@ -15,7 +15,7 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestCallbackTests,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVInferRequestCallbackTests::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/cancellation.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/cancellation.cpp
index d81802469dc..0645c2bf3bd 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/cancellation.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/cancellation.cpp
@@ -12,6 +12,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestCancellationTests,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVInferRequestCancellationTests::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/infer_request_dynamic.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/infer_request_dynamic.cpp
index fa5d861f604..c4f9803d39e 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/infer_request_dynamic.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/infer_request_dynamic.cpp
@@ -61,7 +61,7 @@ INSTANTIATE_TEST_SUITE_P(ov_infer_request_1, OVInferRequestDynamicTests,
                                 {{1, 4, 20, 20}, {1, 4, 20, 20}},
                                 {{2, 4, 20, 20}, {2, 4, 20, 20}}}),
                         ::testing::ValuesIn(return_all_possible_device_combination()),
-                        ::testing::ValuesIn(empty_ov_config)),
+                        ::testing::Values(ov_config)),
                 OVInferRequestDynamicTests::getTestCaseName);
 
 INSTANTIATE_TEST_SUITE_P(ov_infer_request_2, OVInferRequestDynamicTests,
@@ -71,6 +71,6 @@ INSTANTIATE_TEST_SUITE_P(ov_infer_request_2, OVInferRequestDynamicTests,
                                 {{1, 4, 20, 20}, {1, 2, 20, 40}},
                                 {{2, 4, 20, 20}, {2, 2, 20, 40}}}),
                         ::testing::ValuesIn(return_all_possible_device_combination()),
-                        ::testing::ValuesIn(empty_ov_config)),
+                        ::testing::Values(ov_config)),
                 OVInferRequestDynamicTests::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/inference_chaining.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/inference_chaining.cpp
index 5bf1c31970c..1244e5f005f 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/inference_chaining.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/inference_chaining.cpp
@@ -13,6 +13,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferenceChaining,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVInferenceChaining::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/io_tensor.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/io_tensor.cpp
index 2dc0a1580f0..e7a4e4e25e3 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/io_tensor.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/io_tensor.cpp
@@ -15,7 +15,7 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestIOTensorTest,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVInferRequestIOTensorTest::getTestCaseName);
 
 std::vector<ov::element::Type> ovIOTensorElemTypes = {
@@ -41,6 +41,6 @@ INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestIOTensorSetPrecisionTes
                         ::testing::Combine(
                                 ::testing::ValuesIn(ovIOTensorElemTypes),
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVInferRequestIOTensorSetPrecisionTest::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/multithreading.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/multithreading.cpp
index 7e3bfc70407..4f9dec46bdd 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/multithreading.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/multithreading.cpp
@@ -16,7 +16,7 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestMultithreadingTests,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVInferRequestMultithreadingTests::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/perf_counters.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/perf_counters.cpp
index 06c9ce4875c..d97a129ab2b 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/perf_counters.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/perf_counters.cpp
@@ -13,7 +13,7 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestPerfCountersTest,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVInferRequestPerfCountersTest::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/wait.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/wait.cpp
index 56c1866d15a..8382b84b70f 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/wait.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_infer_request/wait.cpp
@@ -16,7 +16,7 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestWaitTests,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_ov_config)),
+                                ::testing::Values(ov_config)),
                         OVInferRequestWaitTests::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_plugin/core_integration.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_plugin/core_integration.cpp
index 85c890730b5..2dffebcad1d 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_plugin/core_integration.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_plugin/core_integration.cpp
@@ -17,14 +17,14 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(
         ov_plugin, OVClassBasicTestP,
-        ::testing::Values(std::make_pair(get_plugin_lib_name_by_device(ov::test::conformance::targetDevice), ov::test::conformance::targetDevice)));
+        ::testing::ValuesIn(generate_pairs_plugin_name_by_device()));
 
 INSTANTIATE_TEST_SUITE_P(
         ov_plugin, OVClassNetworkTestP,
         ::testing::ValuesIn(return_all_possible_device_combination()));
 
 INSTANTIATE_TEST_SUITE_P(
-        ov_plugin, OVClassImportExportTestP,
+        smoke_OVClassImportExportTestP, OVClassImportExportTestP,
         ::testing::ValuesIn(return_all_possible_device_combination()));
 
 //
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_plugin/properties.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_plugin/properties.cpp
index e2ad4223cc5..80e5943c058 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_plugin/properties.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/ov_plugin/properties.cpp
@@ -3,10 +3,12 @@
 //
 
 #include "behavior/ov_plugin/properties_tests.hpp"
+#include "base/ov_behavior_test_utils.hpp"
 #include "openvino/runtime/properties.hpp"
 #include "ov_api_conformance_helpers.hpp"
 
 using namespace ov::test::behavior;
+using namespace ov::test::conformance;
 
 namespace {
 
@@ -15,34 +17,15 @@ const std::vector<ov::AnyMap> inproperties = {
 };
 
 const std::vector<ov::AnyMap> auto_batch_inproperties = {
-    {{ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}},
     {{ov::auto_batch_timeout(-1)}},
 };
 
 INSTANTIATE_TEST_SUITE_P(ov_plugin, OVPropertiesIncorrectTests,
                         ::testing::Combine(
-                                ::testing::Values(ov::test::conformance::targetDevice),
+                                ::testing::ValuesIn(return_all_possible_device_combination()),
                                 ::testing::ValuesIn(inproperties)),
                         OVPropertiesIncorrectTests::getTestCaseName);
 
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Hetero, OVPropertiesIncorrectTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_HETERO),
-                                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_HETERO, inproperties))),
-                        OVPropertiesIncorrectTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Multi, OVPropertiesIncorrectTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_MULTI),
-                                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_MULTI, inproperties))),
-OVPropertiesIncorrectTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Auto, OVPropertiesIncorrectTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_AUTO, inproperties))),
-                        OVPropertiesIncorrectTests::getTestCaseName);
-
 INSTANTIATE_TEST_SUITE_P(ov_plugin_AutoBatch, OVPropertiesIncorrectTests,
                         ::testing::Combine(
                                 ::testing::Values(CommonTestUtils::DEVICE_BATCH),
@@ -62,28 +45,10 @@ const std::vector<ov::AnyMap> auto_batch_properties = {
 
 INSTANTIATE_TEST_SUITE_P(ov_plugin, OVPropertiesTests,
                         ::testing::Combine(
-                                ::testing::Values(ov::test::conformance::targetDevice),
+                                ::testing::ValuesIn(return_all_possible_device_combination()),
                                 ::testing::ValuesIn(default_properties)),
                         OVPropertiesTests::getTestCaseName);
 
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Hetero, OVPropertiesTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_HETERO),
-                                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_HETERO, default_properties))),
-                        OVPropertiesTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Multi, OVPropertiesTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_MULTI),
-                                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_MULTI, default_properties))),
-                        OVPropertiesTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Auto, OVPropertiesTests,
-                        ::testing::Combine(
-                                ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_AUTO, default_properties))),
-                        OVPropertiesTests::getTestCaseName);
-
 INSTANTIATE_TEST_SUITE_P(ov_plugin_AutoBatch, OVPropertiesTests,
                         ::testing::Combine(
                                 ::testing::Values(CommonTestUtils::DEVICE_BATCH),
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/configuration_tests.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/configuration_tests.cpp
index 24bf80b9a12..d609afd7d4f 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/configuration_tests.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/configuration_tests.cpp
@@ -24,8 +24,16 @@ namespace {
 #else
     auto defaultBindThreadParameter = InferenceEngine::Parameter{std::string{CONFIG_VALUE(YES)}};
 #endif
+    INSTANTIATE_TEST_SUITE_P(
+            ie_plugin,
+            DefaultConfigurationTest,
+            ::testing::Combine(
+                    ::testing::ValuesIn(return_all_possible_device_combination()),
+                    ::testing::Values(DefaultParameter{CONFIG_KEY(PERF_COUNT), CONFIG_VALUE(YES)})),
+            DefaultConfigurationTest::getTestCaseName);
+
     const std::vector<std::map<std::string, std::string>> pluginConfigs = {
-            {},
+            {{}},
             {{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::THROUGHPUT}},
             {{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY}},
             {{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY},
@@ -161,7 +169,7 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin_Hetero, CorrectConfigTests,
             {{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "10"}}
     };
 
-    INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, CorrectConfigCheck,
+    INSTANTIATE_TEST_SUITE_P(ie_plugin, CorrectConfigCheck,
                             ::testing::Combine(
                                     ::testing::ValuesIn(return_all_possible_device_combination()),
                                     ::testing::ValuesIn(pluginConfigsCheck)),
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/core_integration.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/core_integration.cpp
index e36a34569cb..18e51126384 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/core_integration.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/core_integration.cpp
@@ -16,7 +16,7 @@ namespace {
 
 INSTANTIATE_TEST_SUITE_P(
         ie_plugin, IEClassBasicTestP,
-        ::testing::Values(std::make_pair(get_plugin_lib_name_by_device(ov::test::conformance::targetDevice), ov::test::conformance::targetDevice)));
+        ::testing::ValuesIn(generate_pairs_plugin_name_by_device()));
 
 INSTANTIATE_TEST_SUITE_P(
         ie_plugin, IEClassNetworkTestP,
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/core_threading_tests.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/core_threading_tests.cpp
index 61809f6ff97..4349d58d9eb 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/core_threading_tests.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/core_threading_tests.cpp
@@ -16,7 +16,10 @@ const Params coreThreadingParams[] = {
     std::tuple{ CommonTestUtils::DEVICE_BATCH, generate_configs(CommonTestUtils::DEVICE_BATCH).front() },
 };
 
-INSTANTIATE_TEST_SUITE_P(ie_plugin_, CoreThreadingTests, testing::ValuesIn(coreThreadingParams), CoreThreadingTests::getTestCaseName);
+INSTANTIATE_TEST_SUITE_P(ie_plugin_, CoreThreadingTests,
+                         testing::ValuesIn(coreThreadingParams),
+                         CoreThreadingTests::getTestCaseName);
+
 INSTANTIATE_TEST_SUITE_P(ie_plugin, CoreThreadingTests,
                         ::testing::Combine(
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
diff --git a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/set_preprocess.cpp b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/set_preprocess.cpp
index 434e892cb1b..b1d8cb0dcd8 100644
--- a/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/set_preprocess.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/api_conformance_runner/src/behavior/plugin/set_preprocess.cpp
@@ -19,7 +19,7 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessTest,
                         ::testing::Combine(
                                 ::testing::ValuesIn(netPrecisionsPreprocess),
                                 ::testing::ValuesIn(return_all_possible_device_combination()),
-                                ::testing::ValuesIn(empty_config)),
+                                ::testing::Values(ie_config)),
                         InferRequestPreprocessTest::getTestCaseName);
 
 const std::vector<InferenceEngine::Precision> ioPrecisionsPreprocess = {
@@ -47,7 +47,7 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessConversionTest,
                         ::testing::Bool(),
                         ::testing::Bool(),
                         ::testing::ValuesIn(return_all_possible_device_combination()),
-                        ::testing::ValuesIn(empty_config)),
+                        ::testing::Values(ie_config)),
                 InferRequestPreprocessConversionTest::getTestCaseName);
 
 INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessDynamicallyInSetBlobTest,
@@ -61,6 +61,6 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessDynamicallyInSetBlobTe
                         ::testing::Values(true), // only SetBlob
                         ::testing::Values(true), // only SetBlob
                         ::testing::ValuesIn(return_all_possible_device_combination()),
-                        ::testing::ValuesIn(empty_config)),
+                        ::testing::Values(ie_config)),
                 InferRequestPreprocessDynamicallyInSetBlobTest::getTestCaseName);
 } // namespace
diff --git a/src/tests/functional/plugin/conformance/test_runner/conformance_infra/src/main.cpp b/src/tests/functional/plugin/conformance/test_runner/conformance_infra/src/main.cpp
index c121bf17ec6..034bcc9e142 100644
--- a/src/tests/functional/plugin/conformance/test_runner/conformance_infra/src/main.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/conformance_infra/src/main.cpp
@@ -11,7 +11,7 @@
 #include "common_test_utils/file_utils.hpp"
 #include "functional_test_utils/skip_tests_config.hpp"
-#include "functional_test_utils/layer_test_utils/environment.hpp"
+#include "functional_test_utils/summary/environment.hpp"
 #include "read_ir_test/read_ir.hpp"
 
 #include "gflag_config.hpp"
@@ -45,11 +45,15 @@ int main(int argc, char* argv[]) {
     }
 
     FuncTestUtils::SkipTestsConfig::disable_tests_skipping = FLAGS_disable_test_config;
-    LayerTestsUtils::Summary::setExtendReport(FLAGS_extend_report);
-    LayerTestsUtils::Summary::setExtractBody(FLAGS_extract_body);
-    LayerTestsUtils::Summary::setSaveReportWithUniqueName(FLAGS_report_unique_name);
-    LayerTestsUtils::Summary::setOutputFolder(FLAGS_output_folder);
-    LayerTestsUtils::Summary::setSaveReportTimeout(FLAGS_save_report_timeout);
+    ov::test::utils::OpSummary::setExtendReport(FLAGS_extend_report);
+    ov::test::utils::OpSummary::setExtractBody(FLAGS_extract_body);
+    ov::test::utils::OpSummary::setSaveReportWithUniqueName(FLAGS_report_unique_name);
+    ov::test::utils::OpSummary::setOutputFolder(FLAGS_output_folder);
+    ov::test::utils::OpSummary::setSaveReportTimeout(FLAGS_save_report_timeout);
+    {
+        auto &apiSummary = ov::test::utils::ApiSummary::getInstance();
+        apiSummary.setDeviceName(FLAGS_device);
+    }
     if (FLAGS_shape_mode == std::string("static")) {
         ov::test::subgraph::shapeMode = ov::test::subgraph::ShapeMode::STATIC;
     } else if (FLAGS_shape_mode == std::string("dynamic")) {
@@ -75,25 +79,29 @@ int main(int argc, char* argv[]) {
     }
 
     ::testing::InitGoogleTest(&argc, argv);
-    ::testing::AddGlobalTestEnvironment(new LayerTestsUtils::TestEnvironment);
+    ::testing::AddGlobalTestEnvironment(new ov::test::utils::TestEnvironment);
 
     auto exernalSignalHandler = [](int errCode) {
         std::cerr << "Unexpected application crash with code: " << errCode << std::endl;
+        auto& op_summary = ov::test::utils::OpSummary::getInstance();
+        auto& api_summary = ov::test::utils::ApiSummary::getInstance();
+        op_summary.saveReport();
+        api_summary.saveReport();
+        // set default handler for crash
+        signal(SIGABRT, SIG_DFL);
+        signal(SIGSEGV, SIG_DFL);
         signal(SIGINT, SIG_DFL);
         signal(SIGTERM, SIG_DFL);
-        if (errCode == SIGINT || errCode == SIGTERM) {
-            auto& s = LayerTestsUtils::Summary::getInstance();
-            s.saveReport();
-            exit(1);
-        }
+        exit(1);
     };
 
-    // killed by extarnal
+    // killed by external
     signal(SIGINT, exernalSignalHandler);
     signal(SIGTERM , exernalSignalHandler);
-
+    signal(SIGSEGV, exernalSignalHandler);
+    signal(SIGABRT, exernalSignalHandler);
     return RUN_ALL_TESTS();
 }
diff --git a/src/tests/functional/plugin/conformance/test_runner/conformance_infra/src/read_ir_test/read_ir.cpp b/src/tests/functional/plugin/conformance/test_runner/conformance_infra/src/read_ir_test/read_ir.cpp
index d58909f3593..53f6866b45d 100644
--- a/src/tests/functional/plugin/conformance/test_runner/conformance_infra/src/read_ir_test/read_ir.cpp
+++ b/src/tests/functional/plugin/conformance/test_runner/conformance_infra/src/read_ir_test/read_ir.cpp
@@ -14,7 +14,7 @@
 #include "common_test_utils/data_utils.hpp"
 #include "common_test_utils/common_utils.hpp"
 #include "common_test_utils/crash_handler.hpp"
-#include "functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/summary/op_info.hpp"
 #include "functional_test_utils/skip_tests_config.hpp"
 
 #include "read_ir_test/read_ir.hpp"
@@ -56,7 +56,7 @@ std::string ReadIRTest::getTestCaseName(const testing::TestParamInfo(new CommonTestUtils::CrashHandler());
-    auto &s = LayerTestsUtils::Summary::getInstance();
+    auto &s = ov::test::utils::OpSummary::getInstance();
 
     // place to jump in case of a crash
     int jmpRes = 0;
@@ -74,21 +74,21 @@ void ReadIRTest::query_model() {
         s.setDeviceName(targetDevice);
 
         if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
-            s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::SKIPPED);
+            s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::SKIPPED);
             GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
         } else {
-            s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::CRASHED);
+            s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::CRASHED);
         }
         try {
            SubgraphBaseTest::query_model();
-            s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::PASSED);
+            s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::PASSED);
        } catch (...) {
-            s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::FAILED);
+            s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED);
        }
     } else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) {
         IE_THROW() << "Crash happens";
     } else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
-        s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::HANGED);
+        s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::HANGED);
         IE_THROW() << "Crash happens";
     }
 }
diff --git a/src/tests/functional/plugin/cpu/shared_tests_instances/behavior/executable_network/exec_graph_info.cpp b/src/tests/functional/plugin/cpu/shared_tests_instances/behavior/executable_network/exec_graph_info.cpp
index 05f02d00aef..0c56fed32e6 100644
--- a/src/tests/functional/plugin/cpu/shared_tests_instances/behavior/executable_network/exec_graph_info.cpp
+++ b/src/tests/functional/plugin/cpu/shared_tests_instances/behavior/executable_network/exec_graph_info.cpp
@@ -18,7 +18,7 @@ TEST_P(ExecGraphUniqueNodeNames, CheckUniqueNodeNames) {
 InferenceEngine::CNNNetwork cnnNet(fnPtr);
 
 auto ie = PluginCache::get().ie();
-auto execNet = ie->LoadNetwork(cnnNet, targetDevice);
+auto execNet = ie->LoadNetwork(cnnNet, target_device);
 
 InferenceEngine::CNNNetwork execGraphInfo = execNet.GetExecGraphInfo();
diff --git a/src/tests/functional/plugin/cpu/shared_tests_instances/behavior/ov_executable_network/core_integration.cpp b/src/tests/functional/plugin/cpu/shared_tests_instances/behavior/ov_executable_network/core_integration.cpp
index ad7741c9a8b..f54182b1eee 100644
--- a/src/tests/functional/plugin/cpu/shared_tests_instances/behavior/ov_executable_network/core_integration.cpp
+++ b/src/tests/functional/plugin/cpu/shared_tests_instances/behavior/ov_executable_network/core_integration.cpp
@@ -17,7 +17,7 @@ namespace {
 
 INSTANTIATE_TEST_SUITE_P(
-        smoke_OVClassImportExportTestP, OVClassImportExportTestP,
+        smoke_OVClassImportExportTestP, OVClassExecutableNetworkImportExportTestP,
         ::testing::Values("HETERO:CPU"));
 
 //
diff --git a/src/tests/functional/plugin/gna/shared_tests_instances/behavior/executable_network/get_metric.cpp b/src/tests/functional/plugin/gna/shared_tests_instances/behavior/executable_network/get_metric.cpp
index 97ddac1baa0..1a48bf87f83 100644
--- a/src/tests/functional/plugin/gna/shared_tests_instances/behavior/executable_network/get_metric.cpp
+++ b/src/tests/functional/plugin/gna/shared_tests_instances/behavior/executable_network/get_metric.cpp
@@ -78,7 +78,7 @@ TEST_P(IEClassExecutableNetworkSetConfigFromFp32Test, SetConfigFromFp32Throws) {
     std::map<std::string, std::string> initialConfig;
     initialConfig[GNA_CONFIG_KEY(DEVICE_MODE)] = InferenceEngine::GNAConfigParams::GNA_SW_FP32;
 
-    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName, initialConfig);
+    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device, initialConfig);
 
     ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngine::Exception);
 }
diff --git a/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/ov_plugin/core_integration.cpp b/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/ov_plugin/core_integration.cpp
index 77a23f3a7b3..f112ceba862 100644
--- a/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/ov_plugin/core_integration.cpp
+++ b/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/ov_plugin/core_integration.cpp
@@ -18,7 +18,7 @@ using namespace ov::test::behavior;
 
 namespace {
-// IE Class Common tests with
 //
 
 INSTANTIATE_TEST_SUITE_P(nightly_OVClassCommon,
@@ -110,7 +110,7 @@ TEST_P(OVClassGetMetricTest_GPU_DEVICE_TOTAL_MEM_SIZE, GetMetricAndPrintNoThrow)
     ov::Core ie;
     ov::Any p;
 
-    ASSERT_NO_THROW(p = ie.get_property(deviceName, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE)));
+    ASSERT_NO_THROW(p = ie.get_property(target_device, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE)));
     uint64_t t = p;
 
     std::cout << "GPU device total memory size: " << t << std::endl;
@@ -127,7 +127,7 @@ TEST_P(OVClassGetMetricTest_GPU_UARCH_VERSION, GetMetricAndPrintNoThrow) {
     ov::Core ie;
     ov::Any p;
 
-    ASSERT_NO_THROW(p = ie.get_property(deviceName, GPU_METRIC_KEY(UARCH_VERSION)));
+    ASSERT_NO_THROW(p = ie.get_property(target_device, GPU_METRIC_KEY(UARCH_VERSION)));
     std::string t = p;
 
     std::cout << "GPU device uarch: " << t << std::endl;
@@ -143,7 +143,7 @@ TEST_P(OVClassGetMetricTest_GPU_EXECUTION_UNITS_COUNT, GetMetricAndPrintNoThrow)
     ov::Core ie;
     ov::Any p;
 
-    ASSERT_NO_THROW(p = ie.get_property(deviceName, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT)));
+    ASSERT_NO_THROW(p = ie.get_property(target_device, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT)));
     int t = p;
 
     std::cout << "GPU EUs count: " << t << std::endl;
@@ -160,7 +160,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricAvailableDevicesAndPrintNoThrow) {
     ov::Core ie;
 
     std::vector<std::string> properties;
-    ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::available_devices));
+    ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::available_devices));
 
     std::cout << "AVAILABLE_DEVICES: ";
     for (const auto& prop : properties) {
@@ -175,7 +175,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricRangeForAsyncInferRequestsAndPrintNo
     ov::Core ie;
 
     std::tuple<unsigned int, unsigned int, unsigned int> property;
-    ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::range_for_async_infer_requests));
+    ASSERT_NO_THROW(property = ie.get_property(target_device, ov::range_for_async_infer_requests));
 
     std::cout << "RANGE_FOR_ASYNC_INFER_REQUESTS: " << std::get<0>(property) << " " <<
                                                        std::get<1>(property) << " " <<
@@ -188,7 +188,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricRangeForStreamsAndPrintNoThrow) {
     ov::Core ie;
 
     std::tuple<unsigned int, unsigned int> property;
-    ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::range_for_streams));
+    ASSERT_NO_THROW(property = ie.get_property(target_device, ov::range_for_streams));
 
     std::cout << "RANGE_FOR_STREAMS: " << std::get<0>(property) << " " <<
                                           std::get<1>(property) << std::endl;
@@ -200,7 +200,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricOptimalBatchSizeAndPrintNoThrow) {
     ov::Core ie;
 
     unsigned int property;
-    ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::optimal_batch_size));
+    ASSERT_NO_THROW(property = ie.get_property(target_device, ov::optimal_batch_size));
 
     std::cout << "OPTIMAL_BATCH_SIZE: " << property << std::endl;
 
@@ -211,7 +211,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricFullNameAndPrintNoThrow) {
     ov::Core ie;
 
     std::string property;
-    ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::device::full_name));
+    ASSERT_NO_THROW(property = ie.get_property(target_device, ov::device::full_name));
 
     std::cout << "FULL_DEVICE_NAME: " << property << std::endl;
 
@@ -222,7 +222,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricTypeAndPrintNoThrow) {
     ov::Core ie;
 
     ov::device::Type property = ov::device::Type::INTEGRATED;
-    ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::device::type));
+    ASSERT_NO_THROW(property = ie.get_property(target_device, ov::device::type));
 
     std::cout << "DEVICE_TYPE: " << property << std::endl;
 
@@ -233,7 +233,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricGopsAndPrintNoThrow) {
     ov::Core ie;
 
     std::map<ov::element::Type, float> properties;
-    ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::device::gops));
+    ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::device::gops));
 
     std::cout << "DEVICE_GOPS: " << std::endl;
     for (const auto& prop : properties) {
@@ -247,7 +247,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricCapabilitiesAndPrintNoThrow) {
     ov::Core ie;
 
     std::vector<std::string> properties;
-    ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::device::capabilities));
+    ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::device::capabilities));
 
     std::cout << "OPTIMIZATION_CAPABILITIES: " << std::endl;
     for (const auto& prop : properties) {
@@ -261,7 +261,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricDeviceTotalMemSizeAndPrintNoThrow) {
     ov::Core ie;
 
     uint64_t property;
-    ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::intel_gpu::device_total_mem_size));
+    ASSERT_NO_THROW(property = ie.get_property(target_device, ov::intel_gpu::device_total_mem_size));
 
     std::cout << "GPU_DEVICE_TOTAL_MEM_SIZE: " << property << std::endl;
 
@@ -272,7 +272,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricUarchVersionAndPrintNoThrow) {
     ov::Core ie;
 
     std::string property;
-    ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::intel_gpu::uarch_version));
+    ASSERT_NO_THROW(property = ie.get_property(target_device, ov::intel_gpu::uarch_version));
 
     std::cout << "GPU_UARCH_VERSION: " << property << std::endl;
 
@@ -283,7 +283,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricExecutionUnitsCountAndPrintNoThrow)
     ov::Core ie;
 
     int32_t property = 0;
-    ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::intel_gpu::execution_units_count));
+    ASSERT_NO_THROW(property = ie.get_property(target_device, ov::intel_gpu::execution_units_count));
 
     std::cout << "GPU_EXECUTION_UNITS_COUNT: " << property << std::endl;
 
@@ -294,7 +294,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricMemoryStatisticsAndPrintNoThrow) {
     ov::Core ie;
 
     std::map<std::string, uint64_t> properties;
-    ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
+    ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
 
     std::cout << "GPU_MEMORY_STATISTICS: " << std::endl;
     for (const auto& prop : properties) {
@@ -308,16 +308,16 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetPerformanceModeNoThrow) {
     ov::Core ie;
 
     ov::hint::PerformanceMode defaultMode{};
-    ASSERT_NO_THROW(defaultMode = ie.get_property(deviceName, ov::hint::performance_mode));
+    ASSERT_NO_THROW(defaultMode = ie.get_property(target_device, ov::hint::performance_mode));
 
     std::cout << "Default PERFORMANCE_HINT: \"" << defaultMode << "\"" << std::endl;
 
-    ie.set_property(deviceName, ov::hint::performance_mode(ov::hint::PerformanceMode::UNDEFINED));
-    ASSERT_EQ(ov::hint::PerformanceMode::UNDEFINED, ie.get_property(deviceName, ov::hint::performance_mode));
-    ie.set_property(deviceName, ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY));
-    ASSERT_EQ(ov::hint::PerformanceMode::LATENCY, ie.get_property(deviceName, ov::hint::performance_mode));
-    ie.set_property(deviceName, ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT));
-    ASSERT_EQ(ov::hint::PerformanceMode::THROUGHPUT, ie.get_property(deviceName, ov::hint::performance_mode));
+    ie.set_property(target_device, ov::hint::performance_mode(ov::hint::PerformanceMode::UNDEFINED));
ie.get_property(target_device, ov::hint::performance_mode)); + ie.set_property(target_device, ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)); + ASSERT_EQ(ov::hint::PerformanceMode::LATENCY, ie.get_property(target_device, ov::hint::performance_mode)); + ie.set_property(target_device, ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)); + ASSERT_EQ(ov::hint::PerformanceMode::THROUGHPUT, ie.get_property(target_device, ov::hint::performance_mode)); OV_ASSERT_PROPERTY_SUPPORTED(ov::hint::performance_mode); } @@ -326,12 +326,12 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetEnableProfilingNoThrow) { ov::Core ie; bool defaultValue = false; - ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::enable_profiling)); + ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::enable_profiling)); std::cout << "Default PERF_COUNT: " << defaultValue << std::endl; - ie.set_property(deviceName, ov::enable_profiling(true)); - ASSERT_EQ(true, ie.get_property(deviceName, ov::enable_profiling)); + ie.set_property(target_device, ov::enable_profiling(true)); + ASSERT_EQ(true, ie.get_property(target_device, ov::enable_profiling)); OV_ASSERT_PROPERTY_SUPPORTED(ov::enable_profiling); @@ -356,19 +356,19 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetModelPriorityNoThrow) { ov::Core ie; ov::hint::Priority defaultValue; - ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::hint::model_priority)); + ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::hint::model_priority)); std::cout << "Default PERF_COUNT: " << defaultValue << std::endl; - ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::HIGH)); - ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(deviceName, ov::hint::model_priority)); - ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); - ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::LOW)); - ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(deviceName, ov::hint::model_priority)); - ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); - ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::MEDIUM)); - ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(deviceName, ov::hint::model_priority)); - ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); + ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::HIGH)); + ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(target_device, ov::hint::model_priority)); + ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority)); + ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::LOW)); + ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(target_device, ov::hint::model_priority)); + ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority)); + ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::MEDIUM)); + ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(target_device, ov::hint::model_priority)); + ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority)); OV_ASSERT_PROPERTY_SUPPORTED(ov::hint::model_priority); } @@ -377,16 +377,16 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetQueuePriorityNoThrow) { ov::Core ie; ov::hint::Priority defaultValue; - 
ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); + ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::intel_gpu::hint::queue_priority)); std::cout << "Default GPU_QUEUE_PRIORITY: " << defaultValue << std::endl; - ie.set_property(deviceName, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::HIGH)); - ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); - ie.set_property(deviceName, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::LOW)); - ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); - ie.set_property(deviceName, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::MEDIUM)); - ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); + ie.set_property(target_device, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::HIGH)); + ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority)); + ie.set_property(target_device, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::LOW)); + ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority)); + ie.set_property(target_device, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::MEDIUM)); + ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority)); OV_ASSERT_PROPERTY_SUPPORTED(ov::intel_gpu::hint::queue_priority); } @@ -395,16 +395,16 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetThrottleLevelNoThrow) { ov::Core ie; ov::intel_gpu::hint::ThrottleLevel defaultValue; - ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle)); + ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle)); std::cout << "Default GPU_QUEUE_THROTTLE: " << defaultValue << std::endl; - ie.set_property(deviceName, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::HIGH)); - ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::HIGH, ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle)); - ie.set_property(deviceName, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::LOW)); - ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::LOW, ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle)); - ie.set_property(deviceName, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::MEDIUM)); - ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::MEDIUM, ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle)); + ie.set_property(target_device, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::HIGH)); + ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::HIGH, ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle)); + ie.set_property(target_device, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::LOW)); + ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::LOW, ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle)); + ie.set_property(target_device, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::MEDIUM)); + ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::MEDIUM, ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle)); OV_ASSERT_PROPERTY_SUPPORTED(ov::intel_gpu::hint::queue_throttle); } @@ -413,20 +413,20 @@ TEST_P(OVClassGetPropertyTest_GPU, 
CanSetDefaultValueBackToPluginNewAPI) { ov::Core ie; std::vector properties; - ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::supported_properties)); + ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::supported_properties)); std::cout << "SUPPORTED_PROPERTIES:" << std::endl; for (const auto& property : properties) { ov::Any prop; if (property.is_mutable()) { std::cout << "RW: " << property << " "; - ASSERT_NO_THROW(prop = ie.get_property(deviceName, property)); + ASSERT_NO_THROW(prop = ie.get_property(target_device, property)); prop.print(std::cout); std::cout << std::endl; - ASSERT_NO_THROW(ie.set_property(deviceName, {{property, prop}})); + ASSERT_NO_THROW(ie.set_property(target_device, {{property, prop}})); } else { std::cout << "RO: " << property << " "; - ASSERT_NO_THROW(prop = ie.get_property(deviceName, property)); + ASSERT_NO_THROW(prop = ie.get_property(target_device, property)); prop.print(std::cout); std::cout << std::endl; } @@ -446,7 +446,7 @@ TEST_P(OVClassGetMetricTest_GPU_OPTIMAL_BATCH_SIZE, GetMetricAndPrintNoThrow) { unsigned int p; ov::AnyMap _options = {ov::hint::model(simpleNetwork)}; - ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::optimal_batch_size.name(), _options)); + ASSERT_NO_THROW(p = ie.get_property(target_device, ov::optimal_batch_size.name(), _options)); std::cout << "GPU device optimal batch size: " << p << std::endl; @@ -465,7 +465,7 @@ TEST_P(OVClassGetMetricTest_GPU_MAX_BATCH_SIZE_DEFAULT, GetMetricAndPrintNoThrow unsigned int p; ov::AnyMap _options = {ov::hint::model(simpleNetwork)}; - ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::max_batch_size.name(), _options)); + ASSERT_NO_THROW(p = ie.get_property(target_device, ov::max_batch_size.name(), _options)); std::cout << "GPU device max available batch size: " << p << std::endl; @@ -482,7 +482,7 @@ TEST_P(OVClassGetMetricTest_GPU_MAX_BATCH_SIZE_STREAM_DEVICE_MEM, GetMetricAndPr SKIP_IF_CURRENT_TEST_IS_DISABLED() ov::Core ie; unsigned int p; - auto exec_net1 = ie.compile_model(simpleNetwork, deviceName); + auto exec_net1 = ie.compile_model(simpleNetwork, target_device); uint32_t n_streams = 2; int64_t available_device_mem_size = 1073741824; @@ -490,7 +490,7 @@ TEST_P(OVClassGetMetricTest_GPU_MAX_BATCH_SIZE_STREAM_DEVICE_MEM, GetMetricAndPr ov::num_streams(n_streams), ov::intel_gpu::hint::available_device_mem(available_device_mem_size)}; - ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::max_batch_size.name(), _options)); + ASSERT_NO_THROW(p = ie.get_property(target_device, ov::max_batch_size.name(), _options)); std::cout << "GPU device max available batch size: " << p << std::endl; @@ -508,9 +508,9 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_DEFAULT, GetMetricAndPrintNoTh ov::Core ie; std::map p; - auto exec_net = ie.compile_model(simpleNetwork, deviceName); + auto exec_net = ie.compile_model(simpleNetwork, target_device); - ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(p = ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_FALSE(p.empty()); std::cout << "Memory Statistics: " << std::endl; @@ -534,18 +534,18 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTIPLE_NETWORKS, GetMetricAn std::map t1; std::map t2; - auto exec_net1 = ie.compile_model(simpleNetwork, deviceName); + auto exec_net1 = ie.compile_model(simpleNetwork, target_device); - ASSERT_NO_THROW(t1 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(t1 = 
ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_FALSE(t1.empty()); for (auto &&kv : t1) { ASSERT_NE(kv.second, 0); } - auto exec_net2 = ie.compile_model(simpleNetwork, deviceName); + auto exec_net2 = ie.compile_model(simpleNetwork, target_device); - ASSERT_NO_THROW(t2 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(t2 = ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_FALSE(t2.empty()); for (auto &&kv : t2) { @@ -570,24 +570,24 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin ov::Core ie; std::map t1; - ASSERT_NO_THROW(t1 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(t1 = ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_TRUE(t1.empty()); { - auto exec_net1 = ie.compile_model(simpleNetwork, deviceName); + auto exec_net1 = ie.compile_model(simpleNetwork, target_device); std::map t2; - ASSERT_NO_THROW(t2 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(t2 = ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_FALSE(t2.empty()); for (auto &&kv : t2) { ASSERT_NE(kv.second, 0); } { - auto exec_net2 = ie.compile_model(actualNetwork, deviceName); + auto exec_net2 = ie.compile_model(actualNetwork, target_device); std::map t3; - ASSERT_NO_THROW(t3 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(t3 = ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_FALSE(t3.empty()); for (auto &&kv : t3) { @@ -595,7 +595,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin } } std::map t4; - ASSERT_NO_THROW(t4 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(t4 = ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_FALSE(t4.empty()); for (auto &&kv : t4) { @@ -609,7 +609,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin } } std::map t5; - ASSERT_NO_THROW(t5 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(t5 = ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_FALSE(t5.empty()); for (auto &&kv : t5) { @@ -641,9 +641,9 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri networks.emplace_back(simpleNetwork); networks.emplace_back(simpleNetwork); - auto exec_net1 = ie.compile_model(simpleNetwork, deviceName); + auto exec_net1 = ie.compile_model(simpleNetwork, target_device); - ASSERT_NO_THROW(t1 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(t1 = ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_FALSE(t1.empty()); for (auto &&kv : t1) { @@ -653,7 +653,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri for (auto & thread : threads) { thread = std::thread([&](){ auto value = counter++; - exec_net_map[value] = ie.compile_model(networks[value], deviceName); + exec_net_map[value] = ie.compile_model(networks[value], target_device); }); } @@ -663,7 +663,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri } } - ASSERT_NO_THROW(t2 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); + ASSERT_NO_THROW(t2 = ie.get_property(target_device, ov::intel_gpu::memory_statistics)); ASSERT_FALSE(t2.empty()); for (auto &&kv : t2) { diff --git 
a/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/plugin/core_integration.cpp b/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/plugin/core_integration.cpp index 2b52eccfa6e..4c58eb6ab82 100644 --- a/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/plugin/core_integration.cpp +++ b/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/plugin/core_integration.cpp @@ -16,7 +16,7 @@ using namespace BehaviorTestsDefinitions; namespace { -// IE Class Common tests with +// IE Class Common tests with // INSTANTIATE_TEST_SUITE_P( @@ -102,7 +102,7 @@ TEST_P(IEClassGetMetricTest_GPU_DEVICE_TOTAL_MEM_SIZE, GetMetricAndPrintNoThrow) InferenceEngine::Core ie; InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE))); uint64_t t = p; std::cout << "GPU device total memory size: " << t << std::endl; @@ -122,7 +122,7 @@ TEST_P(IEClassGetMetricTest_GPU_OPTIMAL_BATCH_SIZE, GetMetricAndPrintNoThrow) { InferenceEngine::Parameter p; std::map _options = {{"MODEL_PTR", simpleCnnNetwork.getFunction()}}; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(OPTIMAL_BATCH_SIZE), _options).as()); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(OPTIMAL_BATCH_SIZE), _options).as()); unsigned int t = p; std::cout << "GPU device optimal batch size: " << t << std::endl; @@ -142,7 +142,7 @@ TEST_P(IEClassGetMetricTest_GPU_MAX_BATCH_SIZE_DEFAULT, GetMetricAndPrintNoThrow InferenceEngine::Parameter p; std::map _options = {{"MODEL_PTR", simpleCnnNetwork.getFunction()}}; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(MAX_BATCH_SIZE), _options).as()); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(MAX_BATCH_SIZE), _options).as()); uint32_t t = p; std::cout << "GPU device max available batch size: " << t << std::endl; @@ -166,7 +166,7 @@ TEST_P(IEClassGetMetricTest_GPU_MAX_BATCH_SIZE_STREAM_DEVICE_MEM, GetMetricAndPr _options.insert(std::make_pair("GPU_THROUGHPUT_STREAMS", n_streams)); _options.insert(std::make_pair("AVAILABLE_DEVICE_MEM_SIZE", available_device_mem_size)); - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(MAX_BATCH_SIZE), _options).as()); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(MAX_BATCH_SIZE), _options).as()); uint32_t t = p; @@ -186,7 +186,7 @@ TEST_P(IEClassGetMetricTest_GPU_UARCH_VERSION, GetMetricAndPrintNoThrow) { InferenceEngine::Core ie; InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(UARCH_VERSION))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(UARCH_VERSION))); std::string t = p; std::cout << "GPU device uarch: " << t << std::endl; @@ -205,7 +205,7 @@ TEST_P(IEClassGetMetricTest_GPU_EXECUTION_UNITS_COUNT, GetMetricAndPrintNoThrow) InferenceEngine::Core ie; InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT))); int t = p; std::cout << "GPU EUs count: " << t << std::endl; @@ -224,9 +224,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_DEFAULT, GetMetricAndPrintNoTh InferenceEngine::Core ie; InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exec_net = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exec_net = ie.LoadNetwork(simpleCnnNetwork, target_device); - 
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t = p; ASSERT_FALSE(t.empty()); @@ -250,9 +250,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTIPLE_NETWORKS, GetMetricAn InferenceEngine::Core ie; InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, target_device); - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t1 = p; ASSERT_FALSE(t1.empty()); @@ -260,9 +260,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTIPLE_NETWORKS, GetMetricAn ASSERT_NE(kv.second, 0); } - InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(simpleCnnNetwork, target_device); - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t2 = p; ASSERT_FALSE(t2.empty()); @@ -288,14 +288,14 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin InferenceEngine::Core ie; InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t1 = p; ASSERT_TRUE(t1.empty()); { - InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, target_device); - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t2 = p; ASSERT_FALSE(t2.empty()); @@ -303,9 +303,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin ASSERT_NE(kv.second, 0); } { - InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(actualCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(actualCnnNetwork, target_device); - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t3 = p; ASSERT_FALSE(t3.empty()); @@ -313,7 +313,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin ASSERT_NE(kv.second, 0); } } - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t4 = p; ASSERT_FALSE(t4.empty()); @@ -327,7 +327,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin } } } - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t5 = p; ASSERT_FALSE(t5.empty()); @@ -358,9 +358,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri networks.emplace_back(simpleCnnNetwork); networks.emplace_back(simpleCnnNetwork); - InferenceEngine::ExecutableNetwork exec_net1 = 
ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, target_device); - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t1 = p; ASSERT_FALSE(t1.empty()); @@ -371,7 +371,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri for (auto & thread : threads) { thread = std::thread([&](){ auto value = counter++; - exec_net_map[value] = ie.LoadNetwork(networks[value], deviceName); + exec_net_map[value] = ie.LoadNetwork(networks[value], target_device); }); } @@ -381,7 +381,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri } } - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS))); std::map t2 = p; ASSERT_FALSE(t2.empty()); diff --git a/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/plugin/core_threading_tests.cpp b/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/plugin/core_threading_tests.cpp index 3d7348fd7e2..3ff1e4a595f 100644 --- a/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/plugin/core_threading_tests.cpp +++ b/src/tests/functional/plugin/gpu/shared_tests_instances/behavior/plugin/core_threading_tests.cpp @@ -31,7 +31,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork_RemoteContext) { networks.emplace_back(InferenceEngine::CNNNetwork(ngraph::builder::subgraph::makeSplitMultiConvConcat())); auto ocl_instance = std::make_shared(); - ie.SetConfig(config, deviceName); + ie.SetConfig(config, target_device); runParallel([&] () { auto value = counter++; auto remote_context = make_shared_context(ie, CommonTestUtils::DEVICE_GPU, ocl_instance->_context.get()); diff --git a/src/tests/functional/plugin/gpu/shared_tests_instances/skip_tests_config.cpp b/src/tests/functional/plugin/gpu/shared_tests_instances/skip_tests_config.cpp index 7f6f16cf787..94b3160b8b3 100644 --- a/src/tests/functional/plugin/gpu/shared_tests_instances/skip_tests_config.cpp +++ b/src/tests/functional/plugin/gpu/shared_tests_instances/skip_tests_config.cpp @@ -95,5 +95,6 @@ std::vector disabledTestPatterns() { R"(.*smoke_VirtualPlugin_BehaviorTests.*LoadedRemoteContext.*)", // Issue: CVS-88667 - Need to verify hetero interoperability R"(.*nightly_OVClassHeteroExecutableNetworlGetMetricTest.*SUPPORTED_(CONFIG_KEYS|METRICS).*)", + R"(.*VirtualPlugin.*BehaviorTests.*OVHoldersTest.*LoadedTensor.*target_device=AUTO.*)", }; } diff --git a/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/ov_executable_network/get_metric.cpp b/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/ov_executable_network/get_metric.cpp index 46614bd967b..7c41a797d44 100644 --- a/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/ov_executable_network/get_metric.cpp +++ b/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/ov_executable_network/get_metric.cpp @@ -20,14 +20,14 @@ std::pair plugins[] = { }; INSTANTIATE_TEST_SUITE_P(smoke_OVClassImportExportTestP, - OVClassImportExportTestP, + OVClassExecutableNetworkImportExportTestP, ::testing::Values(std::string(CommonTestUtils::DEVICE_MYRIAD), "HETERO:" + std::string(CommonTestUtils::DEVICE_MYRIAD))); #if defined(ENABLE_INTEL_CPU) && ENABLE_INTEL_CPU 
INSTANTIATE_TEST_SUITE_P(smoke_OVClassImportExportTestP_HETERO_CPU, - OVClassImportExportTestP, + OVClassExecutableNetworkImportExportTestP, ::testing::Values("HETERO:" + std::string(CommonTestUtils::DEVICE_MYRIAD) + ",CPU")); #endif diff --git a/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/ov_plugin/core_integration.cpp b/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/ov_plugin/core_integration.cpp index 0b2ed277cfd..359eab9267b 100644 --- a/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/ov_plugin/core_integration.cpp +++ b/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/ov_plugin/core_integration.cpp @@ -22,7 +22,7 @@ std::pair plugins[] = { }; // -// IE Class Common tests with +// IE Class Common tests with // INSTANTIATE_TEST_SUITE_P(OVClassBasicTestP_smoke, OVClassBasicTestP, ::testing::ValuesIn(plugins)); @@ -39,7 +39,7 @@ TEST_P(OVClassNetworkTestP_VPU_GetMetric, smoke_OptimizationCapabilitiesReturnsF ov::Core ie; OV_ASSERT_PROPERTY_SUPPORTED(ov::device::capabilities) std::vector device_capabilities; - ASSERT_NO_THROW(device_capabilities = ie.get_property(deviceName, ov::device::capabilities)); + ASSERT_NO_THROW(device_capabilities = ie.get_property(target_device, ov::device::capabilities)); ASSERT_EQ(device_capabilities.size(), 2); ASSERT_NE(std::find(device_capabilities.begin(), device_capabilities.end(), ov::device::capability::EXPORT_IMPORT), device_capabilities.end()); diff --git a/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/plugin/core_integration.cpp b/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/plugin/core_integration.cpp index b5f207edcee..3ba7c460466 100644 --- a/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/plugin/core_integration.cpp +++ b/src/tests/functional/plugin/myriad/shared_tests_instances/behavior/plugin/core_integration.cpp @@ -21,7 +21,7 @@ std::pair plugins[] = { }; // -// IE Class Common tests with +// IE Class Common tests with // INSTANTIATE_TEST_SUITE_P( @@ -43,7 +43,7 @@ TEST_P(IEClassNetworkTestP_VPU_GetMetric, smoke_OptimizationCapabilitiesReturnsF ASSERT_METRIC_SUPPORTED_IE(METRIC_KEY(OPTIMIZATION_CAPABILITIES)) InferenceEngine::Parameter optimizationCapabilitiesParameter; - ASSERT_NO_THROW(optimizationCapabilitiesParameter = ie.GetMetric(deviceName, METRIC_KEY(OPTIMIZATION_CAPABILITIES))); + ASSERT_NO_THROW(optimizationCapabilitiesParameter = ie.GetMetric(target_device, METRIC_KEY(OPTIMIZATION_CAPABILITIES))); const auto optimizationCapabilities = optimizationCapabilitiesParameter.as>(); ASSERT_EQ(optimizationCapabilities.size(), 2); diff --git a/src/tests/functional/plugin/shared/include/base/behavior_test_utils.hpp b/src/tests/functional/plugin/shared/include/base/behavior_test_utils.hpp index 34ecad74a24..83dcbb04c50 100644 --- a/src/tests/functional/plugin/shared/include/base/behavior_test_utils.hpp +++ b/src/tests/functional/plugin/shared/include/base/behavior_test_utils.hpp @@ -9,51 +9,31 @@ #include "functional_test_utils/plugin_cache.hpp" #include "common_test_utils/file_utils.hpp" #include "openvino/util/file_util.hpp" +#include "functional_test_utils/summary/api_summary.hpp" namespace BehaviorTestsUtils { using namespace CommonTestUtils; -typedef std::tuple< - InferenceEngine::Precision, // Network precision - std::string, // Device name - std::map // Config -> BehaviorBasicParams; +class IEInferRequestTestBase : public ov::test::behavior::APIBaseTest { +private: + void set_api_entity() override 
{ + api_entity = ov::test::utils::ov_entity::ie_infer_request; }; }; -class BehaviorTestsBasic : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { -public: - static std::string getTestCaseName(testing::TestParamInfo obj) { - InferenceEngine::Precision netPrecision; - std::string targetDevice; - std::map configuration; - std::tie(netPrecision, targetDevice, configuration) = obj.param; - std::ostringstream result; - result << "netPRC=" << netPrecision.name() << "_"; - result << "targetDevice=" << targetDevice; - if (!configuration.empty()) { - result << "config=" << configuration; - } - return result.str(); - } +class IEExecutableNetworkTestBase : public ov::test::behavior::APIBaseTest { +private: + void set_api_entity() override { + api_entity = ov::test::utils::ov_entity::ie_executable_network; + }; +}; - void SetUp() override { - SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(netPrecision, targetDevice, configuration) = this->GetParam(); - function = ngraph::builder::subgraph::makeConvPoolRelu(); - } - - void TearDown() override { - if (!configuration.empty()) { - PluginCache::get().reset(); - } - } - - std::shared_ptr ie = PluginCache::get().ie(); - std::shared_ptr function; - InferenceEngine::Precision netPrecision; - std::string targetDevice; - std::map configuration; +class IEPluginTestBase : public ov::test::behavior::APIBaseTest { +private: + void set_api_entity() override { + api_entity = ov::test::utils::ov_entity::ie_plugin; + }; }; typedef std::tuple< @@ -62,13 +42,14 @@ > InferRequestParams; class InferRequestTests : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public IEInferRequestTestBase { public: static std::string getTestCaseName(testing::TestParamInfo obj) { std::string targetDevice; std::map configuration; std::tie(targetDevice, configuration) = obj.param; std::ostringstream result; + std::replace(targetDevice.begin(), targetDevice.end(), ':', '.'); result << "targetDevice=" << targetDevice << "_"; if (!configuration.empty()) { for (auto &configItem : configuration) { @@ -79,19 +60,21 @@ } void SetUp() override { + std::tie(target_device, configuration) = this->GetParam(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(targetDevice, configuration) = this->GetParam(); - function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); + APIBaseTest::SetUp(); + function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); cnnNet = InferenceEngine::CNNNetwork(function); // Load CNNNetwork to target plugins - execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + execNet = ie->LoadNetwork(cnnNet, target_device, configuration); } void TearDown() override { if (!configuration.empty()) { PluginCache::get().reset(); } + APIBaseTest::TearDown(); } protected: @@ -99,8 +82,7 @@ protected: InferenceEngine::ExecutableNetwork execNet; std::shared_ptr ie = PluginCache::get().ie(); std::shared_ptr function; - std::string targetDevice; - std::map configuration; + std::map configuration; }; inline InferenceEngine::Core createIECoreWithTemplate() { @@ -118,7 +100,7 @@ class IEClassNetworkTest : public ov::test::behavior::OVClassNetworkTest { public: InferenceEngine::CNNNetwork actualCnnNetwork, simpleCnnNetwork, multinputCnnNetwork, ksoCnnNetwork; - void SetUp() override { + void SetUp() { SKIP_IF_CURRENT_TEST_IS_DISABLED(); OVClassNetworkTest::SetUp(); // Generic network @@ 
-132,13 +114,73 @@ public: } }; -class IEClassBaseTestP : public IEClassNetworkTest, public ::testing::WithParamInterface { +class IEClassBaseTestP : public IEClassNetworkTest, + public ::testing::WithParamInterface, + public IEPluginTestBase { public: - std::string deviceName; void SetUp() override { + target_device = GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); + APIBaseTest::SetUp(); IEClassNetworkTest::SetUp(); - deviceName = GetParam(); + } +}; + +class IEExecNetClassBaseTestP : public IEClassNetworkTest, + public ::testing::WithParamInterface, + public IEExecutableNetworkTestBase { +public: + void SetUp() override { + target_device = GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED(); + APIBaseTest::SetUp(); + IEClassNetworkTest::SetUp(); + } +}; + +typedef std::tuple< + InferenceEngine::Precision, // Network precision + std::string, // Device name + std::map // Config +> BehaviorBasicParams; + +class BehaviorTestsBasicBase : public testing::WithParamInterface { +public: + static std::string getTestCaseName(testing::TestParamInfo obj) { + InferenceEngine::Precision netPrecision; + std::string targetDevice; + std::map configuration; + std::tie(netPrecision, targetDevice, configuration) = obj.param; + std::replace(targetDevice.begin(), targetDevice.end(), ':', '_'); + std::ostringstream result; + result << "netPRC=" << netPrecision.name() << "_"; + result << "targetDevice=" << targetDevice << "_"; + if (!configuration.empty()) { + result << "config=" << configuration; + } + return result.str(); + } + + std::shared_ptr ie = PluginCache::get().ie(); + std::shared_ptr function; + InferenceEngine::Precision netPrecision; + std::map configuration; +}; + +class BehaviorTestsBasic : public BehaviorTestsBasicBase, + public IEPluginTestBase { +protected: + void SetUp() override { + std::tie(netPrecision, target_device, configuration) = this->GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); + function = ngraph::builder::subgraph::makeConvPoolRelu(); + } + void TearDown() override { + if (!configuration.empty()) { + PluginCache::get().reset(); + } + APIBaseTest::TearDown(); } }; } // namespace BehaviorTestsUtils diff --git a/src/tests/functional/plugin/shared/include/base/ov_behavior_test_utils.hpp b/src/tests/functional/plugin/shared/include/base/ov_behavior_test_utils.hpp index f3924c40699..7cf31707b7c 100644 --- a/src/tests/functional/plugin/shared/include/base/ov_behavior_test_utils.hpp +++ b/src/tests/functional/plugin/shared/include/base/ov_behavior_test_utils.hpp @@ -4,6 +4,13 @@ #pragma once +#include +#include + +#ifdef _WIN32 +#include +#endif + #include #include "ngraph_functions/subgraph_builders.hpp" @@ -11,13 +18,15 @@ #include "common_test_utils/test_common.hpp" #include "common_test_utils/test_constants.hpp" #include "common_test_utils/common_utils.hpp" +#include "common_test_utils/crash_handler.hpp" #include "common_test_utils/file_utils.hpp" -#include "openvino/util/file_util.hpp" #include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/ov_plugin_cache.hpp" #include "functional_test_utils/skip_tests_config.hpp" #include "functional_test_utils/blob_utils.hpp" +#include "functional_test_utils/summary/api_summary.hpp" +#include "openvino/util/file_util.hpp" namespace ov { namespace test { @@ -33,18 +42,87 @@ inline std::shared_ptr getDefaultNGraphFunctionForTheDevice(st return ngraph::builder::subgraph::makeConvPoolRelu(inputShape, ngPrc); } +class APIBaseTest : public CommonTestUtils::TestsCommon { +private: + // place to jump in 
case of a crash + int jmpRes = 0; + // in case of crash jump will be made and work will be continued + const std::unique_ptr crashHandler = std::unique_ptr(new CommonTestUtils::CrashHandler()); + +protected: + std::string target_device = ""; + ov::test::utils::ov_entity api_entity = ov::test::utils::ov_entity::undefined; + ov::test::utils::ApiSummary& api_summary = ov::test::utils::ApiSummary::getInstance(); + +public: + APIBaseTest() = default; + + virtual void set_api_entity() { api_entity = ov::test::utils::ov_entity::undefined; } + + void SetUp() override { + set_api_entity(); + api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::CRASHED); +#ifdef _WIN32 + jmpRes = setjmp(CommonTestUtils::env); +#else + jmpRes = sigsetjmp(CommonTestUtils::env, 0); +#endif + if (jmpRes == CommonTestUtils::JMP_STATUS::ok) { + crashHandler->StartTimer(); + } else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) { + api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::HANGED); + GTEST_FAIL(); + } + } + + void TearDown() override { + if (api_entity == ov::test::utils::ov_entity::undefined) { + set_api_entity(); + } + if (this->HasFailure()) { + api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::FAILED); + } else if (this->IsSkipped()) { + api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::SKIPPED); + } else { + api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::PASSED); + } + } +}; + +class OVInferRequestTestBase : public APIBaseTest { +private: + void set_api_entity() override { + api_entity = ov::test::utils::ov_entity::ov_infer_request; + }; +}; + +class OVCompiledNetworkTestBase : public APIBaseTest { +private: + void set_api_entity() override { + api_entity = ov::test::utils::ov_entity::ov_compiled_model; + }; +}; + +class OVPluginTestBase : public APIBaseTest { +private: + void set_api_entity() override { + api_entity = ov::test::utils::ov_entity::ov_plugin; + }; +}; + typedef std::tuple< - std::string, // Device name + std::string, // Device name ov::AnyMap // Config > InferRequestParams; class OVInferRequestTests : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public OVInferRequestTestBase { public: static std::string getTestCaseName(testing::TestParamInfo obj) { std::string targetDevice; ov::AnyMap configuration; std::tie(targetDevice, configuration) = obj.param; + std::replace(targetDevice.begin(), targetDevice.end(), ':', '.'); std::ostringstream result; result << "targetDevice=" << targetDevice << "_"; if (!configuration.empty()) { @@ -58,21 +136,23 @@ public: } void SetUp() override { + std::tie(target_device, configuration) = this->GetParam(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(targetDevice, configuration) = this->GetParam(); - function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); + APIBaseTest::SetUp(); + function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); ov::AnyMap params; for (auto&& v : configuration) { params.emplace(v.first, v.second); } - execNet = core->compile_model(function, targetDevice, params); + execNet = core->compile_model(function, target_device, params); } void TearDown() override { if (!configuration.empty()) { - utils::PluginCache::get().reset(); + PluginCache::get().reset(); } + APIBaseTest::TearDown(); } protected: @@ 
-95,11 +175,11 @@ inline ov::Core createCoreWithTemplate() { return core; } -class OVClassNetworkTest : public ::testing::Test { +class OVClassNetworkTest { public: std::shared_ptr actualNetwork, simpleNetwork, multinputNetwork, ksoNetwork; - void SetUp() override { + void SetUp() { SKIP_IF_CURRENT_TEST_IS_DISABLED(); // Generic network actualNetwork = ngraph::builder::subgraph::makeSplitConvConcat(); @@ -129,18 +209,33 @@ public: } }; -class OVClassBaseTestP : public OVClassNetworkTest, public ::testing::WithParamInterface { +class OVClassBaseTestP : public OVClassNetworkTest, + public ::testing::WithParamInterface, + public OVPluginTestBase { public: - std::string deviceName; - void SetUp() override { + target_device = GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED(); + APIBaseTest::SetUp(); + // TODO: Remove it after fixing issue 69529 + // w/a for myriad (cann't store 2 caches simultaneously) + PluginCache::get().reset(); + OVClassNetworkTest::SetUp(); + } +}; + +class OVCompiledModelClassBaseTestP : public OVClassNetworkTest, + public ::testing::WithParamInterface, + public OVCompiledNetworkTestBase { +public: + void SetUp() override { + target_device = GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED(); + APIBaseTest::SetUp(); // TODO: Remove it after fixing issue 69529 // w/a for myriad (cann't store 2 caches simultaneously) PluginCache::get().reset(); - - SKIP_IF_CURRENT_TEST_IS_DISABLED(); OVClassNetworkTest::SetUp(); - deviceName = GetParam(); } }; @@ -148,16 +243,17 @@ using PriorityParams = std::tuple< std::string, // Device name ov::AnyMap // device priority Configuration key >; -class OVClassExecutableNetworkGetMetricTest_Priority : public ::testing::Test, public ::testing::WithParamInterface { +class OVClassExecutableNetworkGetMetricTest_Priority : public ::testing::WithParamInterface, + public OVCompiledNetworkTestBase { protected: - std::string deviceName; ov::AnyMap configuration; std::shared_ptr simpleNetwork; public: void SetUp() override { + std::tie(target_device, configuration) = GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(deviceName, configuration) = GetParam(); + APIBaseTest::SetUp(); simpleNetwork = ngraph::builder::subgraph::makeSingleConv(); } }; diff --git a/src/tests/functional/plugin/shared/include/behavior/executable_network/exec_graph_info.hpp b/src/tests/functional/plugin/shared/include/behavior/executable_network/exec_graph_info.hpp index 28ab28a566d..2ab2f5eb73d 100644 --- a/src/tests/functional/plugin/shared/include/behavior/executable_network/exec_graph_info.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/executable_network/exec_graph_info.hpp @@ -18,18 +18,17 @@ namespace ExecutionGraphTests { class ExecGraphUniqueNodeNames : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public BehaviorTestsUtils::IEExecutableNetworkTestBase { public: static std::string getTestCaseName(testing::TestParamInfo obj); void SetUp() override; - void TearDown() override; protected: - std::string targetDevice; std::shared_ptr fnPtr; }; -class ExecGraphSerializationTest : public CommonTestUtils::TestsCommon, public testing::WithParamInterface { +class ExecGraphSerializationTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase, + public testing::WithParamInterface { public: static std::string getTestCaseName(testing::TestParamInfo obj); void SetUp() override; @@ -56,6 +55,6 @@ protected: std::pair compare_docs(const pugi::xml_document &doc1, const pugi::xml_document &doc2); - std::string deviceName, 
m_out_xml_path, m_out_bin_path; + std::string m_out_xml_path, m_out_bin_path; }; } // namespace ExecutionGraphTests diff --git a/src/tests/functional/plugin/shared/include/behavior/executable_network/exec_network_base.hpp b/src/tests/functional/plugin/shared/include/behavior/executable_network/exec_network_base.hpp index b38e2665dcb..b7f4c0f4238 100644 --- a/src/tests/functional/plugin/shared/include/behavior/executable_network/exec_network_base.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/executable_network/exec_network_base.hpp @@ -9,15 +9,16 @@ #include "openvino/core/model.hpp" namespace BehaviorTestsDefinitions { -class ExecutableNetworkBaseTest : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { +class ExecutableNetworkBaseTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase, + public testing::WithParamInterface { public: static std::string getTestCaseName(testing::TestParamInfo obj) { - std::string targetDevice; + std::string target_device; std::map configuration; - std::tie(targetDevice, configuration) = obj.param; + std::tie(target_device, configuration) = obj.param; std::ostringstream result; - result << "targetDevice=" << targetDevice << "_"; + std::replace(target_device.begin(), target_device.end(), ':', '.'); + result << "target_device=" << target_device << "_"; if (!configuration.empty()) { using namespace CommonTestUtils; result << "config=" << configuration; @@ -26,55 +27,49 @@ public: } void SetUp() override { + std::tie(target_device, configuration) = this->GetParam(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(targetDevice, configuration) = this->GetParam(); - ie = PluginCache::get().ie(targetDevice); - function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); + ov::test::behavior::APIBaseTest::SetUp(); + ie = PluginCache::get().ie(target_device); + function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); cnnNet = InferenceEngine::CNNNetwork(function); } - void TearDown() override { - if (!configuration.empty()) { - PluginCache::get().reset(); - } - } - protected: InferenceEngine::CNNNetwork cnnNet; std::shared_ptr ie; std::shared_ptr function; - std::string targetDevice; std::map configuration; }; TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutable) { - ASSERT_NO_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration)); + ASSERT_NO_THROW(auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration)); } TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableWithIncorrectConfig) { std::map incorrectConfig = {{ "abc", "def" }}; - ASSERT_ANY_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, incorrectConfig)); + ASSERT_ANY_THROW(auto execNet = ie->LoadNetwork(cnnNet, target_device, incorrectConfig)); } TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCreateInferRequest) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); ASSERT_NO_THROW(auto req = execNet.CreateInferRequest()); } TEST_P(ExecutableNetworkBaseTest, checkGetExecGraphInfoIsNotNullptr) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); InferenceEngine::CNNNetwork execGraph = execNet.GetExecGraphInfo(); ASSERT_NE(execGraph.getFunction(), nullptr); } 
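
// Editor's note: the ExecutableNetworkBaseTest cases above and below all share one shape --
// build an ExecutableNetwork from the fixture's CNNNetwork, then probe a single API on it.
// A minimal standalone sketch of that flow follows; the IR path "model.xml" and the "CPU"
// device are illustrative placeholders only (the fixtures build their networks in memory
// and take the device from the test parameter instead).
//
// #include <inference_engine.hpp>  // legacy Inference Engine API used by these tests
//
// int main() {
//     InferenceEngine::Core ie;
//     // Read a hypothetical IR; any loadable network works here.
//     InferenceEngine::CNNNetwork cnnNet = ie.ReadNetwork("model.xml");
//     // LoadNetwork compiles the network for the target device.
//     InferenceEngine::ExecutableNetwork execNet = ie.LoadNetwork(cnnNet, "CPU");
//     // A request can only be created from a successfully loaded network.
//     InferenceEngine::InferRequest req = execNet.CreateInferRequest();
//     // The runtime (execution) graph is expected to expose a non-null function.
//     InferenceEngine::CNNNetwork execGraph = execNet.GetExecGraphInfo();
//     return execGraph.getFunction() != nullptr ? 0 : 1;
// }
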
TEST_P(ExecutableNetworkBaseTest, checkGetMetric) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); ASSERT_NO_THROW(execNet.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); } TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckConfig) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); for (const auto& configItem : configuration) { InferenceEngine::Parameter param; ASSERT_NO_THROW(param = execNet.GetConfig(configItem.first)); @@ -84,7 +79,7 @@ TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckCo } TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNet) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice); + auto execNet = ie->LoadNetwork(cnnNet, target_device); std::map config; for (const auto& confItem : configuration) { config.insert({confItem.first, InferenceEngine::Parameter(confItem.second)}); @@ -93,7 +88,7 @@ TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNet) { } TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetWithIncorrectConfig) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice); + auto execNet = ie->LoadNetwork(cnnNet, target_device); std::map incorrectConfig = {{ "abc", "def" }}; std::map config; for (const auto& confItem : incorrectConfig) { @@ -103,7 +98,7 @@ TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetWithIncorrectConfig) { } TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetAndCheckConfigAndCheck) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice); + auto execNet = ie->LoadNetwork(cnnNet, target_device); std::map config; for (const auto& confItem : configuration) { config.insert({confItem.first, InferenceEngine::Parameter(confItem.second)}); @@ -120,7 +115,7 @@ TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetAndCheckConfigAndCheck) { TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworks) { std::vector vec; for (auto i = 0; i < 2; i++) { - ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, targetDevice, configuration))); + ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, target_device, configuration))); ASSERT_NE(nullptr, cnnNet.getFunction()); } } @@ -128,24 +123,24 @@ TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworks) { TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworksAndCheckFunction) { std::vector vec; for (auto i = 0; i < 2; i++) { - ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, targetDevice, configuration))); + ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, target_device, configuration))); ASSERT_NE(nullptr, vec[i].GetExecGraphInfo().getFunction()); ASSERT_NE(vec.begin()->GetExecGraphInfo().getFunction(), vec[i].GetExecGraphInfo().getFunction()); } } TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfo) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); ASSERT_NO_THROW(auto inInfo = execNet.GetInputsInfo()); } TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfo) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); ASSERT_NO_THROW(auto outInfo = execNet.GetOutputsInfo()); } TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, 
configuration); auto inInfo = execNet.GetInputsInfo(); auto inCnnInfo = cnnNet.getInputsInfo(); for (const auto& itemInInfo : inCnnInfo) { @@ -154,7 +149,7 @@ TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) { } TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) { - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); auto outInfo = execNet.GetOutputsInfo(); auto outCnnInfo = cnnNet.getOutputsInfo(); for (const auto& itemOutInfo : outCnnInfo) { @@ -165,7 +160,7 @@ TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) { TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) { InferenceEngine::CNNNetwork execGraph; // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); std::map originalLayersMap; for (const auto &layer : function->get_ops()) { @@ -215,7 +210,7 @@ TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) { TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) { InferenceEngine::CNNNetwork execGraph; // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); std::map originalLayersMap; for (const auto &layer : function->get_ops()) { @@ -278,7 +273,7 @@ TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoSerialization) { InferenceEngine::CNNNetwork execGraph; // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); ASSERT_NO_THROW(execGraph.serialize(out_xml_path, out_bin_path)); CommonTestUtils::removeIRFiles(out_xml_path, out_bin_path); @@ -287,7 +282,7 @@ TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoSerialization) { TEST_P(ExecutableNetworkBaseTest, canExport) { auto ts = CommonTestUtils::GetTimestamp(); std::string modelName = GetTestName().substr(0, CommonTestUtils::maxFileNameLength) + "_" + ts; - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); ASSERT_NO_THROW(execNet.Export(modelName)); ASSERT_TRUE(CommonTestUtils::fileExists(modelName + ".xml")); ASSERT_TRUE(CommonTestUtils::fileExists(modelName + ".bin")); @@ -300,14 +295,29 @@ TEST_P(ExecutableNetworkBaseTest, pluginDoesNotChangeOriginalNetwork) { compare_functions(cnnNet.getFunction(), referenceNetwork); } -using ExecNetSetPrecision = BehaviorTestsUtils::BehaviorTestsBasic; +class ExecNetSetPrecision : public BehaviorTestsUtils::BehaviorTestsBasicBase, + public BehaviorTestsUtils::IEExecutableNetworkTestBase { +protected: + void SetUp() override { + std::tie(netPrecision, target_device, configuration) = this->GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); + function = ngraph::builder::subgraph::makeConvPoolRelu(); + } + void TearDown() override { + if (!configuration.empty()) { + PluginCache::get().reset(); + } + APIBaseTest::TearDown(); + } +}; TEST_P(ExecNetSetPrecision, canSetInputPrecisionForNetwork) { InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::InputsDataMap inputs_info = 
cnnNet.getInputsInfo(); ASSERT_EQ(1u, inputs_info.size()); inputs_info.begin()->second->setPrecision(netPrecision); - ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration)); } TEST_P(ExecNetSetPrecision, canSetOutputPrecisionForNetwork) { @@ -315,7 +325,7 @@ TEST_P(ExecNetSetPrecision, canSetOutputPrecisionForNetwork) { InferenceEngine::OutputsDataMap outputs_info = cnnNet.getOutputsInfo(); ASSERT_EQ(outputs_info.size(), 1u); outputs_info.begin()->second->setPrecision(netPrecision); - ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration)); } TEST_P(ExecutableNetworkBaseTest, loadIncorrectV10Model) { // Skip test according to plugin specific disabledTestPatterns() (if any) @@ -337,7 +347,7 @@ TEST_P(ExecutableNetworkBaseTest, loadIncorrectV10Model) { function->set_friendly_name("SimpleReLU"); } InferenceEngine::CNNNetwork cnnNet(function); - EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); + EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration)); } TEST_P(ExecutableNetworkBaseTest, loadIncorrectV11Model) { @@ -360,7 +370,7 @@ TEST_P(ExecutableNetworkBaseTest, loadIncorrectV11Model) { function->set_friendly_name("SimpleReLU"); } InferenceEngine::CNNNetwork cnnNet(function); - EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); + EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration)); } } // namespace BehaviorTestsDefinitions diff --git a/src/tests/functional/plugin/shared/include/behavior/executable_network/get_metric.hpp b/src/tests/functional/plugin/shared/include/behavior/executable_network/get_metric.hpp index 7ea8b3dca23..9ec864f0c32 100644 --- a/src/tests/functional/plugin/shared/include/behavior/executable_network/get_metric.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/executable_network/get_metric.hpp @@ -8,6 +8,7 @@ #include "base/behavior_test_utils.hpp" #include "common_test_utils/common_utils.hpp" +#include "common_test_utils/file_utils.hpp" #include "common_test_utils/test_assertions.hpp" #ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT @@ -32,18 +33,19 @@ namespace BehaviorTestsDefinitions { } class IEClassExecutableNetworkGetMetricTestForSpecificConfig : + public BehaviorTestsUtils::IEExecutableNetworkTestBase, public BehaviorTestsUtils::IEClassNetworkTest, public ::testing::WithParamInterface>> { protected: - std::string deviceName; std::string configKey; std::string configValue; public: void SetUp() override { - SKIP_IF_CURRENT_TEST_IS_DISABLED(); - IEClassNetworkTest::SetUp(); - deviceName = std::get<0>(GetParam()); + target_device = std::get<0>(GetParam()); std::tie(configKey, configValue) = std::get<1>(GetParam()); + SKIP_IF_CURRENT_TEST_IS_DISABLED(); + ov::test::behavior::APIBaseTest::SetUp(); + IEClassNetworkTest::SetUp(); } }; @@ -51,17 +53,18 @@ public: // Hetero Executable network case // class IEClassHeteroExecutableNetworkGetMetricTest : + public BehaviorTestsUtils::IEExecutableNetworkTestBase, public BehaviorTestsUtils::IEClassNetworkTest, public ::testing::WithParamInterface { protected: - std::string deviceName; std::string heteroDeviceName; public: void SetUp() override { + target_device = GetParam(); + heteroDeviceName = CommonTestUtils::DEVICE_HETERO + std::string(":") + GetParam() + std::string(",") + CommonTestUtils::DEVICE_CPU; SKIP_IF_CURRENT_TEST_IS_DISABLED(); + 
ov::test::behavior::APIBaseTest::SetUp(); IEClassNetworkTest::SetUp(); - deviceName = GetParam(); - heteroDeviceName = CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + std::string(",") + CommonTestUtils::DEVICE_CPU; } }; @@ -70,13 +73,14 @@ public: // ImportExportNetwork // -using IEClassImportExportTestP = BehaviorTestsUtils::IEClassBaseTestP; +using IEClassGetMetricP = BehaviorTestsUtils::IEExecNetClassBaseTestP; +using IEClassImportExportTestP = IEClassGetMetricP; TEST_P(IEClassImportExportTestP, smoke_ImportNetworkThrowsIfNoDeviceName) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); std::stringstream strm; InferenceEngine::ExecutableNetwork executableNetwork; - ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName)); + ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, target_device)); ASSERT_NO_THROW(executableNetwork.Export(strm)); IE_SUPPRESS_DEPRECATED_START @@ -88,9 +92,9 @@ TEST_P(IEClassImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); std::stringstream strm; InferenceEngine::ExecutableNetwork executableNetwork; - ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName)); + ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, target_device)); ASSERT_NO_THROW(executableNetwork.Export(strm)); - ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, deviceName)); + ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, target_device)); ASSERT_NO_THROW(executableNetwork.CreateInferRequest()); } @@ -99,27 +103,28 @@ TEST_P(IEClassImportExportTestP, smoke_ExportUsingFileNameImportFromStreamNoThro InferenceEngine::ExecutableNetwork executableNetwork; std::string fileName{"ExportedNetwork"}; { - ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName)); + ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device)); ASSERT_NO_THROW(executableNetwork.Export(fileName)); } { { std::ifstream strm(fileName, std::ifstream::binary | std::ifstream::in); - ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, deviceName)); + ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, target_device)); } ASSERT_EQ(0, remove(fileName.c_str())); } ASSERT_NO_THROW(executableNetwork.CreateInferRequest()); + CommonTestUtils::removeFile(fileName); } -using IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = BehaviorTestsUtils::IEClassBaseTestP; -using IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = BehaviorTestsUtils::IEClassBaseTestP; -using IEClassExecutableNetworkGetMetricTest_NETWORK_NAME = BehaviorTestsUtils::IEClassBaseTestP; -using IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = BehaviorTestsUtils::IEClassBaseTestP; -using IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported = BehaviorTestsUtils::IEClassBaseTestP; -using IEClassExecutableNetworkGetConfigTest = BehaviorTestsUtils::IEClassBaseTestP; -using IEClassExecutableNetworkSetConfigTest = BehaviorTestsUtils::IEClassBaseTestP; -using IEClassExecutableNetworkGetConfigTest = BehaviorTestsUtils::IEClassBaseTestP; +using IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = IEClassGetMetricP; +using IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = IEClassGetMetricP; +using IEClassExecutableNetworkGetMetricTest_NETWORK_NAME = IEClassGetMetricP; +using 
IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = IEClassGetMetricP; +using IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported = IEClassGetMetricP; +using IEClassExecutableNetworkGetConfigTest = IEClassGetMetricP; +using IEClassExecutableNetworkSetConfigTest = IEClassGetMetricP; +using IEClassExecutableNetworkGetConfigTest = IEClassGetMetricP; // // ExecutableNetwork GetMetric / GetConfig @@ -131,7 +136,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoT InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector configValues = p; @@ -149,7 +154,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS))); std::vector metricValues = p; @@ -167,7 +172,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_NO_THROW(p = exeNetwork.GetMetric(EXEC_NETWORK_METRIC_KEY(NETWORK_NAME))); std::string networkname = p; @@ -181,7 +186,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, G InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_NO_THROW(p = exeNetwork.GetMetric(EXEC_NETWORK_METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS))); unsigned int value = p; @@ -195,7 +200,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow) InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_THROW(p = exeNetwork.GetMetric("unsupported_metric"), InferenceEngine::Exception); } @@ -204,14 +209,14 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector configValues = p; for (auto &&confKey : configValues) { InferenceEngine::Parameter defaultValue; - 
ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey)); + ASSERT_NO_THROW(defaultValue = ie.GetConfig(target_device, confKey)); ASSERT_FALSE(defaultValue.empty()); } } @@ -220,7 +225,7 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_THROW(p = exeNetwork.GetConfig("unsupported_config"), InferenceEngine::Exception); } @@ -229,7 +234,7 @@ TEST_P(IEClassExecutableNetworkSetConfigTest, SetConfigThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_THROW(exeNetwork.SetConfig({{"unsupported_config", "some_value"}}), InferenceEngine::Exception); } @@ -238,7 +243,7 @@ TEST_P(IEClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_NO_THROW(exeNetwork.SetConfig({{configKey, configValue}})); ASSERT_NO_THROW(p = exeNetwork.GetConfig(configKey)); @@ -249,7 +254,7 @@ TEST_P(IEClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) { TEST_P(IEClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngine::Exception); } @@ -258,10 +263,10 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigNoEmptyNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector devConfigValues = p; - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device); ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector execConfigValues = p; @@ -280,14 +285,14 @@ using IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = IEClas using IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS = IEClassHeteroExecutableNetworkGetMetricTest; using IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME = IEClassHeteroExecutableNetworkGetMetricTest; using IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK = IEClassHeteroExecutableNetworkGetMetricTest; -using IEClassExecutableNetworkGetMetricTest = BehaviorTestsUtils::IEClassBaseTestP; +using IEClassExecutableNetworkGetMetricTest = IEClassGetMetricP; 
TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter pHetero, pDevice; InferenceEngine::ExecutableNetwork heteroExeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName); - InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, target_device); ASSERT_NO_THROW(pHetero = heteroExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(pDevice = deviceExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); @@ -320,7 +325,7 @@ TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricN InferenceEngine::Parameter pHetero, pDevice; InferenceEngine::ExecutableNetwork heteroExeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName); - InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName); + InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, target_device); ASSERT_NO_THROW(pHetero = heteroExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS))); ASSERT_NO_THROW(pDevice = deviceExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS))); @@ -369,13 +374,13 @@ TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK, GetMetricNoT InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - setHeteroNetworkAffinity(deviceName); + setHeteroNetworkAffinity(target_device); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName); ASSERT_NO_THROW(p = exeNetwork.GetConfig("TARGET_FALLBACK")); std::string targets = p; - auto expectedTargets = deviceName + "," + CommonTestUtils::DEVICE_CPU; + auto expectedTargets = target_device + "," + CommonTestUtils::DEVICE_CPU; std::cout << "Exe network fallback targets: " << targets << std::endl; ASSERT_EQ(expectedTargets, targets); diff --git a/src/tests/functional/plugin/shared/include/behavior/executable_network/locale.hpp b/src/tests/functional/plugin/shared/include/behavior/executable_network/locale.hpp index e857f7339cf..a79102f8e65 100644 --- a/src/tests/functional/plugin/shared/include/behavior/executable_network/locale.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/executable_network/locale.hpp @@ -5,9 +5,11 @@ #pragma once #include "common_test_utils/test_common.hpp" +#include "common_test_utils/file_utils.hpp" #include "functional_test_utils/plugin_cache.hpp" #include "ngraph_functions/subgraph_builders.hpp" -#include "common_test_utils/file_utils.hpp" + +#include "base/behavior_test_utils.hpp" namespace BehaviorTestsDefinitions { @@ -16,13 +18,12 @@ typedef std::tuple< std::string> // Target device name LocaleParams; -class CustomLocaleTest : public CommonTestUtils::TestsCommon, +class CustomLocaleTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase, public ::testing::WithParamInterface { protected: std::shared_ptr function; std::string localeName; std::string testName; - std::string deviceName; void SetUp() override; public: diff --git a/src/tests/functional/plugin/shared/include/behavior/infer_request/callback.hpp b/src/tests/functional/plugin/shared/include/behavior/infer_request/callback.hpp index 13a33aeb988..c34ed68e01d 100644 --- a/src/tests/functional/plugin/shared/include/behavior/infer_request/callback.hpp +++ 
b/src/tests/functional/plugin/shared/include/behavior/infer_request/callback.hpp @@ -118,7 +118,7 @@ TEST_P(InferRequestCallbackTests, ReturnResultNotReadyFromWaitInAsyncModeForTooS function = SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38); cnnNet = InferenceEngine::CNNNetwork(function); // Load CNNNetwork to target plugins - execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest InferenceEngine::InferRequest req; ASSERT_NO_THROW(req = execNet.CreateInferRequest()); @@ -145,7 +145,7 @@ TEST_P(InferRequestCallbackTests, ImplDoseNotCopyCallback) { // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() InferenceEngine::CNNNetwork cnnNet(function); - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); auto req = execNet.CreateInferRequest(); { auto somePtr = std::make_shared(42); diff --git a/src/tests/functional/plugin/shared/include/behavior/infer_request/cancellation.hpp b/src/tests/functional/plugin/shared/include/behavior/infer_request/cancellation.hpp index 905b8a33025..c393a8e9958 100644 --- a/src/tests/functional/plugin/shared/include/behavior/infer_request/cancellation.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/infer_request/cancellation.hpp @@ -10,18 +10,7 @@ namespace BehaviorTestsDefinitions { -class InferRequestCancellationTests : public BehaviorTestsUtils::InferRequestTests { -public: - void SetUp() override { - // Skip test according to plugin specific disabledTestPatterns() (if any) - SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(targetDevice, configuration) = this->GetParam(); - function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice, {1, 3, 640, 640}); - cnnNet = InferenceEngine::CNNNetwork(function); - // Load CNNNetwork to target plugins - execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); - } -}; +using InferRequestCancellationTests = BehaviorTestsUtils::InferRequestTests; TEST_P(InferRequestCancellationTests, canCancelAsyncRequest) { // Create InferRequest diff --git a/src/tests/functional/plugin/shared/include/behavior/infer_request/config.hpp b/src/tests/functional/plugin/shared/include/behavior/infer_request/config.hpp index cb579ce0364..558bdab4c21 100644 --- a/src/tests/functional/plugin/shared/include/behavior/infer_request/config.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/infer_request/config.hpp @@ -18,35 +18,37 @@ typedef std::tuple< > InferRequestParams; class InferRequestConfigTest : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public BehaviorTestsUtils::IEInferRequestTestBase { public: void SetUp() override { + std::tie(streamExecutorNumber, target_device, configuration) = this->GetParam(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(streamExecutorNumber, targetDevice, configuration) = this->GetParam(); + APIBaseTest::SetUp(); // Create CNNNetwork from ngrpah::Function - function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); + function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); cnnNet = InferenceEngine::CNNNetwork(function); } - static std::string getTestCaseName(testing::TestParamInfo obj) { - std::string targetDevice; - size_t streamExecutorNumber; - std::map 
configuration; - std::tie(streamExecutorNumber, targetDevice, configuration) = obj.param; - std::ostringstream result; - result << "targetDevice=" << targetDevice << "_"; - result << "streamExecutorNumber=" << targetDevice << "_"; - if (!configuration.empty()) { - result << "config=" << configuration; - } - return result.str(); - } - void TearDown() override { if (!configuration.empty()) { PluginCache::get().reset(); } + APIBaseTest::TearDown(); + } + + static std::string getTestCaseName(testing::TestParamInfo obj) { + std::string target_device; + size_t streamExecutorNumber; + std::map configuration; + std::tie(streamExecutorNumber, target_device, configuration) = obj.param; + std::ostringstream result; + result << "target_device=" << target_device << "_"; + result << "streamExecutorNumber=" << target_device << "_"; + if (!configuration.empty()) { + result << "config=" << configuration; + } + return result.str(); } protected: @@ -54,20 +56,22 @@ protected: InferenceEngine::ExecutableNetwork execNet; std::shared_ptr ie = PluginCache::get().ie(); std::shared_ptr function; - std::string targetDevice; std::map configuration; size_t streamExecutorNumber; + void set_api_entity() override { api_entity = ov::test::utils::ov_entity::ie_infer_request; } + inline InferenceEngine::InferRequest createInferRequestWithConfig() { // Load config configuration.insert({CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS), CONFIG_VALUE(YES)}); - if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { - ie->SetConfig(configuration, targetDevice); + if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) { + ie->SetConfig(configuration, target_device); } // Load CNNNetwork to target plugins - execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + execNet = ie->LoadNetwork(cnnNet, target_device, configuration); auto req = execNet.CreateInferRequest(); return req; } @@ -76,9 +80,10 @@ protected: TEST_P(InferRequestConfigTest, canSetExclusiveAsyncRequests) { ASSERT_EQ(0ul, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_NO_THROW(createInferRequestWithConfig()); - if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { + if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) { ASSERT_EQ(streamExecutorNumber, InferenceEngine::executorManager()->getExecutorsNumber()); } } @@ -86,9 +91,10 @@ TEST_P(InferRequestConfigTest, canSetExclusiveAsyncRequests) { TEST_P(InferRequestConfigTest, withoutExclusiveAsyncRequests) { ASSERT_EQ(0u, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_NO_THROW(createInferRequestWithConfig()); - if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_MULTI) == 
std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { + if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) { ASSERT_EQ(streamExecutorNumber, InferenceEngine::executorManager()->getExecutorsNumber()); } } @@ -101,20 +107,21 @@ TEST_P(InferRequestConfigTest, ReusableCPUStreamsExecutor) { // Load config std::map config = {{CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS), CONFIG_VALUE(NO)}}; config.insert(configuration.begin(), configuration.end()); - if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { - ASSERT_NO_THROW(ie->SetConfig(config, targetDevice)); + if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) { + ASSERT_NO_THROW(ie->SetConfig(config, target_device)); } // Load CNNNetwork to target plugins - execNet = ie->LoadNetwork(cnnNet, targetDevice, config); + execNet = ie->LoadNetwork(cnnNet, target_device, config); execNet.CreateInferRequest(); - if ((targetDevice == CommonTestUtils::DEVICE_MYRIAD) || - (targetDevice == CommonTestUtils::DEVICE_KEEMBAY)) { + if ((target_device == CommonTestUtils::DEVICE_MYRIAD) || + (target_device == CommonTestUtils::DEVICE_KEEMBAY)) { ASSERT_EQ(1u, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_EQ(0u, InferenceEngine::executorManager()->getIdleCPUStreamsExecutorsNumber()); - } else if ((targetDevice == CommonTestUtils::DEVICE_AUTO) || - (targetDevice == CommonTestUtils::DEVICE_MULTI)) { + } else if ((target_device == CommonTestUtils::DEVICE_AUTO) || + (target_device == CommonTestUtils::DEVICE_MULTI)) { } else { ASSERT_EQ(0u, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_GE(2u, InferenceEngine::executorManager()->getIdleCPUStreamsExecutorsNumber()); diff --git a/src/tests/functional/plugin/shared/include/behavior/infer_request/dynamic_batch.hpp b/src/tests/functional/plugin/shared/include/behavior/infer_request/dynamic_batch.hpp index a89cfa4b546..0c8cb974bb2 100644 --- a/src/tests/functional/plugin/shared/include/behavior/infer_request/dynamic_batch.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/infer_request/dynamic_batch.hpp @@ -21,7 +21,7 @@ typedef std::tuple< > dynamicBatchTestParams; class DynamicBatchTest : virtual public LayerTestsUtils::LayerTestsCommon, - public testing::WithParamInterface { + public testing::WithParamInterface { private: bool run_async = false; size_t max_batch_size = 0; diff --git a/src/tests/functional/plugin/shared/include/behavior/infer_request/io_blob.hpp b/src/tests/functional/plugin/shared/include/behavior/infer_request/io_blob.hpp index ce636069130..9d3e0217cf4 100644 --- a/src/tests/functional/plugin/shared/include/behavior/infer_request/io_blob.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/infer_request/io_blob.hpp @@ -11,8 +11,8 @@ #include "shared_test_classes/subgraph/basic_lstm.hpp" namespace BehaviorTestsDefinitions { -using 
InferRequestIOBBlobTest = BehaviorTestsUtils::InferRequestTests; using namespace CommonTestUtils; +using InferRequestIOBBlobTest = BehaviorTestsUtils::InferRequestTests; TEST_P(InferRequestIOBBlobTest, CanCreateInferRequest) { // Create InferRequest @@ -331,16 +331,25 @@ TEST_P(InferRequestIOBBlobTest, canInferWithGetOut) { ASSERT_NO_THROW(InferenceEngine::Blob::Ptr outputBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first)); } -class InferRequestIOBBlobSetPrecisionTest : public BehaviorTestsUtils::BehaviorTestsBasic { -public: - void SetUp() override { - SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(netPrecision, targetDevice, configuration) = this->GetParam(); - function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); - cnnNet = InferenceEngine::CNNNetwork(function); - execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); - } +class InferRequestIOBBlobSetPrecisionTest : public BehaviorTestsUtils::BehaviorTestsBasicBase, + public BehaviorTestsUtils::IEInferRequestTestBase { protected: + void SetUp() override { + std::tie(netPrecision, target_device, configuration) = this->GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); + function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); + cnnNet = InferenceEngine::CNNNetwork(function); + execNet = ie->LoadNetwork(cnnNet, target_device, configuration); + } + + void TearDown() override { + if (!configuration.empty()) { + PluginCache::get().reset(); + } + APIBaseTest::TearDown(); + } + InferenceEngine::ExecutableNetwork execNet; InferenceEngine::CNNNetwork cnnNet; }; @@ -386,16 +395,16 @@ typedef std::tuple< > InferRequestIOBBlobSetLayoutParams; class InferRequestIOBBlobSetLayoutTest : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public ov::test::behavior::APIBaseTest { public: static std::string getTestCaseName(testing::TestParamInfo obj) { InferenceEngine::Layout layout; - std::string targetDevice; + std::string target_device; std::map configuration; - std::tie(layout, targetDevice, configuration) = obj.param; + std::tie(layout, target_device, configuration) = obj.param; std::ostringstream result; result << "layout=" << layout << "_"; - result << "targetDevice=" << targetDevice << "_"; + result << "target_device=" << target_device << "_"; if (!configuration.empty()) { result << "config=" << configuration; } @@ -403,17 +412,18 @@ public: } void SetUp() override { + std::tie(layout, target_device, configuration) = this->GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(layout, targetDevice, configuration) = this->GetParam(); function = ngraph::builder::subgraph::makeConvPoolRelu(); cnnNet = InferenceEngine::CNNNetwork(function); - execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + execNet = ie->LoadNetwork(cnnNet, target_device, configuration); } void TearDown() override { if (!configuration.empty()) { PluginCache::get().reset(); } + APIBaseTest::SetUp(); } std::shared_ptr ie = PluginCache::get().ie(); @@ -421,7 +431,6 @@ public: InferenceEngine::Layout layout; InferenceEngine::CNNNetwork cnnNet; InferenceEngine::ExecutableNetwork execNet; - std::string targetDevice; std::map configuration; }; diff --git a/src/tests/functional/plugin/shared/include/behavior/infer_request/memory_states.hpp b/src/tests/functional/plugin/shared/include/behavior/infer_request/memory_states.hpp index c8d42203692..98c10b7225d 100644 --- a/src/tests/functional/plugin/shared/include/behavior/infer_request/memory_states.hpp +++ 
b/src/tests/functional/plugin/shared/include/behavior/infer_request/memory_states.hpp @@ -6,7 +6,7 @@ #include "common_test_utils/test_common.hpp" -#include +#include "base/behavior_test_utils.hpp" namespace BehaviorTestsDefinitions { typedef std::tuple< @@ -16,7 +16,7 @@ typedef std::tuple< std::map> // device configuration memoryStateParams; -class InferRequestVariableStateTest : public CommonTestUtils::TestsCommon, +class InferRequestVariableStateTest : public BehaviorTestsUtils::IEInferRequestTestBase, public testing::WithParamInterface { protected: InferenceEngine::CNNNetwork net; diff --git a/src/tests/functional/plugin/shared/include/behavior/infer_request/perf_counters.hpp b/src/tests/functional/plugin/shared/include/behavior/infer_request/perf_counters.hpp index 41b6a92f385..bb262254fe7 100644 --- a/src/tests/functional/plugin/shared/include/behavior/infer_request/perf_counters.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/infer_request/perf_counters.hpp @@ -10,15 +10,15 @@ namespace BehaviorTestsDefinitions { class InferRequestPerfCountersTest : public BehaviorTestsUtils::InferRequestTests { public: void SetUp() override { + std::tie(target_device, configuration) = this->GetParam(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(targetDevice, configuration) = this->GetParam(); - ie = PluginCache::get().ie(targetDevice); - function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); + APIBaseTest::SetUp(); + function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); cnnNet = InferenceEngine::CNNNetwork(function); configuration.insert({ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }); // Load CNNNetwork to target plugins - execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + execNet = ie->LoadNetwork(cnnNet, target_device, configuration); } }; diff --git a/src/tests/functional/plugin/shared/include/behavior/infer_request/set_blob_by_type.hpp b/src/tests/functional/plugin/shared/include/behavior/infer_request/set_blob_by_type.hpp index 7259658c4d1..d59541637bb 100644 --- a/src/tests/functional/plugin/shared/include/behavior/infer_request/set_blob_by_type.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/infer_request/set_blob_by_type.hpp @@ -19,13 +19,14 @@ using InferRequestSetBlobByTypeParams = std::tuple< >; class InferRequestSetBlobByType : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public BehaviorTestsUtils::IEInferRequestTestBase { public: static std::string getTestCaseName(testing::TestParamInfo obj) { FuncTestUtils::BlobType BlobType; std::string targetDevice; std::map configuration; std::tie(BlobType, targetDevice, configuration) = obj.param; + std::replace(targetDevice.begin(), targetDevice.end(), ':', '.'); std::ostringstream result; result << "BlobType=" << BlobType << "_"; @@ -35,14 +36,15 @@ public: } void SetUp() override { + std::map config; + std::tie(blobType, target_device, config) = this->GetParam(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::map config; - std::tie(blobType, targetDevice, config) = this->GetParam(); + APIBaseTest::SetUp(); std::shared_ptr function = ngraph::builder::subgraph::makeConvPoolRelu( {4, 3, 6, 8}, ngraph::element::Type_t::u8); InferenceEngine::CNNNetwork cnnNetwork(function); - executableNetwork = ie->LoadNetwork(cnnNetwork, 
targetDevice, config); + executableNetwork = ie->LoadNetwork(cnnNetwork, target_device, config); } protected: @@ -52,18 +54,18 @@ protected: return true; case FuncTestUtils::BlobType::Compound: case FuncTestUtils::BlobType::I420: -// case FuncTestUtils::BlobType::Remote: + case FuncTestUtils::BlobType::Remote: case FuncTestUtils::BlobType::NV12: return false; case FuncTestUtils::BlobType::Batched: { - std::vector supported_metrics = ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_METRICS)); + std::vector supported_metrics = ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)); if (std::find(supported_metrics.begin(), supported_metrics.end(), METRIC_KEY(OPTIMIZATION_CAPABILITIES)) == supported_metrics.end()) { return false; } std::vector optimization_caps = - ie->GetMetric(targetDevice, METRIC_KEY(OPTIMIZATION_CAPABILITIES)); + ie->GetMetric(target_device, METRIC_KEY(OPTIMIZATION_CAPABILITIES)); return std::find(optimization_caps.begin(), optimization_caps.end(), METRIC_VALUE(BATCHED_BLOB)) != optimization_caps.end(); } @@ -72,7 +74,6 @@ protected: } } - std::string targetDevice; FuncTestUtils::BlobType blobType; InferenceEngine::ExecutableNetwork executableNetwork; std::shared_ptr ie = PluginCache::get().ie(); diff --git a/src/tests/functional/plugin/shared/include/behavior/infer_request/set_io_blob_precision.hpp b/src/tests/functional/plugin/shared/include/behavior/infer_request/set_io_blob_precision.hpp index 7649c28cb33..4a0c8472c64 100644 --- a/src/tests/functional/plugin/shared/include/behavior/infer_request/set_io_blob_precision.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/infer_request/set_io_blob_precision.hpp @@ -23,7 +23,8 @@ using SetBlobParams = std::tuple; // Device name -class SetBlobTest : public testing::WithParamInterface, virtual public LayerTestsUtils::LayerTestsCommon { +class SetBlobTest : public testing::WithParamInterface, + virtual public LayerTestsUtils::LayerTestsCommon { public: static std::string getTestCaseName(testing::TestParamInfo obj); void Infer() override; diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/exec_graph_info.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/exec_graph_info.hpp index 90b524a5165..944f973ecb4 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/exec_graph_info.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/exec_graph_info.hpp @@ -25,13 +25,14 @@ typedef std::tuple< > OVExecGraphImportExportTestParams; class OVExecGraphImportExportTest : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public OVCompiledNetworkTestBase { public: static std::string getTestCaseName(testing::TestParamInfo obj) { ov::element::Type_t elementType; std::string targetDevice; ov::AnyMap configuration; std::tie(elementType, targetDevice, configuration) = obj.param; + std::replace(targetDevice.begin(), targetDevice.end(), ':', '.'); std::ostringstream result; result << "targetDevice=" << targetDevice << "_"; result << "elementType=" << elementType << "_"; @@ -48,27 +49,28 @@ class OVExecGraphImportExportTest : public testing::WithParamInterfaceGetParam(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(elementType, targetDevice, configuration) = this->GetParam(); + APIBaseTest::SetUp(); } void TearDown() override { if (!configuration.empty()) { utils::PluginCache::get().reset(); } + 
APIBaseTest::TearDown(); } protected: std::shared_ptr core = utils::PluginCache::get().core(); - std::string targetDevice; ov::AnyMap configuration; ov::element::Type_t elementType; std::shared_ptr function; }; TEST_P(OVExecGraphImportExportTest, importExportedFunction) { - if (targetDevice == "MULTI" || targetDevice == "AUTO") { + if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) { GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; } @@ -96,12 +98,12 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunction) { ngraph::ParameterVector{param1, param2}); function->set_friendly_name("SingleRuLU"); } - execNet = core->compile_model(function, targetDevice, configuration); + execNet = core->compile_model(function, target_device, configuration); std::stringstream strm; execNet.export_model(strm); - ov::CompiledModel importedExecNet = core->import_model(strm, targetDevice, configuration); + ov::CompiledModel importedExecNet = core->import_model(strm, target_device, configuration); EXPECT_EQ(function->inputs().size(), 2); EXPECT_EQ(function->inputs().size(), importedExecNet.inputs().size()); EXPECT_THROW(importedExecNet.input(), ov::Exception); @@ -151,7 +153,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunction) { } TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) { - if (targetDevice == "MULTI" || targetDevice == "AUTO") { + if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) { GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; } @@ -167,11 +169,11 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) { function->set_friendly_name("ParamResult"); } - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); std::stringstream strm; execNet.export_model(strm); - ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration); + ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration); EXPECT_EQ(function->inputs().size(), 1); EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size()); EXPECT_NO_THROW(importedCompiledModel.input()); @@ -191,7 +193,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) { } TEST_P(OVExecGraphImportExportTest, importExportedFunctionConstantResultOnly) { - if (targetDevice == "MULTI" || targetDevice == "AUTO") { + if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) { GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; } @@ -207,11 +209,11 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunctionConstantResultOnly) { function->set_friendly_name("ConstResult"); } - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); std::stringstream strm; execNet.export_model(strm); - ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration); + ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration); EXPECT_EQ(function->inputs().size(), 0); EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size()); EXPECT_THROW(importedCompiledModel.input(), ov::Exception); @@ -286,20 +288,20 @@ 
TEST_P(OVExecGraphImportExportTest, readFromV10IR) { EXPECT_NO_THROW(function->input("in1")); // remove if read_model does not change function names EXPECT_NO_THROW(function->output("round")); // remove if read_model does not change function names - ov::CompiledModel execNet = core->compile_model(function, targetDevice, configuration); + ov::CompiledModel execNet = core->compile_model(function, target_device, configuration); EXPECT_EQ(execNet.inputs().size(), 1); EXPECT_EQ(execNet.outputs().size(), 1); EXPECT_NO_THROW(execNet.input("in1")); EXPECT_NO_THROW(execNet.output("round")); - if (targetDevice == "MULTI" || targetDevice == "AUTO") { + if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) { GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; } std::stringstream strm; execNet.export_model(strm); - ov::CompiledModel importedExecNet = core->import_model(strm, targetDevice, configuration); + ov::CompiledModel importedExecNet = core->import_model(strm, target_device, configuration); EXPECT_EQ(importedExecNet.inputs().size(), 1); EXPECT_EQ(importedExecNet.outputs().size(), 1); EXPECT_NO_THROW(importedExecNet.input("in1")); @@ -327,7 +329,7 @@ static std::map any_copy(const ov::AnyMap& params) { } TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) { - if (targetDevice == "MULTI" || targetDevice == "AUTO") { + if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) { GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; } @@ -356,12 +358,12 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) { ngraph::ParameterVector{param1, param2}); function->set_friendly_name("SingleReLU"); } - execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), targetDevice, any_copy(configuration)); + execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), target_device, any_copy(configuration)); std::stringstream strm; execNet.Export(strm); - ov::CompiledModel importedExecNet = core->import_model(strm, targetDevice, configuration); + ov::CompiledModel importedExecNet = core->import_model(strm, target_device, configuration); EXPECT_EQ(function->inputs().size(), 2); EXPECT_EQ(function->inputs().size(), importedExecNet.inputs().size()); EXPECT_THROW(importedExecNet.input(), ov::Exception); @@ -392,7 +394,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) { } TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly) { - if (targetDevice == "MULTI" || targetDevice == "AUTO") { + if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) { GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; } @@ -410,7 +412,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly) ngraph::ParameterVector{param}); function->set_friendly_name("ParamResult"); } - execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), targetDevice, any_copy(configuration)); + execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), target_device, any_copy(configuration)); auto inputPrecision = InferenceEngine::details::convertPrecision(execNet.GetInputsInfo().at("param")->getPrecision()); auto outputPrecision = InferenceEngine::details::convertPrecision(execNet.GetOutputsInfo().at("param")->getPrecision()); @@ -418,7 +420,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly) std::stringstream strm; 
execNet.Export(strm); - ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration); + ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration); EXPECT_EQ(function->inputs().size(), 1); EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size()); EXPECT_NO_THROW(importedCompiledModel.input()); @@ -438,7 +440,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly) } TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) { - if (targetDevice == "MULTI" || targetDevice == "AUTO") { + if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) { GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; } @@ -456,14 +458,14 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) { ngraph::ParameterVector{}); function->set_friendly_name("ConstResult"); } - execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), targetDevice, any_copy(configuration)); + execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), target_device, any_copy(configuration)); auto outputPrecision = InferenceEngine::details::convertPrecision(execNet.GetOutputsInfo().at("constant")->getPrecision()); std::stringstream strm; execNet.Export(strm); - ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration); + ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration); EXPECT_EQ(function->inputs().size(), 0); EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size()); EXPECT_THROW(importedCompiledModel.input(), ov::Exception); @@ -483,7 +485,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) { } TEST_P(OVExecGraphImportExportTest, ieImportExportedFunction) { - if (targetDevice == "MULTI" || targetDevice == "AUTO") { + if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) { GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; } @@ -512,12 +514,12 @@ TEST_P(OVExecGraphImportExportTest, ieImportExportedFunction) { ngraph::ParameterVector{param1, param2}); function->set_friendly_name("SingleReLU"); } - execNet = core->compile_model(function, targetDevice, configuration); + execNet = core->compile_model(function, target_device, configuration); std::stringstream strm; execNet.export_model(strm); - InferenceEngine::ExecutableNetwork importedExecNet = ie->ImportNetwork(strm, targetDevice, any_copy(configuration)); + InferenceEngine::ExecutableNetwork importedExecNet = ie->ImportNetwork(strm, target_device, any_copy(configuration)); EXPECT_EQ(function->inputs().size(), 2); EXPECT_EQ(function->inputs().size(), importedExecNet.GetInputsInfo().size()); EXPECT_NO_THROW(importedExecNet.GetInputsInfo()["param1"]); diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/exec_network_base.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/exec_network_base.hpp index a7cc6529f6a..b648a6dee87 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/exec_network_base.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/exec_network_base.hpp @@ -17,12 +17,14 @@ namespace test { namespace behavior { class OVExecutableNetworkBaseTest : public testing::WithParamInterface, - public 
CommonTestUtils::TestsCommon { + public OVCompiledNetworkTestBase { public: static std::string getTestCaseName(testing::TestParamInfo obj) { std::string targetDevice; ov::AnyMap configuration; std::tie(targetDevice, configuration) = obj.param; + std::replace(targetDevice.begin(), targetDevice.end(), ':', '.'); + std::ostringstream result; result << "targetDevice=" << targetDevice << "_"; if (!configuration.empty()) { @@ -36,16 +38,18 @@ public: } void SetUp() override { + std::tie(target_device, configuration) = this->GetParam(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(targetDevice, configuration) = this->GetParam(); - function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); + APIBaseTest::SetUp(); + function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); } void TearDown() override { if (!configuration.empty()) { utils::PluginCache::get().reset(); } + APIBaseTest::TearDown(); } bool compareTensors(const ov::Tensor& t1, const ov::Tensor& t2) { @@ -70,13 +74,14 @@ public: protected: std::shared_ptr core = utils::PluginCache::get().core(); - std::string targetDevice; ov::AnyMap configuration; std::shared_ptr function; + + void set_api_entity() override { api_entity = ov::test::utils::ov_entity::ov_compiled_model; } }; TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutable) { - EXPECT_NO_THROW(auto execNet = core->compile_model(function, targetDevice, configuration)); + EXPECT_NO_THROW(auto execNet = core->compile_model(function, target_device, configuration)); } TEST(OVExecutableNetworkBaseTest, smoke_LoadNetworkToDefaultDeviceNoThrow) { @@ -88,27 +93,27 @@ TEST(OVExecutableNetworkBaseTest, smoke_LoadNetworkToDefaultDeviceNoThrow) { TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableWithIncorrectConfig) { ov::AnyMap incorrectConfig = {{"abc", "def"}}; - EXPECT_ANY_THROW(auto execNet = core->compile_model(function, targetDevice, incorrectConfig)); + EXPECT_ANY_THROW(auto execNet = core->compile_model(function, target_device, incorrectConfig)); } TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCreateInferRequest) { - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); EXPECT_NO_THROW(auto req = execNet.create_infer_request()); } TEST_P(OVExecutableNetworkBaseTest, checkGetExecGraphInfoIsNotNullptr) { - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); auto execGraph = execNet.get_runtime_model(); EXPECT_NE(execGraph, nullptr); } TEST_P(OVExecutableNetworkBaseTest, checkGetMetric) { - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); EXPECT_NO_THROW(execNet.get_property(ov::supported_properties)); } TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckConfig) { - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); for (const auto& configItem : configuration) { ov::Any param; EXPECT_NO_THROW(param = execNet.get_property(configItem.first)); @@ -118,7 +123,7 @@ TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheck } 
TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNet) { - auto execNet = core->compile_model(function, targetDevice); + auto execNet = core->compile_model(function, target_device); std::map config; for (const auto& confItem : configuration) { config.emplace(confItem.first, confItem.second); @@ -127,7 +132,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNet) { } TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetWithIncorrectConfig) { - auto execNet = core->compile_model(function, targetDevice); + auto execNet = core->compile_model(function, target_device); std::map incorrectConfig = {{"abc", "def"}}; std::map config; for (const auto& confItem : incorrectConfig) { @@ -137,7 +142,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetWithIncorrectConfig) { } TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetAndCheckConfigAndCheck) { - auto execNet = core->compile_model(function, targetDevice); + auto execNet = core->compile_model(function, target_device); std::map config; for (const auto& confItem : configuration) { config.emplace(confItem.first, confItem.second); @@ -154,7 +159,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetAndCheckConfigAndCheck) TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworks) { std::vector vec; for (auto i = 0; i < 2; i++) { - EXPECT_NO_THROW(vec.push_back(core->compile_model(function, targetDevice, configuration))); + EXPECT_NO_THROW(vec.push_back(core->compile_model(function, target_device, configuration))); EXPECT_NE(nullptr, function); } } @@ -162,24 +167,24 @@ TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworks) { TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworksAndCheckFunction) { std::vector vec; for (auto i = 0; i < 2; i++) { - EXPECT_NO_THROW(vec.push_back(core->compile_model(function, targetDevice, configuration))); + EXPECT_NO_THROW(vec.push_back(core->compile_model(function, target_device, configuration))); EXPECT_NE(nullptr, vec[i].get_runtime_model()); EXPECT_NE(vec.begin()->get_runtime_model(), vec[i].get_runtime_model()); } } TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfo) { - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); EXPECT_NO_THROW(auto inInfo = execNet.inputs()); } TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfo) { - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); EXPECT_NO_THROW(auto outInfo = execNet.outputs()); } TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) { - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); auto inputs = execNet.inputs(); std::vector paramVec; for (const auto& input : inputs) { @@ -193,7 +198,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) { } TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) { - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); auto outputs = execNet.outputs(); std::vector resVec; for (const auto& out : outputs) { @@ -209,7 +214,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) { TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) { std::shared_ptr execGraph; // Load CNNNetwork to target plugins - auto execNet = 
core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); EXPECT_NO_THROW(execGraph = execNet.get_runtime_model()); std::map originalLayersMap; for (const auto& layer : function->get_ops()) { @@ -259,7 +264,7 @@ TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) { TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) { std::shared_ptr execGraph; // Load CNNNetwork to target plugins - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); EXPECT_NO_THROW(execGraph = execNet.get_runtime_model()); std::map originalLayersMap; for (const auto& layer : function->get_ops()) { @@ -319,7 +324,7 @@ TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) { TEST_P(OVExecutableNetworkBaseTest, canExport) { auto ts = CommonTestUtils::GetTimestamp(); std::string modelName = GetTestName().substr(0, CommonTestUtils::maxFileNameLength) + "_" + ts; - auto execNet = core->compile_model(function, targetDevice, configuration); + auto execNet = core->compile_model(function, target_device, configuration); std::ofstream out(modelName, std::ios::out); EXPECT_NO_THROW(execNet.export_model(out)); out.close(); @@ -339,7 +344,7 @@ TEST_P(OVExecutableNetworkBaseTest, getInputFromFunctionWithSingleInput) { SKIP_IF_CURRENT_TEST_IS_DISABLED() ov::CompiledModel execNet; - execNet = core->compile_model(function, targetDevice, configuration); + execNet = core->compile_model(function, target_device, configuration); EXPECT_EQ(function->inputs().size(), 1); EXPECT_EQ(function->inputs().size(), execNet.inputs().size()); EXPECT_NO_THROW(execNet.input()); @@ -366,7 +371,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputFromFunctionWithSingleInput) { SKIP_IF_CURRENT_TEST_IS_DISABLED() ov::CompiledModel execNet; - execNet = core->compile_model(function, targetDevice, configuration); + execNet = core->compile_model(function, target_device, configuration); EXPECT_EQ(function->outputs().size(), 1); EXPECT_EQ(function->outputs().size(), execNet.outputs().size()); EXPECT_NO_THROW(execNet.output()); @@ -414,7 +419,7 @@ TEST_P(OVExecutableNetworkBaseTest, getInputsFromFunctionWithSeveralInputs) { ngraph::ParameterVector{param1, param2}); function->set_friendly_name("SimpleReLU"); } - execNet = core->compile_model(function, targetDevice, configuration); + execNet = core->compile_model(function, target_device, configuration); EXPECT_EQ(function->inputs().size(), 2); EXPECT_EQ(function->inputs().size(), execNet.inputs().size()); EXPECT_THROW(execNet.input(), ov::Exception); @@ -485,7 +490,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputsFromFunctionWithSeveralOutputs) { ngraph::ParameterVector{param1, param2}); function->set_friendly_name("SimpleReLU"); } - execNet = core->compile_model(function, targetDevice, configuration); + execNet = core->compile_model(function, target_device, configuration); EXPECT_EQ(function->outputs().size(), 2); EXPECT_EQ(function->outputs().size(), execNet.outputs().size()); EXPECT_THROW(execNet.output(), ov::Exception); @@ -552,7 +557,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputsFromSplitFunctionWithSeveralOutput std::make_shared(ngraph::ResultVector{result1, result2}, ngraph::ParameterVector{param1}); function->set_friendly_name("SingleSplit"); } - execNet = core->compile_model(function, targetDevice, configuration); + execNet = core->compile_model(function, target_device, 
configuration); EXPECT_EQ(function->outputs().size(), 2); EXPECT_EQ(function->outputs().size(), execNet.outputs().size()); EXPECT_THROW(execNet.output(), ov::Exception); @@ -599,7 +604,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputsFromSplitFunctionWithSeveralOutput // Load correct network to Plugin to get executable network TEST_P(OVExecutableNetworkBaseTest, precisionsAsInOriginalFunction) { ov::CompiledModel execNet; - EXPECT_NO_THROW(execNet = core->compile_model(function, targetDevice, configuration)); + EXPECT_NO_THROW(execNet = core->compile_model(function, target_device, configuration)); EXPECT_EQ(function->get_parameters().size(), execNet.inputs().size()); auto ref_parameter = function->get_parameters().back(); @@ -623,7 +628,7 @@ TEST_P(OVExecutableNetworkBaseTest, precisionsAsInOriginalIR) { ov::pass::Serialize(m_out_xml_path_1, m_out_bin_path_1).run_on_function(function); ov::CompiledModel execNet; - EXPECT_NO_THROW(execNet = core->compile_model(m_out_xml_path_1, targetDevice, configuration)); + EXPECT_NO_THROW(execNet = core->compile_model(m_out_xml_path_1, target_device, configuration)); CommonTestUtils::removeIRFiles(m_out_xml_path_1, m_out_bin_path_1); EXPECT_EQ(function->get_parameters().size(), execNet.inputs().size()); @@ -645,7 +650,7 @@ TEST_P(OVExecutableNetworkBaseTest, getCompiledModelFromInferRequest) { ov::InferRequest req; { ov::CompiledModel compiled_model; - ASSERT_NO_THROW(compiled_model = core->compile_model(function, targetDevice, configuration)); + ASSERT_NO_THROW(compiled_model = core->compile_model(function, target_device, configuration)); ASSERT_NO_THROW(req = compiled_model.create_infer_request()); ASSERT_NO_THROW(req.infer()); } @@ -677,7 +682,7 @@ TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV10Model) { function->get_rt_info()["version"] = int64_t(10); function->set_friendly_name("SimpleReLU"); } - EXPECT_THROW(core->compile_model(function, targetDevice, configuration), ov::Exception); + EXPECT_THROW(core->compile_model(function, target_device, configuration), ov::Exception); } TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV11Model) { @@ -699,7 +704,7 @@ TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV11Model) { function->get_rt_info()["version"] = int64_t(11); function->set_friendly_name("SimpleReLU"); } - EXPECT_NO_THROW(core->compile_model(function, targetDevice, configuration)); + EXPECT_NO_THROW(core->compile_model(function, target_device, configuration)); } } // namespace behavior diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/get_metric.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/get_metric.hpp index 8d5962152cb..797d711e250 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/get_metric.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/get_metric.hpp @@ -27,30 +27,32 @@ namespace behavior { ASSERT_NE(properties.end(), it); \ } -using OVClassImportExportTestP = OVClassBaseTestP; -using OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVClassBaseTestP; -using OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = OVClassBaseTestP; -using OVClassExecutableNetworkGetMetricTest_NETWORK_NAME = OVClassBaseTestP; -using OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = OVClassBaseTestP; -using OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported = OVClassBaseTestP; -using OVClassExecutableNetworkGetConfigTest = OVClassBaseTestP; -using 
OVClassExecutableNetworkSetConfigTest = OVClassBaseTestP; -using OVClassExecutableNetworkGetConfigTest = OVClassBaseTestP; +using OVCompiledModelClassBaseTest = OVCompiledModelClassBaseTestP; +using OVClassExecutableNetworkImportExportTestP = OVCompiledModelClassBaseTestP; +using OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVCompiledModelClassBaseTestP; +using OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = OVCompiledModelClassBaseTestP; +using OVClassExecutableNetworkGetMetricTest_NETWORK_NAME = OVCompiledModelClassBaseTestP; +using OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = OVCompiledModelClassBaseTestP; +using OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported = OVCompiledModelClassBaseTestP; +using OVClassExecutableNetworkGetConfigTest = OVCompiledModelClassBaseTestP; +using OVClassExecutableNetworkSetConfigTest = OVCompiledModelClassBaseTestP; +using OVClassExecutableNetworkGetConfigTest = OVCompiledModelClassBaseTestP; class OVClassExecutableNetworkGetMetricTestForSpecificConfig : public OVClassNetworkTest, - public ::testing::WithParamInterface>> { + public ::testing::WithParamInterface>>, + public OVCompiledNetworkTestBase { protected: - std::string deviceName; std::string configKey; ov::Any configValue; public: void SetUp() override { - SKIP_IF_CURRENT_TEST_IS_DISABLED(); - OVClassNetworkTest::SetUp(); - deviceName = std::get<0>(GetParam()); + target_device = std::get<0>(GetParam()); std::tie(configKey, configValue) = std::get<1>(GetParam()); + SKIP_IF_CURRENT_TEST_IS_DISABLED(); + APIBaseTest::SetUp(); + OVClassNetworkTest::SetUp(); } }; @@ -62,18 +64,17 @@ using OVClassExecutableNetworkUnsupportedConfigTest = OVClassExecutableNetworkGe // class OVClassHeteroExecutableNetworkGetMetricTest : public OVClassNetworkTest, - public ::testing::WithParamInterface { + public ::testing::WithParamInterface, + public OVCompiledNetworkTestBase { protected: - std::string deviceName; std::string heteroDeviceName; public: void SetUp() override { + target_device = CommonTestUtils::DEVICE_HETERO + std::string(":") + GetParam() + std::string(",") + CommonTestUtils::DEVICE_CPU;; SKIP_IF_CURRENT_TEST_IS_DISABLED(); + APIBaseTest::SetUp(); OVClassNetworkTest::SetUp(); - deviceName = GetParam(); - heteroDeviceName = CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + std::string(",") + - CommonTestUtils::DEVICE_CPU; } }; using OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVClassHeteroExecutableNetworkGetMetricTest; @@ -85,13 +86,13 @@ using OVClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK = OVClassHeter // ImportExportNetwork // -TEST_P(OVClassImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) { +TEST_P(OVClassExecutableNetworkImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) { ov::Core ie = createCoreWithTemplate(); std::stringstream strm; ov::CompiledModel executableNetwork; - OV_ASSERT_NO_THROW(executableNetwork = ie.compile_model(actualNetwork, deviceName)); + OV_ASSERT_NO_THROW(executableNetwork = ie.compile_model(actualNetwork, target_device)); OV_ASSERT_NO_THROW(executableNetwork.export_model(strm)); - OV_ASSERT_NO_THROW(executableNetwork = ie.import_model(strm, deviceName)); + OV_ASSERT_NO_THROW(executableNetwork = ie.import_model(strm, target_device)); OV_ASSERT_NO_THROW(executableNetwork.create_infer_request()); } @@ -101,7 +102,7 @@ TEST_P(OVClassImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) { 
TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoThrow) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); std::vector supported_properties; OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties)); @@ -118,7 +119,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoT TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); std::vector supported_properties; OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties)); @@ -135,7 +136,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow TEST_P(OVClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); std::string model_name; OV_ASSERT_NO_THROW(model_name = compiled_model.get_property(ov::model_name)); @@ -148,7 +149,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) { TEST_P(OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, GetMetricNoThrow) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); unsigned int value = 0; OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::optimal_number_of_infer_requests)); @@ -159,7 +160,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, G } TEST_P(OVClassExecutableNetworkGetMetricTest_MODEL_PRIORITY, GetMetricNoThrow) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName, configuration); + auto compiled_model = ie.compile_model(simpleNetwork, target_device, configuration); ov::hint::Priority value; OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::hint::model_priority)); @@ -168,7 +169,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_MODEL_PRIORITY, GetMetricNoThrow) { TEST_P(OVClassExecutableNetworkGetMetricTest_DEVICE_PRIORITY, GetMetricNoThrow) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName, configuration); + auto compiled_model = ie.compile_model(simpleNetwork, target_device, configuration); std::string value; OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::device::priorities)); @@ -178,7 +179,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_DEVICE_PRIORITY, GetMetricNoThrow) TEST_P(OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); ASSERT_THROW(compiled_model.get_property("unsupported_property"), ov::Exception); } @@ -186,7 +187,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow) TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigNoThrow) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = 
ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); std::vector property_names; OV_ASSERT_NO_THROW(property_names = compiled_model.get_property(ov::supported_properties)); @@ -202,7 +203,7 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigThrows) { ov::Core ie = createCoreWithTemplate(); ov::Any p; - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); ASSERT_THROW(compiled_model.get_property("unsupported_property"), ov::Exception); } @@ -210,7 +211,7 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigThrows) { TEST_P(OVClassExecutableNetworkSetConfigTest, SetConfigThrows) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); ASSERT_THROW(compiled_model.set_property({{"unsupported_config", "some_value"}}), ov::Exception); } @@ -219,7 +220,7 @@ TEST_P(OVClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) { ov::Core ie = createCoreWithTemplate(); ov::Any p; - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); OV_ASSERT_NO_THROW(compiled_model.set_property({{configKey, configValue}})); OV_ASSERT_NO_THROW(p = compiled_model.get_property(configKey)); ASSERT_EQ(p, configValue); @@ -228,7 +229,7 @@ TEST_P(OVClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) { TEST_P(OVClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) { ov::Core ie = createCoreWithTemplate(); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); ASSERT_THROW(compiled_model.set_property({{configKey, configValue}}), ov::Exception); } @@ -237,9 +238,9 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigNoEmptyNoThrow) { ov::Core ie = createCoreWithTemplate(); std::vector dev_property_names; - OV_ASSERT_NO_THROW(dev_property_names = ie.get_property(deviceName, ov::supported_properties)); + OV_ASSERT_NO_THROW(dev_property_names = ie.get_property(target_device, ov::supported_properties)); - auto compiled_model = ie.compile_model(simpleNetwork, deviceName); + auto compiled_model = ie.compile_model(simpleNetwork, target_device); std::vector model_property_names; OV_ASSERT_NO_THROW(model_property_names = compiled_model.get_property(ov::supported_properties)); @@ -249,7 +250,7 @@ TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMet ov::Core ie = createCoreWithTemplate(); auto heteroExeNetwork = ie.compile_model(actualNetwork, heteroDeviceName); - auto deviceExeNetwork = ie.compile_model(actualNetwork, deviceName); + auto deviceExeNetwork = ie.compile_model(actualNetwork, target_device); std::vector heteroConfigValues, deviceConfigValues; OV_ASSERT_NO_THROW(heteroConfigValues = heteroExeNetwork.get_property(ov::supported_properties)); @@ -285,7 +286,7 @@ TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricN ov::Core ie = createCoreWithTemplate(); auto heteroExeNetwork = ie.compile_model(actualNetwork, heteroDeviceName); - auto deviceExeNetwork = ie.compile_model(actualNetwork, deviceName); + auto deviceExeNetwork = ie.compile_model(actualNetwork, target_device); std::vector heteroConfigValues, deviceConfigValues; OV_ASSERT_NO_THROW(heteroConfigValues = 
heteroExeNetwork.get_property(ov::supported_properties)); @@ -331,13 +332,13 @@ TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThro TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK, GetMetricNoThrow) { ov::Core ie = createCoreWithTemplate(); - setHeteroNetworkAffinity(deviceName); + setHeteroNetworkAffinity(target_device); auto compiled_model = ie.compile_model(actualNetwork, heteroDeviceName); std::string targets; OV_ASSERT_NO_THROW(targets = compiled_model.get_property(ov::device::priorities)); - auto expectedTargets = deviceName + "," + CommonTestUtils::DEVICE_CPU; + auto expectedTargets = target_device + "," + CommonTestUtils::DEVICE_CPU; std::cout << "Compiled model fallback targets: " << targets << std::endl; ASSERT_EQ(expectedTargets, targets); diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/properties.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/properties.hpp index a30b73b6d74..b35540924a2 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/properties.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_executable_network/properties.hpp @@ -16,31 +16,27 @@ namespace ov { namespace test { namespace behavior { -class OVCompiledModelPropertiesBase : public CommonTestUtils::TestsCommon { +class OVCompiledModelPropertiesBase : public OVCompiledNetworkTestBase { public: std::shared_ptr core = utils::PluginCache::get().core(); std::shared_ptr model; - std::string device_name; AnyMap properties; }; class OVCompiledModelEmptyPropertiesTests : public testing::WithParamInterface, - public OVCompiledModelPropertiesBase { + public OVCompiledModelPropertiesBase { public: static std::string getTestCaseName(testing::TestParamInfo obj); - void SetUp() override; }; using PropertiesParams = std::tuple; class OVCompiledModelPropertiesTests : public testing::WithParamInterface, - public OVCompiledModelPropertiesBase { + public OVCompiledModelPropertiesBase { public: static std::string getTestCaseName(testing::TestParamInfo obj); - void SetUp() override; - void TearDown() override; }; diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/batched_tensors.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/batched_tensors.hpp index e2e96c4c1c9..f30bb61f76c 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/batched_tensors.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/batched_tensors.hpp @@ -14,13 +14,12 @@ namespace test { namespace behavior { class OVInferRequestBatchedTests : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public OVInferRequestTestBase { public: static std::string getTestCaseName(const testing::TestParamInfo& device_name); protected: void SetUp() override; - void TearDown() override; static std::string generateCacheDirName(const std::string& test_name); @@ -28,7 +27,6 @@ protected: const PartialShape& shape, const ov::Layout& layout); std::shared_ptr ie = utils::PluginCache::get().core(); - std::string targetDevice; std::string m_cache_dir; // internal member bool m_need_reset_core = false; }; diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/callback.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/callback.hpp index 3f79aeffbcc..0c1b35fe2d9 100644 --- 
a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/callback.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/callback.hpp @@ -4,14 +4,127 @@ #pragma once +#include <future> #include "base/ov_behavior_test_utils.hpp" +#include "shared_test_classes/subgraph/basic_lstm.hpp" namespace ov { namespace test { namespace behavior { -struct OVInferRequestCallbackTests : public OVInferRequestTests { - static std::string getTestCaseName(const testing::TestParamInfo& obj); -}; +using OVInferRequestCallbackTests = OVInferRequestTests; + +TEST_P(OVInferRequestCallbackTests, canCallAsyncWithCompletionCallback) { + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + bool is_called = false; + OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) { + // HSD_1805940120: Wait on starting callback return HDDL_ERROR_INVAL_TASK_HANDLE + ASSERT_EQ(exception_ptr, nullptr); + is_called = true; + })); + OV_ASSERT_NO_THROW(req.start_async()); + OV_ASSERT_NO_THROW(req.wait()); + ASSERT_TRUE(is_called); +} + +TEST_P(OVInferRequestCallbackTests, syncInferDoesNotCallCompletionCallback) { + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + bool is_called = false; + req.set_callback([&] (std::exception_ptr exception_ptr) { + ASSERT_EQ(nullptr, exception_ptr); + is_called = true; + }); + req.infer(); + ASSERT_FALSE(is_called); +} + +// test that the dtor can safely wait for all callbacks started from inside a completion callback +TEST_P(OVInferRequestCallbackTests, canStartSeveralAsyncInsideCompletionCallbackWithSafeDtor) { + const int NUM_ITER = 10; + struct TestUserData { + std::atomic<int> numIter = {0}; + std::promise<bool> promise; + }; + TestUserData data; + + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) { + if (exception_ptr) { + data.promise.set_exception(exception_ptr); + } else { + if (data.numIter.fetch_add(1) != NUM_ITER) { + req.start_async(); + } else { + data.promise.set_value(true); + } + } + })); + auto future = data.promise.get_future(); + OV_ASSERT_NO_THROW(req.start_async()); + OV_ASSERT_NO_THROW(req.wait()); + future.wait(); + auto callbackStatus = future.get(); + ASSERT_TRUE(callbackStatus); + auto dataNumIter = data.numIter - 1; + ASSERT_EQ(NUM_ITER, dataNumIter); +} + +TEST_P(OVInferRequestCallbackTests, returnGeneralErrorIfCallbackThrowException) { + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req.set_callback([] (std::exception_ptr) { + OPENVINO_UNREACHABLE("Throw"); + })); + OV_ASSERT_NO_THROW(req.start_async()); + ASSERT_THROW(req.wait(), ov::Exception); +} + +TEST_P(OVInferRequestCallbackTests, ReturnResultNotReadyFromWaitInAsyncModeForTooSmallTimeout) { + // Basic_LSTM_S::GetNetwork(300, 38) makes inference slow enough on GNA SW + // to increase the chances of getting RESULT_NOT_READY + OV_ASSERT_NO_THROW(execNet = core->compile_model( + SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38), target_device, configuration)); + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + std::promise<std::chrono::system_clock::time_point> callbackTimeStamp; + auto callbackTimeStampFuture = callbackTimeStamp.get_future(); + // add a callback to the request and capture the timestamp + OV_ASSERT_NO_THROW(req.set_callback([&](std::exception_ptr exception_ptr) { + if (exception_ptr) { + callbackTimeStamp.set_exception(exception_ptr); + } else { 
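+                // record the moment the callback completed; compared below with the timestamp taken after wait_for() returns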
callbackTimeStamp.set_value(std::chrono::system_clock::now()); + } + })); + OV_ASSERT_NO_THROW(req.start_async()); + bool ready = false; + OV_ASSERT_NO_THROW(ready = req.wait_for({})); + // get timestamp taken AFTER return from the wait(STATUS_ONLY) + const auto afterWaitTimeStamp = std::chrono::system_clock::now(); + // IF the callback timestamp is larger than the afterWaitTimeStamp + // then we should observe false ready result + if (afterWaitTimeStamp < callbackTimeStampFuture.get()) { + ASSERT_FALSE(ready); + } + OV_ASSERT_NO_THROW(req.wait()); +} + +TEST_P(OVInferRequestCallbackTests, ImplDoesNotCopyCallback) { + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + { + auto somePtr = std::make_shared(42); + OV_ASSERT_NO_THROW(req.set_callback([somePtr] (std::exception_ptr exception_ptr) { + ASSERT_EQ(nullptr, exception_ptr); + ASSERT_EQ(1, somePtr.use_count()); + })); + } + OV_ASSERT_NO_THROW(req.start_async()); + OV_ASSERT_NO_THROW(req.wait()); +} + } // namespace behavior } // namespace test } // namespace ov \ No newline at end of file diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/cancellation.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/cancellation.hpp index 16d1f7e8f98..533099fd982 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/cancellation.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/cancellation.hpp @@ -6,14 +6,60 @@ #include +#include "openvino/runtime/exception.hpp" + #include "base/ov_behavior_test_utils.hpp" namespace ov { namespace test { namespace behavior { -struct OVInferRequestCancellationTests : public OVInferRequestTests { - static std::string getTestCaseName(const testing::TestParamInfo& obj); -}; +using OVInferRequestCancellationTests = OVInferRequestTests; + +TEST_P(OVInferRequestCancellationTests, canCancelAsyncRequest) { + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req.start_async()); + OV_ASSERT_NO_THROW(req.cancel()); + try { + req.wait(); + } catch (const ov::Cancelled&) { + SUCCEED(); + } +} + +TEST_P(OVInferRequestCancellationTests, CanResetAfterCancelAsyncRequest) { + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req.start_async()); + OV_ASSERT_NO_THROW(req.cancel()); + try { + req.wait(); + } catch (const ov::Cancelled&) { + SUCCEED(); + } + OV_ASSERT_NO_THROW(req.start_async()); + OV_ASSERT_NO_THROW(req.wait()); +} + +TEST_P(OVInferRequestCancellationTests, canCancelBeforeAsyncRequest) { + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req.cancel()); +} + +TEST_P(OVInferRequestCancellationTests, canCancelInferRequest) { + ov::InferRequest req; + OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); + auto infer = std::async(std::launch::async, [&req]{req.infer();}); + while (!req.wait_for({})) { + } + OV_ASSERT_NO_THROW(req.cancel()); + try { + infer.get(); + } catch (const ov::Cancelled&) { + SUCCEED(); + } +} } // namespace behavior } // namespace test } // namespace ov diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/infer_request_dynamic.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/infer_request_dynamic.hpp index b00276342ec..d852982b3aa 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/infer_request_dynamic.hpp +++ 
b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/infer_request_dynamic.hpp @@ -36,26 +36,23 @@ namespace test { namespace behavior { using OVInferRequestDynamicParams = std::tuple< - std::shared_ptr, // ov Model + std::shared_ptr, // ov Model std::vector, std::vector>>, // input/expected output shapes per inference std::string, // Device name - ov::AnyMap // Config + ov::AnyMap // Config >; class OVInferRequestDynamicTests : public testing::WithParamInterface, - virtual public ov::test::SubgraphBaseTest { + public OVInferRequestTestBase { public: static std::string getTestCaseName(testing::TestParamInfo obj); protected: void SetUp() override; - - void TearDown() override; bool checkOutput(const ov::runtime::Tensor& in, const ov::runtime::Tensor& actual); std::shared_ptr ie = utils::PluginCache::get().core(); std::shared_ptr function; - std::string targetDevice; ov::AnyMap configuration; std::vector, std::vector>> inOutShapes; }; diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/inference.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/inference.hpp index 8dfb6cca28a..929ce1472c5 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/inference.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/inference.hpp @@ -71,21 +71,18 @@ inline OVInferReqInferParam roi_1d() { } // namespace tensor_roi class OVInferRequestInferenceTests : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public OVInferRequestTestBase { public: static std::string getTestCaseName(const testing::TestParamInfo& device_name); protected: void SetUp() override; - void TearDown() override; - static std::shared_ptr create_n_inputs(size_t num, element::Type type, const PartialShape& shape); std::shared_ptr ie = utils::PluginCache::get().core(); OVInferReqInferParam m_param; - std::string m_device_name; }; } // namespace behavior diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/io_tensor.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/io_tensor.hpp index c8126be6035..73af51f5fec 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/io_tensor.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/io_tensor.hpp @@ -15,7 +15,6 @@ namespace test { namespace behavior { struct OVInferRequestIOTensorTest : public OVInferRequestTests { - static std::string getTestCaseName(const testing::TestParamInfo& obj); void SetUp() override; void TearDown() override; ov::InferRequest req; @@ -29,7 +28,7 @@ using OVInferRequestSetPrecisionParams = std::tuple< ov::AnyMap // Config >; struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public OVInferRequestTestBase { static std::string getTestCaseName(const testing::TestParamInfo& obj); void SetUp() override; void TearDown() override; @@ -37,7 +36,6 @@ struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterfa std::shared_ptr function; ov::CompiledModel execNet; ov::InferRequest req; - std::string target_device; ov::AnyMap config; element::Type element_type; }; @@ -45,7 +43,7 @@ struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterfa using OVInferRequestCheckTensorPrecisionParams = OVInferRequestSetPrecisionParams; struct OVInferRequestCheckTensorPrecision : public testing::WithParamInterface, - public 
CommonTestUtils::TestsCommon { + public OVInferRequestTestBase { static std::string getTestCaseName(const testing::TestParamInfo& obj); void SetUp() override; void TearDown() override; @@ -56,8 +54,7 @@ struct OVInferRequestCheckTensorPrecision : public testing::WithParamInterface + #include "base/ov_behavior_test_utils.hpp" namespace ov { namespace test { namespace behavior { -struct OVInferRequestMultithreadingTests : public OVInferRequestTests { - static std::string getTestCaseName(const testing::TestParamInfo& obj); -}; +using OVInferRequestMultithreadingTests = OVInferRequestTests; + +TEST_P(OVInferRequestMultithreadingTests, canRun3SyncRequestsConsistentlyFromThreads) { + ov::InferRequest req1, req2, req3; + OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request()); + + auto f1 = std::async(std::launch::async, [&] { req1.infer(); }); + auto f2 = std::async(std::launch::async, [&] { req2.infer(); }); + auto f3 = std::async(std::launch::async, [&] { req3.infer(); }); + + f1.wait(); + f2.wait(); + f3.wait(); + + OV_ASSERT_NO_THROW(f1.get()); + OV_ASSERT_NO_THROW(f2.get()); + OV_ASSERT_NO_THROW(f3.get()); +} + +TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsConsistentlyFromThreadsWithoutWait) { + ov::InferRequest req1, req2, req3; + OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request()); + + OV_ASSERT_NO_THROW(req1.infer()); + OV_ASSERT_NO_THROW(req2.infer()); + OV_ASSERT_NO_THROW(req3.infer()); + + auto f1 = std::async(std::launch::async, [&] { req1.start_async(); }); + auto f2 = std::async(std::launch::async, [&] { req2.start_async(); }); + auto f3 = std::async(std::launch::async, [&] { req3.start_async(); }); + + f1.wait(); + f2.wait(); + f3.wait(); + + OV_ASSERT_NO_THROW(f1.get()); + OV_ASSERT_NO_THROW(f2.get()); + OV_ASSERT_NO_THROW(f3.get()); +} + +TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsConsistentlyWithWait) { + ov::InferRequest req1, req2, req3; + OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request()); + + req1.start_async(); + OV_ASSERT_NO_THROW(req1.wait()); + + req2.start_async(); + OV_ASSERT_NO_THROW(req2.wait()); + + req3.start_async(); + OV_ASSERT_NO_THROW(req3.wait()); +} + +TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsParallelWithWait) { + ov::InferRequest req1, req2, req3; + OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request()); + OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request()); + + req1.start_async(); + req2.start_async(); + req3.start_async(); + + OV_ASSERT_NO_THROW(req2.wait()); + OV_ASSERT_NO_THROW(req1.wait()); + OV_ASSERT_NO_THROW(req3.wait()); +} } // namespace behavior } // namespace test } // namespace ov diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/perf_counters.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/perf_counters.hpp index 232a7b48446..44adac66871 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/perf_counters.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/perf_counters.hpp @@ -9,8 +9,7 @@ namespace ov { namespace test { namespace 
behavior { -struct OVInferRequestPerfCountersTest : public OVInferRequestTests { - static std::string getTestCaseName(const testing::TestParamInfo& obj); +struct OVInferRequestPerfCountersTest : public virtual OVInferRequestTests { void SetUp() override; ov::InferRequest req; }; diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/wait.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/wait.hpp index e5adf8bf2b1..da918b700ca 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/wait.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_infer_request/wait.hpp @@ -10,7 +10,6 @@ namespace ov { namespace test { namespace behavior { struct OVInferRequestWaitTests : public OVInferRequestTests { - static std::string getTestCaseName(const testing::TestParamInfo& obj); void SetUp() override; void TearDown() override; ov::InferRequest req; diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/caching_tests.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/caching_tests.hpp index 6f02b717815..9dbcfde4917 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/caching_tests.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/caching_tests.hpp @@ -13,6 +13,7 @@ #include "functional_test_utils/plugin_cache.hpp" #include "common_test_utils/unicode_utils.hpp" #include "openvino/util/common_util.hpp" +#include "base/ov_behavior_test_utils.hpp" #include #include @@ -33,7 +34,8 @@ using compileModelCacheParams = std::tuple< >; class CompileModelCacheTestBase : public testing::WithParamInterface, - virtual public SubgraphBaseTest { + virtual public SubgraphBaseTest, + virtual public OVPluginTestBase { std::string m_cacheFolderName; std::string m_functionName; ov::element::Type m_precision; @@ -52,35 +54,21 @@ public: }; using compileKernelsCacheParams = std::tuple< - std::string, // device name + std::string, // device name std::pair // device and cache configuration >; class CompiledKernelsCacheTest : virtual public SubgraphBaseTest, + virtual public OVPluginTestBase, public testing::WithParamInterface { public: static std::string getTestCaseName(testing::TestParamInfo obj); protected: std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); - std::shared_ptr function; std::string cache_path; std::vector m_extList; - void SetUp() override { - function = ngraph::builder::subgraph::makeConvPoolRelu(); - std::pair userConfig; - std::tie(targetDevice, userConfig) = GetParam(); - configuration = userConfig.first; - std::string ext = userConfig.second; - std::string::size_type pos = 0; - if ((pos = ext.find(",", pos)) != std::string::npos) { - m_extList.push_back(ext.substr(0, pos)); - m_extList.push_back(ext.substr(pos + 1)); - } else { - m_extList.push_back(ext); - } - std::replace(test_name.begin(), test_name.end(), '/', '_'); - std::replace(test_name.begin(), test_name.end(), '\\', '_'); - cache_path = "compiledModel" + test_name + "_cache"; - } + + void SetUp() override; + void TearDown() override; }; } // namespace behavior } // namespace test diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/core_integration.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/core_integration.hpp index 6ede5ace7e1..462ec0a640b 100644 --- a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/core_integration.hpp +++ 
b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/core_integration.hpp @@ -25,21 +25,23 @@ namespace behavior { #define OV_ASSERT_PROPERTY_SUPPORTED(property_key) \ { \ - auto properties = ie.get_property(deviceName, ov::supported_properties); \ + auto properties = ie.get_property(target_device, ov::supported_properties); \ auto it = std::find(properties.begin(), properties.end(), property_key); \ ASSERT_NE(properties.end(), it); \ } -class OVClassBasicTestP : public ::testing::Test, public ::testing::WithParamInterface> { +class OVClassBasicTestP : public OVPluginTestBase, + public ::testing::WithParamInterface> { protected: std::string deviceName; std::string pluginName; public: void SetUp() override { + std::tie(pluginName, target_device) = GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(pluginName, deviceName) = GetParam(); + APIBaseTest::SetUp(); pluginName += IE_BUILD_POSTFIX; if (pluginName == (std::string("openvino_template_plugin") + IE_BUILD_POSTFIX)) { pluginName = ov::util::make_plugin_library_name(CommonTestUtils::getExecutableDirectory(), pluginName); @@ -47,14 +49,17 @@ public: } }; -class OVClassSetDefaultDeviceIDTest : public ::testing::Test, +class OVClassSetDefaultDeviceIDTest : public OVPluginTestBase, public ::testing::WithParamInterface> { protected: std::string deviceName; std::string deviceID; + public: void SetUp() override { - std::tie(deviceName, deviceID) = GetParam(); + std::tie(target_device, deviceID) = GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED(); + APIBaseTest::SetUp(); } }; @@ -63,7 +68,8 @@ using DevicePriorityParams = std::tuple< ov::AnyMap // Configuration key and its default value >; -class OVClassSetDevicePriorityConfigTest : public ::testing::Test, public ::testing::WithParamInterface { +class OVClassSetDevicePriorityConfigTest : public OVPluginTestBase, + public ::testing::WithParamInterface { protected: std::string deviceName; ov::AnyMap configuration; @@ -71,8 +77,9 @@ protected: public: void SetUp() override { + std::tie(target_device, configuration) = GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(deviceName, configuration) = GetParam(); + APIBaseTest::SetUp(); actualNetwork = ngraph::builder::subgraph::makeSplitConvConcat(); } }; @@ -108,28 +115,34 @@ using OVClassLoadNetworkWithCorrectPropertiesTest = OVClassSetDevicePriorityConf using OVClassLoadNetworkWithDefaultPropertiesTest = OVClassSetDevicePriorityConfigTest; using OVClassLoadNetworkWithDefaultIncorrectPropertiesTest = OVClassSetDevicePriorityConfigTest; -class OVClassSeveralDevicesTest : public OVClassNetworkTest, +class OVClassSeveralDevicesTest : public OVPluginTestBase, + public OVClassNetworkTest, public ::testing::WithParamInterface> { public: - std::vector deviceNames; + std::vector target_devices; + void SetUp() override { + target_device = CommonTestUtils::DEVICE_MULTI; + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); OVClassNetworkTest::SetUp(); - deviceNames = GetParam(); + target_devices = GetParam(); } }; + using OVClassSeveralDevicesTestLoadNetwork = OVClassSeveralDevicesTest; using OVClassSeveralDevicesTestQueryNetwork = OVClassSeveralDevicesTest; using OVClassSeveralDevicesTestDefaultCore = OVClassSeveralDevicesTest; -inline bool supportsAvaliableDevices(ov::Core& ie, const std::string& deviceName) { - auto supported_properties = ie.get_property(deviceName, ov::supported_properties); +inline bool supportsAvaliableDevices(ov::Core& ie, const std::string& target_device) { + auto supported_properties = 
ie.get_property(target_device, ov::supported_properties); return supported_properties.end() != std::find(std::begin(supported_properties), std::end(supported_properties), ov::available_devices); } -bool supportsDeviceID(ov::Core& ie, const std::string& deviceName) { auto supported_properties = - ie.get_property(deviceName, ov::supported_properties); + ie.get_property(target_device, ov::supported_properties); return supported_properties.end() != std::find(std::begin(supported_properties), std::end(supported_properties), ov::device::id); } @@ -140,7 +153,7 @@ TEST(OVClassBasicTest, smoke_createDefault) { ov::Core ie = createCoreWithTemplate(); } TEST_P(OVClassBasicTestP, registerExistingPluginThrows) { ov::Core ie = createCoreWithTemplate(); - ASSERT_THROW(ie.register_plugin(pluginName, deviceName), ov::Exception); + ASSERT_THROW(ie.register_plugin(pluginName, target_device), ov::Exception); } // TODO: CVS-68982 @@ -186,8 +199,7 @@ TEST(OVClassBasicTest, smoke_createMockEngineConfigThrows) { CommonTestUtils::removeFile(filename.c_str()); } -#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT - +#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT TEST_P(OVClassBasicTestP, smoke_registerPluginsXMLUnicodePath) { const std::string pluginXML = getPluginFile(); @@ -211,7 +223,7 @@ TEST_P(OVClassBasicTestP, smoke_registerPluginsXMLUnicodePath) { OV_ASSERT_NO_THROW(ie.register_plugins(::ov::util::wstring_to_string(pluginsXmlW))); CommonTestUtils::removeFile(pluginsXmlW); OV_ASSERT_NO_THROW(ie.get_versions("mock")); // from pluginXML - OV_ASSERT_NO_THROW(ie.get_versions(deviceName)); + OV_ASSERT_NO_THROW(ie.get_versions(target_device)); GTEST_COUT << "Plugin created " << testIndex << std::endl; OV_ASSERT_NO_THROW(ie.register_plugin(pluginName, "TEST_DEVICE")); @@ -237,17 +249,17 @@ TEST_P(OVClassBasicTestP, getVersionsByExactDeviceNoThrow) { ov::Core ie = createCoreWithTemplate(); - OV_ASSERT_NO_THROW(ie.get_versions(deviceName + ".0")); + OV_ASSERT_NO_THROW(ie.get_versions(target_device + ".0")); } TEST_P(OVClassBasicTestP, getVersionsByDeviceClassNoThrow) { ov::Core ie = createCoreWithTemplate(); - OV_ASSERT_NO_THROW(ie.get_versions(deviceName)); + OV_ASSERT_NO_THROW(ie.get_versions(target_device)); } TEST_P(OVClassBasicTestP, getVersionsNonEmpty) { ov::Core ie = createCoreWithTemplate(); - ASSERT_EQ(2, ie.get_versions(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName).size()); + ASSERT_EQ(2, ie.get_versions(CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device).size()); } // @@ -257,22 +269,22 @@ TEST_P(OVClassBasicTestP, getVersionsNonEmpty) { TEST_P(OVClassBasicTestP, unregisterExistingPluginNoThrow) { ov::Core ie = createCoreWithTemplate(); // device instance is not created yet - ASSERT_THROW(ie.unload_plugin(deviceName), ov::Exception); + ASSERT_THROW(ie.unload_plugin(target_device), ov::Exception); // make the first call to IE which created device instance - ie.get_versions(deviceName); + ie.get_versions(target_device); // now, we can unregister device - OV_ASSERT_NO_THROW(ie.unload_plugin(deviceName)); + OV_ASSERT_NO_THROW(ie.unload_plugin(target_device)); } TEST_P(OVClassBasicTestP, accessToUnregisteredPluginThrows) { ov::Core ie = createCoreWithTemplate(); - ASSERT_THROW(ie.unload_plugin(deviceName), ov::Exception); - OV_ASSERT_NO_THROW(ie.get_versions(deviceName)); - OV_ASSERT_NO_THROW(ie.unload_plugin(deviceName)); - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::AnyMap{})); - 
OV_ASSERT_NO_THROW(ie.get_versions(deviceName)); - OV_ASSERT_NO_THROW(ie.unload_plugin(deviceName)); + ASSERT_THROW(ie.unload_plugin(target_device), ov::Exception); + OV_ASSERT_NO_THROW(ie.get_versions(target_device)); + OV_ASSERT_NO_THROW(ie.unload_plugin(target_device)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::AnyMap{})); + OV_ASSERT_NO_THROW(ie.get_versions(target_device)); + OV_ASSERT_NO_THROW(ie.unload_plugin(target_device)); } TEST(OVClassBasicTest, smoke_unregisterNonExistingPluginThrows) { @@ -287,7 +299,7 @@ TEST(OVClassBasicTest, smoke_unregisterNonExistingPluginThrows) { TEST_P(OVClassBasicTestP, SetConfigAllThrows) { ov::Core ie = createCoreWithTemplate(); OV_ASSERT_NO_THROW(ie.set_property({{"unsupported_key", "4"}})); - ASSERT_ANY_THROW(ie.get_versions(deviceName)); + ASSERT_ANY_THROW(ie.get_versions(target_device)); } TEST_P(OVClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) { @@ -297,13 +309,13 @@ TEST_P(OVClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) { TEST_P(OVClassBasicTestP, SetConfigNoThrow) { ov::Core ie = createCoreWithTemplate(); - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::enable_profiling(true))); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::enable_profiling(true))); } TEST_P(OVClassBasicTestP, SetConfigAllNoThrow) { ov::Core ie = createCoreWithTemplate(); OV_ASSERT_NO_THROW(ie.set_property(ov::enable_profiling(true))); - OV_ASSERT_NO_THROW(ie.get_versions(deviceName)); + OV_ASSERT_NO_THROW(ie.get_versions(target_device)); } TEST(OVClassBasicTest, smoke_SetConfigHeteroThrows) { @@ -313,20 +325,20 @@ TEST(OVClassBasicTest, smoke_SetConfigHeteroThrows) { TEST_P(OVClassBasicTestP, SetConfigHeteroTargetFallbackThrows) { ov::Core ie = createCoreWithTemplate(); - OV_ASSERT_NO_THROW(ie.set_property(CommonTestUtils::DEVICE_HETERO, ov::device::priorities(deviceName))); + OV_ASSERT_NO_THROW(ie.set_property(CommonTestUtils::DEVICE_HETERO, ov::device::priorities(target_device))); } TEST_P(OVClassBasicTestP, smoke_SetConfigHeteroNoThrow) { ov::Core ie = createCoreWithTemplate(); std::string value; - OV_ASSERT_NO_THROW(ie.set_property(CommonTestUtils::DEVICE_HETERO, ov::device::priorities(deviceName))); + OV_ASSERT_NO_THROW(ie.set_property(CommonTestUtils::DEVICE_HETERO, ov::device::priorities(target_device))); OV_ASSERT_NO_THROW(value = ie.get_property(CommonTestUtils::DEVICE_HETERO, ov::device::priorities)); - ASSERT_EQ(deviceName, value); + ASSERT_EQ(target_device, value); - OV_ASSERT_NO_THROW(ie.set_property(CommonTestUtils::DEVICE_HETERO, ov::device::priorities(deviceName))); + OV_ASSERT_NO_THROW(ie.set_property(CommonTestUtils::DEVICE_HETERO, ov::device::priorities(target_device))); OV_ASSERT_NO_THROW(value = ie.get_property(CommonTestUtils::DEVICE_HETERO, ov::device::priorities)); - ASSERT_EQ(deviceName, value); + ASSERT_EQ(target_device, value); } TEST(OVClassBasicTest, smoke_SetConfigAutoNoThrows) { @@ -348,23 +360,23 @@ TEST(OVClassBasicTest, smoke_SetConfigAutoNoThrows) { TEST_P(OVClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) { ov::Core ie = createCoreWithTemplate(); - std::string deviceID, clearDeviceName; - auto pos = deviceName.find('.'); + std::string deviceID, cleartarget_device; + auto pos = target_device.find('.'); if (pos != std::string::npos) { - clearDeviceName = deviceName.substr(0, pos); - deviceID = deviceName.substr(pos + 1, deviceName.size()); + cleartarget_device = target_device.substr(0, pos); + deviceID = target_device.substr(pos + 1, target_device.size()); } - if 
(!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { + if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) { GTEST_SKIP(); } - auto deviceIDs = ie.get_property(clearDeviceName, ov::available_devices); + auto deviceIDs = ie.get_property(cleartarget_device, ov::available_devices); if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) { GTEST_SKIP(); } - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::enable_profiling(true))); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::enable_profiling(true))); bool value = false; - OV_ASSERT_NO_THROW(value = ie.get_property(deviceName, ov::enable_profiling)); + OV_ASSERT_NO_THROW(value = ie.get_property(target_device, ov::enable_profiling)); ASSERT_TRUE(value); } @@ -373,22 +385,22 @@ TEST_P(OVClassSetModelPriorityConfigTest, SetConfigNoThrow) { // priority config test ov::hint::Priority value; - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::LOW))); - OV_ASSERT_NO_THROW(value = ie.get_property(deviceName, ov::hint::model_priority)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::LOW))); + OV_ASSERT_NO_THROW(value = ie.get_property(target_device, ov::hint::model_priority)); EXPECT_EQ(value, ov::hint::Priority::LOW); - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::MEDIUM))); - OV_ASSERT_NO_THROW(value = ie.get_property(deviceName, ov::hint::model_priority)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::MEDIUM))); + OV_ASSERT_NO_THROW(value = ie.get_property(target_device, ov::hint::model_priority)); EXPECT_EQ(value, ov::hint::Priority::MEDIUM); - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::HIGH))); - OV_ASSERT_NO_THROW(value = ie.get_property(deviceName, ov::hint::model_priority)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::HIGH))); + OV_ASSERT_NO_THROW(value = ie.get_property(target_device, ov::hint::model_priority)); EXPECT_EQ(value, ov::hint::Priority::HIGH); } TEST_P(OVClassSetDevicePriorityConfigTest, SetConfigAndCheckGetConfigNoThrow) { ov::Core ie = createCoreWithTemplate(); std::string devicePriority; - OV_ASSERT_NO_THROW(ie.set_property(deviceName, configuration)); - OV_ASSERT_NO_THROW(devicePriority = ie.get_property(deviceName, ov::device::priorities)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, configuration)); + OV_ASSERT_NO_THROW(devicePriority = ie.get_property(target_device, ov::device::priorities)); ASSERT_EQ(devicePriority, configuration[ov::device::priorities.name()].as()); } @@ -425,23 +437,23 @@ TEST_P(OVClassSetLogLevelConfigTest, SetConfigNoThrow) { ov::Core ie = createCoreWithTemplate(); // log level ov::log::Level logValue; - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::log::level(ov::log::Level::NO))); - OV_ASSERT_NO_THROW(logValue = ie.get_property(deviceName, ov::log::level)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::log::level(ov::log::Level::NO))); + OV_ASSERT_NO_THROW(logValue = ie.get_property(target_device, ov::log::level)); EXPECT_EQ(logValue, ov::log::Level::NO); - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::log::level(ov::log::Level::ERR))); - OV_ASSERT_NO_THROW(logValue = ie.get_property(deviceName, ov::log::level)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, 
ov::log::level(ov::log::Level::ERR))); + OV_ASSERT_NO_THROW(logValue = ie.get_property(target_device, ov::log::level)); EXPECT_EQ(logValue, ov::log::Level::ERR); - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::log::level(ov::log::Level::WARNING))); - OV_ASSERT_NO_THROW(logValue = ie.get_property(deviceName, ov::log::level)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::log::level(ov::log::Level::WARNING))); + OV_ASSERT_NO_THROW(logValue = ie.get_property(target_device, ov::log::level)); EXPECT_EQ(logValue, ov::log::Level::WARNING); - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::log::level(ov::log::Level::INFO))); - OV_ASSERT_NO_THROW(logValue = ie.get_property(deviceName, ov::log::level)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::log::level(ov::log::Level::INFO))); + OV_ASSERT_NO_THROW(logValue = ie.get_property(target_device, ov::log::level)); EXPECT_EQ(logValue, ov::log::Level::INFO); - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::log::level(ov::log::Level::DEBUG))); - OV_ASSERT_NO_THROW(logValue = ie.get_property(deviceName, ov::log::level)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::log::level(ov::log::Level::DEBUG))); + OV_ASSERT_NO_THROW(logValue = ie.get_property(target_device, ov::log::level)); EXPECT_EQ(logValue, ov::log::Level::DEBUG); - OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::log::level(ov::log::Level::TRACE))); - OV_ASSERT_NO_THROW(logValue = ie.get_property(deviceName, ov::log::level)); + OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::log::level(ov::log::Level::TRACE))); + OV_ASSERT_NO_THROW(logValue = ie.get_property(target_device, ov::log::level)); EXPECT_EQ(logValue, ov::log::Level::TRACE); } // @@ -450,14 +462,14 @@ TEST_P(OVClassSetLogLevelConfigTest, SetConfigNoThrow) { TEST_P(OVClassNetworkTestP, QueryNetworkActualThrows) { ov::Core ie = createCoreWithTemplate(); - OV_ASSERT_NO_THROW(ie.query_model(actualNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName)); + OV_ASSERT_NO_THROW(ie.query_model(actualNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device)); } TEST_P(OVClassNetworkTestP, QueryNetworkActualNoThrow) { ov::Core ie = createCoreWithTemplate(); try { - ie.query_model(actualNetwork, deviceName); + ie.query_model(actualNetwork, target_device); } catch (const ov::Exception& ex) { std::string message = ex.what(); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); @@ -468,11 +480,11 @@ TEST_P(OVClassNetworkTestP, QueryNetworkWithKSO) { ov::Core ie = createCoreWithTemplate(); try { - auto rl_map = ie.query_model(ksoNetwork, deviceName); + auto rl_map = ie.query_model(ksoNetwork, target_device); auto func = ksoNetwork; for (const auto& op : func->get_ops()) { if (!rl_map.count(op->get_friendly_name())) { - FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName; + FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device; } } } catch (const ov::Exception& ex) { @@ -484,26 +496,26 @@ TEST_P(OVClassNetworkTestP, QueryNetworkWithKSO) { TEST_P(OVClassSeveralDevicesTestQueryNetwork, QueryNetworkActualSeveralDevicesNoThrow) { ov::Core ie = createCoreWithTemplate(); - std::string clearDeviceName; - auto pos = deviceNames.begin()->find('.'); + std::string cleartarget_device; + auto pos = target_devices.begin()->find('.'); if (pos != std::string::npos) { - clearDeviceName = deviceNames.begin()->substr(0, pos); + cleartarget_device = target_devices.begin()->substr(0, 
pos); } - if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { + if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) { GTEST_SKIP(); } - auto deviceIDs = ie.get_property(clearDeviceName, ov::available_devices); - if (deviceIDs.size() < deviceNames.size()) + auto deviceIDs = ie.get_property(cleartarget_device, ov::available_devices); + if (deviceIDs.size() < target_devices.size()) GTEST_SKIP(); - std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":"); - for (auto& dev_name : deviceNames) { - multiDeviceName += dev_name; - if (&dev_name != &(deviceNames.back())) { - multiDeviceName += ","; + std::string multitarget_device = CommonTestUtils::DEVICE_MULTI + std::string(":"); + for (auto& dev_name : target_devices) { + multitarget_device += dev_name; + if (&dev_name != &(target_devices.back())) { + multitarget_device += ","; } } - OV_ASSERT_NO_THROW(ie.query_model(actualNetwork, multiDeviceName)); + OV_ASSERT_NO_THROW(ie.query_model(actualNetwork, multitarget_device)); } TEST_P(OVClassNetworkTestP, SetAffinityWithConstantBranches) { @@ -535,17 +547,17 @@ TEST_P(OVClassNetworkTestP, SetAffinityWithConstantBranches) { func = std::make_shared(results, params); } - auto rl_map = ie.query_model(func, deviceName); + auto rl_map = ie.query_model(func, target_device); for (const auto& op : func->get_ops()) { if (!rl_map.count(op->get_friendly_name())) { - FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName; + FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device; } } for (const auto& op : func->get_ops()) { std::string affinity = rl_map[op->get_friendly_name()]; op->get_rt_info()["affinity"] = affinity; } - auto exeNetwork = ie.compile_model(ksoNetwork, deviceName); + auto exeNetwork = ie.compile_model(ksoNetwork, target_device); } catch (const InferenceEngine::NotImplemented& ex) { std::string message = ex.what(); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); @@ -556,18 +568,18 @@ TEST_P(OVClassNetworkTestP, SetAffinityWithKSO) { ov::Core ie = createCoreWithTemplate(); try { - auto rl_map = ie.query_model(ksoNetwork, deviceName); + auto rl_map = ie.query_model(ksoNetwork, target_device); auto func = ksoNetwork; for (const auto& op : func->get_ops()) { if (!rl_map.count(op->get_friendly_name())) { - FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName; + FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device; } } for (const auto& op : func->get_ops()) { std::string affinity = rl_map[op->get_friendly_name()]; op->get_rt_info()["affinity"] = affinity; } - auto exeNetwork = ie.compile_model(ksoNetwork, deviceName); + auto exeNetwork = ie.compile_model(ksoNetwork, target_device); } catch (const ov::Exception& ex) { std::string message = ex.what(); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); @@ -578,7 +590,7 @@ TEST_P(OVClassNetworkTestP, QueryNetworkHeteroActualNoThrow) { ov::Core ie = createCoreWithTemplate(); ov::SupportedOpsMap res; OV_ASSERT_NO_THROW( - res = ie.query_model(actualNetwork, CommonTestUtils::DEVICE_HETERO, ov::device::priorities(deviceName))); + res = ie.query_model(actualNetwork, CommonTestUtils::DEVICE_HETERO, ov::device::priorities(target_device))); ASSERT_LT(0, res.size()); } @@ -589,10 +601,10 @@ TEST_P(OVClassNetworkTestP, QueryNetworkMultiThrows) 
{ TEST(OVClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) { ov::Core ie = createCoreWithTemplate(); - std::string deviceName = CommonTestUtils::DEVICE_HETERO; + std::string target_device = CommonTestUtils::DEVICE_HETERO; std::vector t; - OV_ASSERT_NO_THROW(t = ie.get_property(deviceName, ov::supported_properties)); + OV_ASSERT_NO_THROW(t = ie.get_property(target_device, ov::supported_properties)); std::cout << "Supported HETERO properties: " << std::endl; for (auto&& str : t) { @@ -605,15 +617,15 @@ TEST(OVClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) { TEST(OVClassBasicTest, smoke_GetMetricSupportedConfigKeysHeteroThrows) { ov::Core ie = createCoreWithTemplate(); // TODO: check - std::string targetDevice = CommonTestUtils::DEVICE_HETERO + std::string(":") + CommonTestUtils::DEVICE_CPU; - ASSERT_THROW(ie.get_property(targetDevice, ov::supported_properties), ov::Exception); + std::string target_device = CommonTestUtils::DEVICE_HETERO + std::string(":") + CommonTestUtils::DEVICE_CPU; + ASSERT_THROW(ie.get_property(target_device, ov::supported_properties), ov::Exception); } TEST_P(OVClassGetMetricTest_SUPPORTED_METRICS, GetMetricAndPrintNoThrow) { ov::Core ie = createCoreWithTemplate(); std::vector t; - OV_ASSERT_NO_THROW(t = ie.get_property(deviceName, ov::supported_properties)); + OV_ASSERT_NO_THROW(t = ie.get_property(target_device, ov::supported_properties)); std::cout << "Supported properties: " << std::endl; for (auto&& str : t) { @@ -627,7 +639,7 @@ TEST_P(OVClassGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricAndPrintNoThrow) { ov::Core ie = createCoreWithTemplate(); std::vector t; - OV_ASSERT_NO_THROW(t = ie.get_property(deviceName, ov::supported_properties)); + OV_ASSERT_NO_THROW(t = ie.get_property(target_device, ov::supported_properties)); std::cout << "Supported config values: " << std::endl; for (auto&& str : t) { @@ -641,7 +653,7 @@ TEST_P(OVClassGetMetricTest_AVAILABLE_DEVICES, GetMetricAndPrintNoThrow) { ov::Core ie = createCoreWithTemplate(); std::vector t; - OV_ASSERT_NO_THROW(t = ie.get_property(deviceName, ov::available_devices)); + OV_ASSERT_NO_THROW(t = ie.get_property(target_device, ov::available_devices)); std::cout << "Available devices: " << std::endl; for (auto&& str : t) { @@ -655,7 +667,7 @@ TEST_P(OVClassGetMetricTest_FULL_DEVICE_NAME, GetMetricAndPrintNoThrow) { ov::Core ie = createCoreWithTemplate(); std::string t; - OV_ASSERT_NO_THROW(t = ie.get_property(deviceName, ov::device::full_name)); + OV_ASSERT_NO_THROW(t = ie.get_property(target_device, ov::device::full_name)); std::cout << "Full device name: " << std::endl << t << std::endl; OV_ASSERT_PROPERTY_SUPPORTED(ov::device::full_name); @@ -665,10 +677,10 @@ TEST_P(OVClassGetMetricTest_FULL_DEVICE_NAME_with_DEVICE_ID, GetMetricAndPrintNo ov::Core ie = createCoreWithTemplate(); std::string t; - if (supportsDeviceID(ie, deviceName)) { - auto device_ids = ie.get_property(deviceName, ov::available_devices); + if (supportsDeviceID(ie, target_device)) { + auto device_ids = ie.get_property(target_device, ov::available_devices); ASSERT_GT(device_ids.size(), 0); - OV_ASSERT_NO_THROW(t = ie.get_property(deviceName, ov::device::full_name, ov::device::id(device_ids.front()))); + OV_ASSERT_NO_THROW(t = ie.get_property(target_device, ov::device::full_name, ov::device::id(device_ids.front()))); std::cout << "Device " << device_ids.front() << " " << ", Full device name: " << std::endl << t << std::endl; OV_ASSERT_PROPERTY_SUPPORTED(ov::device::full_name); } else { @@ -680,7 +692,7 @@ 
TEST_P(OVClassGetMetricTest_DEVICE_UUID, GetMetricAndPrintNoThrow) { ov::Core ie = createCoreWithTemplate(); ov::device::UUID t; - OV_ASSERT_NO_THROW(t = ie.get_property(deviceName, ov::device::uuid)); + OV_ASSERT_NO_THROW(t = ie.get_property(target_device, ov::device::uuid)); std::cout << "Device uuid: " << std::endl << t << std::endl; OV_ASSERT_PROPERTY_SUPPORTED(ov::device::uuid); @@ -689,7 +701,7 @@ TEST_P(OVClassGetMetricTest_DEVICE_UUID, GetMetricAndPrintNoThrow) { TEST_P(OVClassGetMetricTest_OPTIMIZATION_CAPABILITIES, GetMetricAndPrintNoThrow) { ov::Core ie = createCoreWithTemplate(); std::vector t; - OV_ASSERT_NO_THROW(t = ie.get_property(deviceName, ov::device::capabilities)); + OV_ASSERT_NO_THROW(t = ie.get_property(target_device, ov::device::capabilities)); std::cout << "Optimization capabilities: " << std::endl; for (auto&& str : t) { std::cout << str << std::endl; @@ -701,7 +713,7 @@ TEST_P(OVClassGetMetricTest_MAX_BATCH_SIZE, GetMetricAndPrintNoThrow) { ov::Core ie; uint32_t max_batch_size = 0; - ASSERT_NO_THROW(max_batch_size = ie.get_property(deviceName, ov::max_batch_size)); + ASSERT_NO_THROW(max_batch_size = ie.get_property(target_device, ov::max_batch_size)); std::cout << "Max batch size: " << max_batch_size << std::endl; @@ -711,7 +723,7 @@ TEST_P(OVClassGetMetricTest_MAX_BATCH_SIZE, GetMetricAndPrintNoThrow) { TEST_P(OVClassGetMetricTest_DEVICE_GOPS, GetMetricAndPrintNoThrow) { ov::Core ie = createCoreWithTemplate(); std::cout << "Device GOPS: " << std::endl; - for (auto&& kv : ie.get_property(deviceName, ov::device::gops)) { + for (auto&& kv : ie.get_property(target_device, ov::device::gops)) { std::cout << kv.first << ": " << kv.second << std::endl; } OV_ASSERT_PROPERTY_SUPPORTED(ov::device::gops); @@ -721,7 +733,7 @@ TEST_P(OVClassGetMetricTest_DEVICE_TYPE, GetMetricAndPrintNoThrow) { ov::Core ie = createCoreWithTemplate(); OV_ASSERT_PROPERTY_SUPPORTED(ov::device::type); ov::device::Type t = {}; - OV_ASSERT_NO_THROW(t = ie.get_property(deviceName, ov::device::type)); + OV_ASSERT_NO_THROW(t = ie.get_property(target_device, ov::device::type)); std::cout << "Device Type: " << t << std::endl; } @@ -729,7 +741,7 @@ TEST_P(OVClassGetMetricTest_RANGE_FOR_ASYNC_INFER_REQUESTS, GetMetricAndPrintNoT ov::Core ie = createCoreWithTemplate(); unsigned int start{0}, end{0}, step{0}; - ASSERT_NO_THROW(std::tie(start, end, step) = ie.get_property(deviceName, ov::range_for_async_infer_requests)); + ASSERT_NO_THROW(std::tie(start, end, step) = ie.get_property(target_device, ov::range_for_async_infer_requests)); std::cout << "Range for async infer requests: " << std::endl << start << std::endl @@ -746,7 +758,7 @@ TEST_P(OVClassGetMetricTest_RANGE_FOR_STREAMS, GetMetricAndPrintNoThrow) { ov::Core ie = createCoreWithTemplate(); unsigned int start = 0, end = 0; - ASSERT_NO_THROW(std::tie(start, end) = ie.get_property(deviceName, ov::range_for_streams)); + ASSERT_NO_THROW(std::tie(start, end) = ie.get_property(target_device, ov::range_for_streams)); std::cout << "Range for streams: " << std::endl << start << std::endl @@ -760,18 +772,18 @@ TEST_P(OVClassGetMetricTest_RANGE_FOR_STREAMS, GetMetricAndPrintNoThrow) { TEST_P(OVClassGetMetricTest_ThrowUnsupported, GetMetricThrow) { ov::Core ie = createCoreWithTemplate(); - ASSERT_THROW(ie.get_property(deviceName, "unsupported_metric"), ov::Exception); + ASSERT_THROW(ie.get_property(target_device, "unsupported_metric"), ov::Exception); } TEST_P(OVClassGetConfigTest, GetConfigNoThrow) { ov::Core ie = createCoreWithTemplate(); std::vector 
configValues; - OV_ASSERT_NO_THROW(configValues = ie.get_property(deviceName, ov::supported_properties)); + OV_ASSERT_NO_THROW(configValues = ie.get_property(target_device, ov::supported_properties)); for (auto&& confKey : configValues) { ov::Any defaultValue; - OV_ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, confKey)); + OV_ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, confKey)); ASSERT_FALSE(defaultValue.empty()); } } @@ -779,10 +791,10 @@ TEST_P(OVClassGetConfigTest, GetConfigNoThrow) { TEST_P(OVClassGetConfigTest, GetConfigHeteroNoThrow) { ov::Core ie = createCoreWithTemplate(); std::vector configValues; - OV_ASSERT_NO_THROW(configValues = ie.get_property(deviceName, ov::supported_properties)); + OV_ASSERT_NO_THROW(configValues = ie.get_property(target_device, ov::supported_properties)); for (auto&& confKey : configValues) { - OV_ASSERT_NO_THROW(ie.get_property(deviceName, confKey)); + OV_ASSERT_NO_THROW(ie.get_property(target_device, confKey)); } } @@ -794,7 +806,7 @@ TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigHeteroThrow) { TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigHeteroWithDeviceThrow) { ov::Core ie = createCoreWithTemplate(); - ASSERT_THROW(ie.get_property(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName, + ASSERT_THROW(ie.get_property(CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device, ov::device::priorities), ov::Exception); } @@ -802,33 +814,33 @@ TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigHeteroWithDeviceThrow) { TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigThrow) { ov::Core ie = createCoreWithTemplate(); - ASSERT_THROW(ie.get_property(deviceName, "unsupported_config"), ov::Exception); + ASSERT_THROW(ie.get_property(target_device, "unsupported_config"), ov::Exception); } TEST_P(OVClassSpecificDeviceTestGetConfig, GetConfigSpecificDeviceNoThrow) { ov::Core ie = createCoreWithTemplate(); ov::Any p; - std::string deviceID, clearDeviceName; - auto pos = deviceName.find('.'); + std::string deviceID, cleartarget_device; + auto pos = target_device.find('.'); if (pos != std::string::npos) { - clearDeviceName = deviceName.substr(0, pos); - deviceID = deviceName.substr(pos + 1, deviceName.size()); + cleartarget_device = target_device.substr(0, pos); + deviceID = target_device.substr(pos + 1, target_device.size()); } - if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { + if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) { GTEST_SKIP(); } - auto deviceIDs = ie.get_property(clearDeviceName, ov::available_devices); + auto deviceIDs = ie.get_property(cleartarget_device, ov::available_devices); if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) { GTEST_SKIP(); } std::vector configValues; - OV_ASSERT_NO_THROW(configValues = ie.get_property(deviceName, ov::supported_properties)); + OV_ASSERT_NO_THROW(configValues = ie.get_property(target_device, ov::supported_properties)); for (auto &&confKey : configValues) { ov::Any defaultValue; - OV_ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, confKey)); + OV_ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, confKey)); ASSERT_FALSE(defaultValue.empty()); } } @@ -842,7 +854,7 @@ TEST_P(OVClassGetAvailableDevices, GetAvailableDevicesNoThrow) { bool deviceFound = false; std::cout << "Available devices: " << std::endl; for (auto&& device : devices) { - if (device.find(deviceName) != std::string::npos) { + if 
+        if (device.find(target_device) != std::string::npos) {
             deviceFound = true;
         }
@@ -859,13 +871,13 @@ TEST_P(OVClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
 TEST_P(OVClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
-        auto deviceIDs = ie.get_property(deviceName, ov::available_devices);
+    if (supportsDeviceID(ie, target_device)) {
+        auto deviceIDs = ie.get_property(target_device, ov::available_devices);
         if (deviceIDs.empty())
             GTEST_SKIP();
         OV_ASSERT_NO_THROW(ie.query_model(actualNetwork,
                                           CommonTestUtils::DEVICE_HETERO,
-                                          ov::device::priorities(deviceName + "." + deviceIDs[0], deviceName)));
+                                          ov::device::priorities(target_device + "." + deviceIDs[0], target_device)));
     } else {
         GTEST_SKIP();
     }
@@ -874,9 +886,9 @@ TEST_P(OVClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) {
 TEST_P(OVClassQueryNetworkTest, QueryNetworkWithDeviceID) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
+    if (supportsDeviceID(ie, target_device)) {
         try {
-            ie.query_model(simpleNetwork, deviceName + ".0");
+            ie.query_model(simpleNetwork, target_device + ".0");
         } catch (const ov::Exception& ex) {
             std::string message = ex.what();
             ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@@ -889,8 +901,8 @@ TEST_P(OVClassQueryNetworkTest, QueryNetworkWithDeviceID) {
 TEST_P(OVClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
-        ASSERT_THROW(ie.query_model(actualNetwork, deviceName + ".110"), ov::Exception);
+    if (supportsDeviceID(ie, target_device)) {
+        ASSERT_THROW(ie.query_model(actualNetwork, target_device + ".110"), ov::Exception);
     } else {
         GTEST_SKIP();
     }
@@ -899,8 +911,8 @@ TEST_P(OVClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) {
 TEST_P(OVClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
-        ASSERT_THROW(ie.query_model(actualNetwork, deviceName + ".l0"), ov::Exception);
+    if (supportsDeviceID(ie, target_device)) {
+        ASSERT_THROW(ie.query_model(actualNetwork, target_device + ".l0"), ov::Exception);
     } else {
         GTEST_SKIP();
     }
@@ -909,10 +921,10 @@ TEST_P(OVClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) {
 TEST_P(OVClassQueryNetworkTest, QueryNetworkHETEROWithBigDeviceIDThrows) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
+    if (supportsDeviceID(ie, target_device)) {
         ASSERT_THROW(ie.query_model(actualNetwork,
                                     CommonTestUtils::DEVICE_HETERO,
-                                    ov::device::priorities(deviceName + ".100", deviceName)),
+                                    ov::device::priorities(target_device + ".100", target_device)),
                      ov::Exception);
     } else {
         GTEST_SKIP();
@@ -927,31 +939,31 @@ using OVClassNetworkTestP = OVClassBaseTestP;

 TEST_P(OVClassNetworkTestP, LoadNetworkActualNoThrow) {
     ov::Core ie = createCoreWithTemplate();
-    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, deviceName));
+    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, target_device));
 }

 TEST_P(OVClassNetworkTestP, LoadNetworkActualHeteroDeviceNoThrow) {
     ov::Core ie = createCoreWithTemplate();
-    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName));
+    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device));
 }

 TEST_P(OVClassNetworkTestP, LoadNetworkActualHeteroDevice2NoThrow) {
     ov::Core ie = createCoreWithTemplate();
-    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, CommonTestUtils::DEVICE_HETERO, ov::device::priorities(deviceName)));
+    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, CommonTestUtils::DEVICE_HETERO, ov::device::priorities(target_device)));
 }

 TEST_P(OVClassNetworkTestP, LoadNetworkActualHeteroDeviceUsingDevicePropertiesNoThrow) {
     ov::Core ie = createCoreWithTemplate();
     OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork,
         CommonTestUtils::DEVICE_HETERO,
-        ov::device::priorities(deviceName),
-        ov::device::properties(deviceName,
+        ov::device::priorities(target_device),
+        ov::device::properties(target_device,
             ov::enable_profiling(true))));
 }

 TEST_P(OVClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) {
     auto ie = createCoreWithTemplate();
-    auto net = ie.compile_model(actualNetwork, deviceName);
+    auto net = ie.compile_model(actualNetwork, target_device);
     auto runtime_function = net.get_runtime_model();
     ASSERT_NE(nullptr, runtime_function);
     auto actual_parameters = runtime_function->get_parameters();
@@ -981,26 +993,26 @@ TEST_P(OVClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) {
 TEST_P(OVClassSeveralDevicesTestLoadNetwork, LoadNetworkActualSeveralDevicesNoThrow) {
     ov::Core ie = createCoreWithTemplate();

-    std::string clearDeviceName;
-    auto pos = deviceNames.begin()->find('.');
+    std::string clear_target_device;
+    auto pos = target_devices.begin()->find('.');
     if (pos != std::string::npos) {
-        clearDeviceName = deviceNames.begin()->substr(0, pos);
+        clear_target_device = target_devices.begin()->substr(0, pos);
     }
-    if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
+    if (!supportsDeviceID(ie, clear_target_device) || !supportsAvaliableDevices(ie, clear_target_device)) {
         GTEST_SKIP();
     }
-    auto deviceIDs = ie.get_property(clearDeviceName, ov::available_devices);
-    if (deviceIDs.size() < deviceNames.size())
+    auto deviceIDs = ie.get_property(clear_target_device, ov::available_devices);
+    if (deviceIDs.size() < target_devices.size())
         GTEST_SKIP();

-    std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":");
-    for (auto& dev_name : deviceNames) {
-        multiDeviceName += dev_name;
-        if (&dev_name != &(deviceNames.back())) {
-            multiDeviceName += ",";
+    std::string multi_target_device = CommonTestUtils::DEVICE_MULTI + std::string(":");
+    for (auto& dev_name : target_devices) {
+        multi_target_device += dev_name;
+        if (&dev_name != &(target_devices.back())) {
+            multi_target_device += ",";
         }
     }
-    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, multiDeviceName));
+    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, multi_target_device));
 }

 //
@@ -1009,12 +1021,12 @@ TEST_P(OVClassSeveralDevicesTestLoadNetwork, LoadNetworkActualSeveralDevicesNoTh
 TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROWithDeviceIDNoThrow) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
-        auto deviceIDs = ie.get_property(deviceName, ov::available_devices);
+    if (supportsDeviceID(ie, target_device)) {
+        auto deviceIDs = ie.get_property(target_device, ov::available_devices);
         if (deviceIDs.empty())
             GTEST_SKIP();
         std::string heteroDevice =
-            CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + "." + deviceIDs[0] + "," + deviceName;
+            CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device + "." + deviceIDs[0] + "," + target_device;
         OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, heteroDevice));
     } else {
         GTEST_SKIP();
@@ -1024,11 +1036,11 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROWithDeviceIDNoThrow) {
 TEST_P(OVClassLoadNetworkTest, LoadNetworkWithDeviceIDNoThrow) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
-        auto deviceIDs = ie.get_property(deviceName, ov::available_devices);
+    if (supportsDeviceID(ie, target_device)) {
+        auto deviceIDs = ie.get_property(target_device, ov::available_devices);
         if (deviceIDs.empty())
             GTEST_SKIP();
-        OV_ASSERT_NO_THROW(ie.compile_model(simpleNetwork, deviceName + "." + deviceIDs[0]));
+        OV_ASSERT_NO_THROW(ie.compile_model(simpleNetwork, target_device + "." + deviceIDs[0]));
     } else {
         GTEST_SKIP();
     }
@@ -1037,8 +1049,8 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkWithDeviceIDNoThrow) {
 TEST_P(OVClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
-        ASSERT_THROW(ie.compile_model(actualNetwork, deviceName + ".10"), ov::Exception);
+    if (supportsDeviceID(ie, target_device)) {
+        ASSERT_THROW(ie.compile_model(actualNetwork, target_device + ".10"), ov::Exception);
     } else {
         GTEST_SKIP();
     }
@@ -1046,13 +1058,13 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) {

 TEST_P(OVClassLoadNetworkWithCorrectPropertiesTest, LoadNetworkWithCorrectPropertiesTest) {
     ov::Core ie = createCoreWithTemplate();
-    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, deviceName, configuration));
+    OV_ASSERT_NO_THROW(ie.compile_model(actualNetwork, target_device, configuration));
 }

 TEST_P(OVClassLoadNetworkWithDefaultPropertiesTest, LoadNetworkWithDefaultPropertiesTest) {
     ov::Core ie = createCoreWithTemplate();
     ov::CompiledModel model;
-    OV_ASSERT_NO_THROW(model = ie.compile_model(actualNetwork, deviceName, configuration));
+    OV_ASSERT_NO_THROW(model = ie.compile_model(actualNetwork, target_device, configuration));
     ov::hint::PerformanceMode value;
     OV_ASSERT_NO_THROW(value = model.get_property(ov::hint::performance_mode));
     ASSERT_EQ(value, ov::hint::PerformanceMode::THROUGHPUT);
@@ -1061,7 +1073,7 @@ TEST_P(OVClassLoadNetworkWithDefaultPropertiesTest, LoadNetworkWithDefaultProper
 TEST_P(OVClassLoadNetworkWithDefaultIncorrectPropertiesTest, LoadNetworkWithDefaultIncorrectPropertiesTest) {
     ov::Core ie = createCoreWithTemplate();
     ov::CompiledModel model;
-    OV_ASSERT_NO_THROW(model = ie.compile_model(actualNetwork, deviceName, configuration));
+    OV_ASSERT_NO_THROW(model = ie.compile_model(actualNetwork, target_device, configuration));
     ov::hint::PerformanceMode value;
     OV_ASSERT_NO_THROW(value = model.get_property(ov::hint::performance_mode));
     ASSERT_EQ(value, ov::hint::PerformanceMode::UNDEFINED);
@@ -1070,8 +1082,8 @@ TEST_P(OVClassLoadNetworkWithDefaultIncorrectPropertiesTest, LoadNetworkWithDefa
 TEST_P(OVClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
-        ASSERT_THROW(ie.compile_model(actualNetwork, deviceName + ".l0"), ov::Exception);
+    if (supportsDeviceID(ie, target_device)) {
+        ASSERT_THROW(ie.compile_model(actualNetwork, target_device + ".l0"), ov::Exception);
     } else {
         GTEST_SKIP();
     }
@@ -1080,10 +1092,10 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) {
 TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
+    if (supportsDeviceID(ie, target_device)) {
         ASSERT_THROW(ie.compile_model(actualNetwork,
                                       "HETERO",
-                                      ov::device::priorities(deviceName + ".100", CommonTestUtils::DEVICE_CPU)),
+                                      ov::device::priorities(target_device + ".100", CommonTestUtils::DEVICE_CPU)),
                      ov::Exception);
     } else {
         GTEST_SKIP();
@@ -1093,10 +1105,10 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) {
 TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName)) {
+    if (supportsDeviceID(ie, target_device)) {
         ASSERT_THROW(ie.compile_model(actualNetwork,
                                       CommonTestUtils::DEVICE_HETERO,
-                                      ov::device::priorities(deviceName, CommonTestUtils::DEVICE_CPU),
+                                      ov::device::priorities(target_device, CommonTestUtils::DEVICE_CPU),
                                       ov::device::id("110")),
                      ov::Exception);
     } else {
@@ -1109,11 +1121,11 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) {
 //

 TEST_P(OVClassLoadNetworkTest, LoadNetworkMULTIwithAUTONoThrow) {
     ov::Core ie = createCoreWithTemplate();
-    if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+    if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
         std::string devices;
-        auto availableDevices = ie.get_property(deviceName, ov::available_devices);
+        auto availableDevices = ie.get_property(target_device, ov::available_devices);
         for (auto&& device : availableDevices) {
-            devices += deviceName + '.' + device;
+            devices += target_device + '.' + device;
             if (&device != &(availableDevices.back())) {
                 devices += ',';
             }
@@ -1123,7 +1135,7 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkMULTIwithAUTONoThrow) {
             CommonTestUtils::DEVICE_MULTI,
             ov::device::properties(CommonTestUtils::DEVICE_AUTO, ov::device::priorities(devices)),
             ov::device::properties(CommonTestUtils::DEVICE_MULTI,
-                                   ov::device::priorities(CommonTestUtils::DEVICE_AUTO, deviceName))));
+                                   ov::device::priorities(CommonTestUtils::DEVICE_AUTO, target_device))));
     } else {
         GTEST_SKIP();
     }
@@ -1135,11 +1147,11 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkMULTIwithAUTONoThrow) {
 TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) {
     ov::Core ie = createCoreWithTemplate();
-    if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+    if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
         std::string devices;
-        auto availableDevices = ie.get_property(deviceName, ov::available_devices);
+        auto availableDevices = ie.get_property(target_device, ov::available_devices);
         for (auto&& device : availableDevices) {
-            devices += deviceName + '.' + device;
+            devices += target_device + '.' + device;
             if (&device != &(availableDevices.back())) {
                 devices += ',';
             }
@@ -1150,7 +1162,7 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) {
             ov::device::properties(CommonTestUtils::DEVICE_MULTI,
                                    ov::device::priorities(devices)),
             ov::device::properties(CommonTestUtils::DEVICE_HETERO,
-                                   ov::device::priorities(CommonTestUtils::DEVICE_MULTI, deviceName))));
+                                   ov::device::priorities(CommonTestUtils::DEVICE_MULTI, target_device))));
     } else {
         GTEST_SKIP();
     }
@@ -1159,9 +1171,9 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) {
 TEST_P(OVClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) {
     ov::Core ie = createCoreWithTemplate();
-    if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+    if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
         std::string devices;
-        auto availableDevices = ie.get_property(deviceName, ov::available_devices);
+        auto availableDevices = ie.get_property(target_device, ov::available_devices);
         for (auto&& device : availableDevices) {
             devices += CommonTestUtils::DEVICE_HETERO + std::string(".") + device;
             if (&device != &(availableDevices.back())) {
@@ -1172,7 +1184,7 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) {
             actualNetwork,
             CommonTestUtils::DEVICE_MULTI,
             ov::device::properties(CommonTestUtils::DEVICE_MULTI, ov::device::priorities(devices)),
-            ov::device::properties(CommonTestUtils::DEVICE_HETERO, ov::device::priorities(deviceName, deviceName))));
+            ov::device::properties(CommonTestUtils::DEVICE_HETERO, ov::device::priorities(target_device, target_device))));
     } else {
         GTEST_SKIP();
     }
@@ -1185,11 +1197,11 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) {
 TEST_P(OVClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
     ov::Core ie = createCoreWithTemplate();
-    if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+    if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
         std::string devices;
-        auto availableDevices = ie.get_property(deviceName, ov::available_devices);
+        auto availableDevices = ie.get_property(target_device, ov::available_devices);
         for (auto&& device : availableDevices) {
-            devices += deviceName + '.' + device;
+            devices += target_device + '.' + device;
             if (&device != &(availableDevices.back())) {
                 devices += ',';
             }
@@ -1201,7 +1213,7 @@ TEST_P(OVClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
             expectedLayers.emplace(node->get_friendly_name());
         }
         ov::SupportedOpsMap result;
-        std::string hetero_device_priorities(CommonTestUtils::DEVICE_MULTI + std::string(",") + deviceName);
+        std::string hetero_device_priorities(CommonTestUtils::DEVICE_MULTI + std::string(",") + target_device);
         OV_ASSERT_NO_THROW(result = ie.query_model(
                                multinputNetwork,
                                CommonTestUtils::DEVICE_HETERO,
@@ -1209,7 +1221,7 @@ TEST_P(OVClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
                                    ov::device::priorities(devices)),
                                ov::device::properties(CommonTestUtils::DEVICE_HETERO,
                                    ov::device::priorities(CommonTestUtils::DEVICE_MULTI,
-                                       deviceName))));
+                                       target_device))));

         std::unordered_set<std::string> actualLayers;
         for (auto&& layer : result) {
@@ -1224,9 +1236,9 @@ TEST_P(OVClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
 TEST_P(OVClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) {
     ov::Core ie = createCoreWithTemplate();

-    if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+    if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
         std::string devices;
-        auto availableDevices = ie.get_property(deviceName, ov::available_devices);
+        auto availableDevices = ie.get_property(target_device, ov::available_devices);
         for (auto&& device : availableDevices) {
             devices += "HETERO." + device;
             if (&device != &(availableDevices.back())) {
@@ -1245,7 +1257,7 @@ TEST_P(OVClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) {
                                ov::device::properties(CommonTestUtils::DEVICE_MULTI,
                                    ov::device::priorities(devices)),
                                ov::device::properties(CommonTestUtils::DEVICE_HETERO,
-                                   ov::device::priorities(deviceName, deviceName))));
+                                   ov::device::priorities(target_device, target_device))));

         std::unordered_set<std::string> actualLayers;
         for (auto&& layer : result) {
@@ -1261,49 +1273,49 @@ TEST_P(OVClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) {
 TEST_P(OVClassLoadNetworkAfterCoreRecreateTest, LoadAfterRecreateCoresAndPlugins) {
     ov::Core ie = createCoreWithTemplate();
     {
-        auto versions = ie.get_versions(std::string(CommonTestUtils::DEVICE_MULTI) + ":" + deviceName + "," +
+        auto versions = ie.get_versions(std::string(CommonTestUtils::DEVICE_MULTI) + ":" + target_device + "," +
                                         CommonTestUtils::DEVICE_CPU);
         ASSERT_EQ(3, versions.size());
     }
     ov::AnyMap config;
-    if (deviceName == CommonTestUtils::DEVICE_CPU) {
+    if (target_device == CommonTestUtils::DEVICE_CPU) {
         config.insert(ov::enable_profiling(true));
     }
     // OV_ASSERT_NO_THROW({
     //     ov::Core ie = createCoreWithTemplate();
     //     std::string name = actualNetwork.getInputsInfo().begin()->first;
     //     actualNetwork.getInputsInfo().at(name)->setPrecision(Precision::U8);
-    //     auto executableNetwork = ie.compile_model(actualNetwork, deviceName, config);
+    //     auto executableNetwork = ie.compile_model(actualNetwork, target_device, config);
     // });
 };

 TEST_P(OVClassSetDefaultDeviceIDTest, SetDefaultDeviceIDNoThrow) {
     ov::Core ie = createCoreWithTemplate();

-    auto deviceIDs = ie.get_property(deviceName, ov::available_devices);
+    auto deviceIDs = ie.get_property(target_device, ov::available_devices);
     if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
         GTEST_SKIP();
     }
     std::string value;
-    OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::device::id(deviceID), ov::enable_profiling(true)));
-    ASSERT_TRUE(ie.get_property(deviceName, ov::enable_profiling));
-    OV_ASSERT_NO_THROW(value = ie.get_property(deviceName, ov::enable_profiling.name()).as<std::string>());
+    OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::device::id(deviceID), ov::enable_profiling(true)));
+    ASSERT_TRUE(ie.get_property(target_device, ov::enable_profiling));
+    OV_ASSERT_NO_THROW(value = ie.get_property(target_device, ov::enable_profiling.name()).as<std::string>());
     ASSERT_EQ(value, "YES");
 }

 TEST_P(OVClassSetGlobalConfigTest, SetGlobalConfigNoThrow) {
     ov::Core ie = createCoreWithTemplate();

-    auto deviceIDs = ie.get_property(deviceName, ov::available_devices);
+    auto deviceIDs = ie.get_property(target_device, ov::available_devices);
     ov::Any ref, src;
     for (auto& dev_id : deviceIDs) {
-        OV_ASSERT_NO_THROW(ie.set_property(deviceName + "." + dev_id, ov::enable_profiling(false)));
+        OV_ASSERT_NO_THROW(ie.set_property(target_device + "." + dev_id, ov::enable_profiling(false)));
     }
-    OV_ASSERT_NO_THROW(ie.set_property(deviceName, ov::enable_profiling(true)));
-    OV_ASSERT_NO_THROW(ref = ie.get_property(deviceName, ov::enable_profiling.name()));
+    OV_ASSERT_NO_THROW(ie.set_property(target_device, ov::enable_profiling(true)));
+    OV_ASSERT_NO_THROW(ref = ie.get_property(target_device, ov::enable_profiling.name()));

     for (auto& dev_id : deviceIDs) {
-        OV_ASSERT_NO_THROW(src = ie.get_property(deviceName + "." + dev_id, ov::enable_profiling.name()));
+        OV_ASSERT_NO_THROW(src = ie.get_property(target_device + "." + dev_id, ov::enable_profiling.name()));
         ASSERT_EQ(src, ref);
     }
 }
@@ -1311,24 +1323,24 @@ TEST_P(OVClassSetGlobalConfigTest, SetGlobalConfigNoThrow) {
 TEST_P(OVClassSeveralDevicesTestDefaultCore, DefaultCoreSeveralDevicesNoThrow) {
     ov::Core ie;

-    std::string clearDeviceName;
-    auto pos = deviceNames.begin()->find('.');
+    std::string clear_target_device;
+    auto pos = target_devices.begin()->find('.');
     if (pos != std::string::npos) {
-        clearDeviceName = deviceNames.begin()->substr(0, pos);
+        clear_target_device = target_devices.begin()->substr(0, pos);
     }
-    if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
+    if (!supportsDeviceID(ie, clear_target_device) || !supportsAvaliableDevices(ie, clear_target_device)) {
         GTEST_SKIP();
     }
-    auto deviceIDs = ie.get_property(clearDeviceName, ov::available_devices);
-    if (deviceIDs.size() < deviceNames.size())
+    auto deviceIDs = ie.get_property(clear_target_device, ov::available_devices);
+    if (deviceIDs.size() < target_devices.size())
         GTEST_SKIP();

-    for (size_t i = 0; i < deviceNames.size(); ++i) {
-        OV_ASSERT_NO_THROW(ie.set_property(deviceNames[i], ov::enable_profiling(true)));
+    for (size_t i = 0; i < target_devices.size(); ++i) {
+        OV_ASSERT_NO_THROW(ie.set_property(target_devices[i], ov::enable_profiling(true)));
     }
     bool res;
-    for (size_t i = 0; i < deviceNames.size(); ++i) {
-        OV_ASSERT_NO_THROW(res = ie.get_property(deviceNames[i], ov::enable_profiling));
+    for (size_t i = 0; i < target_devices.size(); ++i) {
+        OV_ASSERT_NO_THROW(res = ie.get_property(target_devices[i], ov::enable_profiling));
         ASSERT_TRUE(res);
     }
 }
diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/life_time.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/life_time.hpp
index ee849f8cda3..f8390522011 100644
--- a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/life_time.hpp
+++ b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/life_time.hpp
@@ -10,30 +10,27 @@ namespace ov {
 namespace test {
 namespace behavior {
-class OVHoldersTest : public CommonTestUtils::TestsCommon,
+class OVHoldersTest : public OVPluginTestBase,
                       public ::testing::WithParamInterface<std::string> {
 public:
     static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);
-    void SetUp() override;
-    void TearDown() override;
+protected:
     std::string deathTestStyle;
     std::shared_ptr<ov::Model> function;
-    std::string targetDevice;
 };

-class OVHoldersTestOnImportedNetwork : public CommonTestUtils::TestsCommon,
+class OVHoldersTestOnImportedNetwork : public OVPluginTestBase,
                                        public ::testing::WithParamInterface<std::string> {
 public:
     static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);
-    void SetUp() override;
     void TearDown() override;
+protected:
     std::shared_ptr<ov::Model> function;
-    std::string targetDevice;
     std::string deathTestStyle;
 };
 }  // namespace behavior
diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/properties_tests.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/properties_tests.hpp
index eb16d67dc36..51bb556513c 100644
--- a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/properties_tests.hpp
+++ b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/properties_tests.hpp
@@ -16,11 +16,10 @@ namespace ov {
 namespace test {
 namespace behavior {

-class OVPropertiesBase : public CommonTestUtils::TestsCommon {
+class OVPropertiesBase : public OVPluginTestBase {
 public:
     std::shared_ptr<ov::Core> core = utils::PluginCache::get().core();
     std::shared_ptr<ov::Model> model;
-    std::string device_name;
     AnyMap properties;
 };
diff --git a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/remote.hpp b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/remote.hpp
index 781f2f95647..7b3f61ef445 100644
--- a/src/tests/functional/plugin/shared/include/behavior/ov_plugin/remote.hpp
+++ b/src/tests/functional/plugin/shared/include/behavior/ov_plugin/remote.hpp
@@ -10,6 +10,7 @@
 #include "openvino/runtime/compiled_model.hpp"
 #include "openvino/op/parameter.hpp"
 #include "functional_test_utils/ov_plugin_cache.hpp"
+#include "base/ov_behavior_test_utils.hpp"

 namespace ov {
 namespace test {
@@ -20,7 +21,7 @@ using RemoteTensorParams = std::tuple
                  >;  // remote context and tensor parameters

 class OVRemoteTest : public testing::WithParamInterface<RemoteTensorParams>,
-                     public CommonTestUtils::TestsCommon {
+                     public ov::test::behavior::OVPluginTestBase {
 public:
     static std::string getTestCaseName(testing::TestParamInfo<RemoteTensorParams> obj);
 protected:
@@ -28,7 +29,6 @@ protected:
     void TearDown() override;

     element::Type element_type;
-    std::string target_device;
     ov::AnyMap config;
     ov::AnyMap context_parameters;
     ov::AnyMap tensor_parameters;
diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/auto_batching_tests.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/auto_batching_tests.hpp
index bfe8a065c69..270eed3dad0 100644
--- a/src/tests/functional/plugin/shared/include/behavior/plugin/auto_batching_tests.hpp
+++ b/src/tests/functional/plugin/shared/include/behavior/plugin/auto_batching_tests.hpp
@@ -13,6 +13,7 @@

 #include "ngraph_functions/subgraph_builders.hpp"
 #include "functional_test_utils/blob_utils.hpp"
+#include "base/behavior_test_utils.hpp"

 using namespace ::testing;
 using namespace InferenceEngine;

@@ -25,10 +26,10 @@ using AutoBatchTwoNetsParams = std::tuple<
     size_t,   // number of requests
     size_t>;  // batch size>

-class AutoBatching_Test : public CommonTestUtils::TestsCommon,
+class AutoBatching_Test : public BehaviorTestsUtils::IEPluginTestBase,
                           public testing::WithParamInterface<AutoBatchTwoNetsParams> {
     void SetUp() override {
-        std::tie(device_name, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
+        std::tie(target_device, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
         fn_ptrs = {ngraph::builder::subgraph::makeSingleConv(),
                    ngraph::builder::subgraph::makeMultiSingleConv()};
     };
@@ -36,15 +37,14 @@ public:
     static std::string getTestCaseName(const testing::TestParamInfo<AutoBatchTwoNetsParams> &obj) {
         size_t streams, requests, batch;
         bool use_get_blob;
-        std::string device_name;
-        std::tie(device_name, use_get_blob, streams, requests, batch) = obj.param;
-        return device_name + std::string(use_get_blob ? "_get_blob" : "_set_blob") + "_batch_size_" +
+        std::string target_device;
+        std::tie(target_device, use_get_blob, streams, requests, batch) = obj.param;
+        return target_device + std::string(use_get_blob ? "_get_blob" : "_set_blob") + "_batch_size_" +
               std::to_string(batch) + "_num_streams_" + std::to_string(streams) + "_num_req_" + std::to_string(requests);
     }

 protected:
-    std::string device_name;
     bool use_get_blob;
     size_t num_streams;
     size_t num_requests;
@@ -70,16 +70,16 @@ protected:
             n.second->setPrecision(Precision::FP32);
         }
         std::map<std::string, std::string> config;
-        if (device_name.find("GPU") != std::string::npos)
+        if (target_device.find("GPU") != std::string::npos)
             config[CONFIG_KEY(GPU_THROUGHPUT_STREAMS)] = std::to_string(num_streams);
-        if (device_name.find("CPU") != std::string::npos) {
+        if (target_device.find("CPU") != std::string::npos) {
             config[CONFIG_KEY(CPU_THROUGHPUT_STREAMS)] = std::to_string(num_streams);
             config[CONFIG_KEY(ENFORCE_BF16)] = CONFIG_VALUE(NO);
         }
         // minimize timeout to reduce test time
         config[CONFIG_KEY(AUTO_BATCH_TIMEOUT)] = std::to_string(1);
         auto exec_net_ref = ie.LoadNetwork(net, std::string(CommonTestUtils::DEVICE_BATCH) + ":" +
-                                                device_name + "(" + std::to_string(num_batch) + ")",
+                                                target_device + "(" + std::to_string(num_batch) + ")",
                                            config);

         auto network_outputs = net.getOutputsInfo();
@@ -144,7 +144,7 @@ protected:
 class AutoBatching_Test_DetectionOutput : public AutoBatching_Test {
 public:
     void SetUp() override {
-        std::tie(device_name, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
+        std::tie(target_device, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
         fn_ptrs = {ngraph::builder::subgraph::makeDetectionOutput(),
                    ngraph::builder::subgraph::makeDetectionOutput()};
     };
@@ -152,9 +152,9 @@ public:
     static std::string getTestCaseName(const testing::TestParamInfo<AutoBatchTwoNetsParams> &obj) {
         size_t streams, requests, batch;
         bool use_get_blob;
-        std::string device_name;
-        std::tie(device_name, use_get_blob, streams, requests, batch) = obj.param;
-        return "DetectionOutput_HETERO_" + device_name + std::string(use_get_blob ? "_get_blob" : "_set_blob") +
+        std::string target_device;
+        std::tie(target_device, use_get_blob, streams, requests, batch) = obj.param;
"_get_blob" : "_set_blob") + "_batch_size_" + std::to_string(batch) + "_num_streams_" + std::to_string(streams) + "_num_req_" + std::to_string(requests); } diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/caching_tests.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/caching_tests.hpp index 11c90150ab3..835ef07d475 100644 --- a/src/tests/functional/plugin/shared/include/behavior/plugin/caching_tests.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/plugin/caching_tests.hpp @@ -13,6 +13,7 @@ #include "functional_test_utils/plugin_cache.hpp" #include "common_test_utils/unicode_utils.hpp" #include "openvino/util/common_util.hpp" +#include "base/behavior_test_utils.hpp" #include #include @@ -30,6 +31,7 @@ using loadNetworkCacheParams = std::tuple< namespace LayerTestsDefinitions { class LoadNetworkCacheTestBase : public testing::WithParamInterface, + virtual public BehaviorTestsUtils::IEPluginTestBase, virtual public LayerTestsUtils::LayerTestsCommon { std::string m_cacheFolderName; std::string m_functionName; @@ -52,18 +54,21 @@ using compileKernelsCacheParams = std::tuple< std::pair, std::string> // device and cache configuration >; class LoadNetworkCompiledKernelsCacheTest : virtual public LayerTestsUtils::LayerTestsCommon, - public testing::WithParamInterface { + virtual public BehaviorTestsUtils::IEPluginTestBase, + public testing::WithParamInterface { public: static std::string getTestCaseName(testing::TestParamInfo obj); protected: std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); - std::shared_ptr function; std::string cache_path; std::vector m_extList; + void SetUp() override { - function = ngraph::builder::subgraph::makeConvPoolRelu(); std::pair, std::string> userConfig; std::tie(targetDevice, userConfig) = GetParam(); + target_device = targetDevice; + APIBaseTest::SetUp(); + function = ngraph::builder::subgraph::makeConvPoolRelu(); configuration = userConfig.first; std::string ext = userConfig.second; std::string::size_type pos = 0; diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/configuration_tests.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/configuration_tests.hpp index 05196021bcc..63cb255bd60 100644 --- a/src/tests/functional/plugin/shared/include/behavior/plugin/configuration_tests.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/plugin/configuration_tests.hpp @@ -7,6 +7,7 @@ #include #include #include +#include #include #include @@ -17,6 +18,7 @@ #include "common_test_utils/test_common.hpp" #include "common_test_utils/file_utils.hpp" #include "functional_test_utils/plugin_cache.hpp" +#include "base/behavior_test_utils.hpp" namespace BehaviorTestsDefinitions { @@ -34,7 +36,8 @@ using DefaultConfigurationParameters = std::tuple< DefaultParameter // default parameter key value comparator >; -struct DefaultConfigurationTest : public CommonTestUtils::TestsCommon, public ::testing::WithParamInterface { +struct DefaultConfigurationTest : public BehaviorTestsUtils::IEPluginTestBase, + public ::testing::WithParamInterface { enum { DeviceName, DefaultParamterId }; @@ -43,16 +46,14 @@ struct DefaultConfigurationTest : public CommonTestUtils::TestsCommon, public :: protected: std::shared_ptr _core = PluginCache::get().ie(); - std::string targetDevice; DefaultParameter defaultParameter; }; -class ConfigBase : public CommonTestUtils::TestsCommon { +class ConfigBase : public BehaviorTestsUtils::IEPluginTestBase { public: std::shared_ptr ie = 
     std::shared_ptr<ngraph::Function> function;
     InferenceEngine::CNNNetwork cnnNet;
-    std::string targetDevice;
     std::map<std::string, std::string> configuration;
 };

@@ -60,17 +61,19 @@ class BehaviorTestsEmptyConfig : public testing::WithParamInterface
                                  public ConfigBase {
 public:
     static std::string getTestCaseName(testing::TestParamInfo<std::string> obj) {
-        std::string targetDevice;
-        targetDevice = obj.param;
+        std::string target_device;
+        target_device = obj.param;
+        std::replace(target_device.begin(), target_device.end(), ':', '.');
         std::ostringstream result;
-        result << "targetDevice=" << targetDevice;
+        result << "target_device=" << target_device;
         return result.str();
     }

     void SetUp() override {
         // Skip test according to plugin specific disabledTestPatterns() (if any)
-        SKIP_IF_CURRENT_TEST_IS_DISABLED()
         // Create CNNNetwork from ngrpah::Function
-        targetDevice = this->GetParam();
+        target_device = this->GetParam();
+        SKIP_IF_CURRENT_TEST_IS_DISABLED()
+        APIBaseTest::SetUp();
         function = ngraph::builder::subgraph::makeConvPoolRelu();
         cnnNet = InferenceEngine::CNNNetwork(function);
     }
@@ -85,20 +88,24 @@ class BehaviorTestsSingleOptionDefault : public testing::WithParamInterface
     static std::string getTestCaseName(testing::TestParamInfo obj) {
-        std::string targetDevice;
+        std::string target_device;
         std::pair<std::string, InferenceEngine::Parameter> configuration;
-        std::tie(targetDevice, configuration) = obj.param;
+        std::tie(target_device, configuration) = obj.param;
+        std::replace(target_device.begin(), target_device.end(), ':', '.');
         std::ostringstream result;
-        result << "targetDevice=" << targetDevice << "_";
-        result << "config=" << "(" << configuration.first << "_" << configuration.second.as<std::string>() << ")";
+        result << "target_device=" << target_device << "_";
+        std::string config_value = configuration.second.as<std::string>();
+        std::replace(config_value.begin(), config_value.end(), '-', '_');
+        result << "config=" << "(" << configuration.first << "_" << config_value << ")";
         return result.str();
     }

     void SetUp() override {
-        SKIP_IF_CURRENT_TEST_IS_DISABLED();
         std::pair<std::string, InferenceEngine::Parameter> entry;
-        std::tie(targetDevice, entry) = this->GetParam();
+        std::tie(target_device, entry) = this->GetParam();
         std::tie(key, value) = entry;
+        SKIP_IF_CURRENT_TEST_IS_DISABLED()
+        APIBaseTest::SetUp();
     }

     std::string key;
@@ -114,11 +121,12 @@ class CorrectConfigTests : public testing::WithParamInterface
     static std::string getTestCaseName(testing::TestParamInfo obj) {
-        std::string targetDevice;
+        std::string target_device;
         std::map<std::string, std::string> configuration;
-        std::tie(targetDevice, configuration) = obj.param;
+        std::tie(target_device, configuration) = obj.param;
+        std::replace(target_device.begin(), target_device.end(), ':', '.');
         std::ostringstream result;
-        result << "targetDevice=" << targetDevice << "_";
+        result << "target_device=" << target_device << "_";
         if (!configuration.empty()) {
             using namespace CommonTestUtils;
             result << "config=" << (configuration);
@@ -127,9 +135,10 @@ public:
     }

     void SetUp() override {
-        SKIP_IF_CURRENT_TEST_IS_DISABLED();
         std::map<std::string, std::string> entry;
-        std::tie(targetDevice, configuration) = this->GetParam();
+        std::tie(target_device, configuration) = this->GetParam();
+        SKIP_IF_CURRENT_TEST_IS_DISABLED();
+        APIBaseTest::SetUp();
         function = ngraph::builder::subgraph::makeConvPoolRelu();
         cnnNet = InferenceEngine::CNNNetwork(function);
     }
@@ -138,6 +147,7 @@ public:
         if (!configuration.empty()) {
             PluginCache::get().reset();
         }
+        APIBaseTest::TearDown();
     }
 };

@@ -152,7 +162,7 @@ public:
     void SetUp() override {
         SKIP_IF_CURRENT_TEST_IS_DISABLED();
         std::tuple<std::string, std::string, InferenceEngine::Parameter> entry;
-        std::tie(targetDevice, entry) = this->GetParam();
+        std::tie(target_device, entry) = this->GetParam();
         std::tie(key, value, reference) = entry;
         function = ngraph::builder::subgraph::makeConvPoolRelu();
         cnnNet = InferenceEngine::CNNNetwork(function);
@@ -172,8 +182,9 @@ class BehaviorTestsSingleOption : public testing::WithParamInterface
     void SetUp() override {
+        std::tie(target_device, key) = this->GetParam();
         SKIP_IF_CURRENT_TEST_IS_DISABLED();
-        std::tie(targetDevice, key) = this->GetParam();
+        APIBaseTest::SetUp();
         function = ngraph::builder::subgraph::makeConvPoolRelu();
         cnnNet = InferenceEngine::CNNNetwork(function);
     }
@@ -191,12 +202,13 @@ class SetPropLoadNetWorkGetPropTests : public testing::WithParamInterface
     static std::string getTestCaseName(testing::TestParamInfo obj) {
-        std::string targetDevice;
+        std::string target_device;
         std::map<std::string, std::string> configuration;
         std::map<std::string, std::string> loadNetWorkConfig;
-        std::tie(targetDevice, configuration, loadNetWorkConfig) = obj.param;
+        std::tie(target_device, configuration, loadNetWorkConfig) = obj.param;
+        std::replace(target_device.begin(), target_device.end(), ':', '.');
         std::ostringstream result;
-        result << "targetDevice=" << targetDevice << "_";
+        result << "target_device=" << target_device << "_";
         if (!configuration.empty()) {
             result << "configItem=";
             for (auto& configItem : configuration) {
@@ -215,9 +227,10 @@ public:
     }

     void SetUp() override {
-        SKIP_IF_CURRENT_TEST_IS_DISABLED();
         std::map<std::string, std::string> entry;
-        std::tie(targetDevice, configuration, loadNetWorkConfig) = this->GetParam();
+        std::tie(target_device, configuration, loadNetWorkConfig) = this->GetParam();
+        SKIP_IF_CURRENT_TEST_IS_DISABLED();
+        APIBaseTest::SetUp();
         function = ngraph::builder::subgraph::makeConvPoolRelu();
         cnnNet = InferenceEngine::CNNNetwork(function);
     }
diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/core_integration.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/core_integration.hpp
index e30f22b12d7..18d5eb0ca3d 100644
--- a/src/tests/functional/plugin/shared/include/behavior/plugin/core_integration.hpp
+++ b/src/tests/functional/plugin/shared/include/behavior/plugin/core_integration.hpp
@@ -25,19 +25,22 @@ namespace BehaviorTestsDefinitions {
 #define ASSERT_METRIC_SUPPORTED_IE(metricName)                            \
     {                                                                     \
         std::vector<std::string> metrics =                                \
-            ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS));      \
+            ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS));   \
         auto it = std::find(metrics.begin(), metrics.end(), metricName);  \
         ASSERT_NE(metrics.end(), it);                                     \
     }

-class IEClassBasicTestP : public ::testing::Test, public ::testing::WithParamInterface<std::pair<std::string, std::string> > {
+class IEClassBasicTestP : public BehaviorTestsUtils::IEPluginTestBase,
+                          public ::testing::WithParamInterface<std::pair<std::string, std::string> > {
 protected:
     std::string deviceName;
     std::string pluginName;
+
 public:
     void SetUp() override {
+        std::tie(pluginName, target_device) = GetParam();
         SKIP_IF_CURRENT_TEST_IS_DISABLED();
-        std::tie(pluginName, deviceName) = GetParam();
+        ov::test::behavior::APIBaseTest::SetUp();
         pluginName += IE_BUILD_POSTFIX;
         if (pluginName == (std::string("openvino_template_plugin") + IE_BUILD_POSTFIX)) {
             pluginName = ov::util::make_plugin_library_name(CommonTestUtils::getExecutableDirectory(), pluginName);
@@ -45,14 +48,14 @@ public:
     }
 };

-class IEClassSetDefaultDeviceIDTest : public ::testing::Test,
+class IEClassSetDefaultDeviceIDTest : public BehaviorTestsUtils::IEPluginTestBase,
                                       public ::testing::WithParamInterface<std::pair<std::string, std::string>> {
 protected:
-    std::string deviceName;
     std::string deviceID;
+
 public:
     void SetUp() override {
-        std::tie(deviceName, deviceID) = GetParam();
+        std::tie(target_device, deviceID) = GetParam();
     }
 };

@@ -78,31 +81,35 @@ using IEClassGetMetricTest_RANGE_FOR_STREAMS = BehaviorTestsUtils::IEClassBaseTe
 using IEClassSetGlobalConfigTest = BehaviorTestsUtils::IEClassBaseTestP;
 using IEClassSpecificDeviceTestSetConfig = BehaviorTestsUtils::IEClassBaseTestP;
 using IEClassSpecificDeviceTestGetConfig = BehaviorTestsUtils::IEClassBaseTestP;
-
 using IEClassLoadNetworkAfterCoreRecreateTest = BehaviorTestsUtils::IEClassBaseTestP;

-class IEClassSeveralDevicesTest : public BehaviorTestsUtils::IEClassNetworkTest,
+class IEClassSeveralDevicesTest : public BehaviorTestsUtils::IEPluginTestBase,
+                                  public BehaviorTestsUtils::IEClassNetworkTest,
                                   public ::testing::WithParamInterface<std::vector<std::string>> {
 public:
-    std::vector<std::string> deviceNames;
+    std::vector<std::string> target_devices;

     void SetUp() override {
+        target_device = CommonTestUtils::DEVICE_MULTI;
+        SKIP_IF_CURRENT_TEST_IS_DISABLED()
+        ov::test::behavior::APIBaseTest::SetUp();
         IEClassNetworkTest::SetUp();
-        deviceNames = GetParam();
+        target_devices = GetParam();
     }
 };
+
 using IEClassSeveralDevicesTestLoadNetwork = IEClassSeveralDevicesTest;
 using IEClassSeveralDevicesTestQueryNetwork = IEClassSeveralDevicesTest;
 using IEClassSeveralDevicesTestDefaultCore = IEClassSeveralDevicesTest;

-bool supportsAvaliableDevices(InferenceEngine::Core &ie, const std::string &deviceName) {
-    auto supportedMetricKeys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
+bool supportsAvaliableDevices(InferenceEngine::Core &ie, const std::string &target_device) {
+    auto supportedMetricKeys = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
     return supportedMetricKeys.end() !=
            std::find(std::begin(supportedMetricKeys), std::end(supportedMetricKeys), METRIC_KEY(AVAILABLE_DEVICES));
 }

-bool supportsDeviceID(InferenceEngine::Core &ie, const std::string &deviceName) {
-    auto supportedConfigKeys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
+bool supportsDeviceID(InferenceEngine::Core &ie, const std::string &target_device) {
+    auto supportedConfigKeys = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
     return supportedConfigKeys.end() !=
            std::find(std::begin(supportedConfigKeys),
                      std::end(supportedConfigKeys),
                      CONFIG_KEY(DEVICE_ID));
@@ -117,7 +124,7 @@ TEST(IEClassBasicTest, smoke_createDefault) {

 TEST_P(IEClassBasicTestP, registerExistingPluginThrows) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-    ASSERT_THROW(ie.RegisterPlugin(pluginName, deviceName), InferenceEngine::Exception);
+    ASSERT_THROW(ie.RegisterPlugin(pluginName, target_device), InferenceEngine::Exception);
 }

 TEST_P(IEClassBasicTestP, registerNewPluginNoThrows) {
@@ -159,7 +166,6 @@ TEST(IEClassBasicTest, smoke_createMockEngineConfigThrows) {
     ASSERT_THROW(InferenceEngine::Core ie(filename), InferenceEngine::Exception);
     CommonTestUtils::removeFile(filename.c_str());
 }
-
 #ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT

 TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {
@@ -184,7 +190,7 @@ TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {
             ASSERT_NO_THROW(ie.RegisterPlugins(ov::util::wstring_to_string(pluginsXmlW)));
             CommonTestUtils::removeFile(pluginsXmlW);
             ASSERT_NO_THROW(ie.GetVersions("mock"));  // from pluginXM
-            ASSERT_NO_THROW(ie.GetVersions(deviceName));
+            ASSERT_NO_THROW(ie.GetVersions(target_device));
             GTEST_COUT << "Plugin created " << testIndex << std::endl;

             ASSERT_NO_THROW(ie.RegisterPlugin(pluginName, "TEST_DEVICE"));
@@ -211,17 +217,17 @@ TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {

 TEST_P(IEClassBasicTestP, getVersionsByExactDeviceNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-    ASSERT_NO_THROW(ie.GetVersions(deviceName + ".0"));
+    ASSERT_NO_THROW(ie.GetVersions(target_device + ".0"));
 }

 TEST_P(IEClassBasicTestP, getVersionsByDeviceClassNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-    ASSERT_NO_THROW(ie.GetVersions(deviceName));
+    ASSERT_NO_THROW(ie.GetVersions(target_device));
 }

 TEST_P(IEClassBasicTestP, getVersionsNonEmpty) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-    ASSERT_EQ(2, ie.GetVersions(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName).size());
+    ASSERT_EQ(2, ie.GetVersions(CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device).size());
 }

 //
@@ -231,22 +237,22 @@ TEST_P(IEClassBasicTestP, getVersionsNonEmpty) {
 TEST_P(IEClassBasicTestP, unregisterExistingPluginNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
     // device instance is not created yet
-    ASSERT_THROW(ie.UnregisterPlugin(deviceName), InferenceEngine::Exception);
+    ASSERT_THROW(ie.UnregisterPlugin(target_device), InferenceEngine::Exception);

     // make the first call to IE which created device instance
-    ie.GetVersions(deviceName);
+    ie.GetVersions(target_device);
     // now, we can unregister device
-    ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName));
+    ASSERT_NO_THROW(ie.UnregisterPlugin(target_device));
 }

 TEST_P(IEClassBasicTestP, accessToUnregisteredPluginThrows) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-    ASSERT_THROW(ie.UnregisterPlugin(deviceName), InferenceEngine::Exception);
-    ASSERT_NO_THROW(ie.GetVersions(deviceName));
-    ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName));
-    ASSERT_NO_THROW(ie.SetConfig({}, deviceName));
-    ASSERT_NO_THROW(ie.GetVersions(deviceName));
-    ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName));
+    ASSERT_THROW(ie.UnregisterPlugin(target_device), InferenceEngine::Exception);
+    ASSERT_NO_THROW(ie.GetVersions(target_device));
+    ASSERT_NO_THROW(ie.UnregisterPlugin(target_device));
+    ASSERT_NO_THROW(ie.SetConfig({}, target_device));
+    ASSERT_NO_THROW(ie.GetVersions(target_device));
+    ASSERT_NO_THROW(ie.UnregisterPlugin(target_device));
 }

 TEST(IEClassBasicTest, smoke_unregisterNonExistingPluginThrows) {
@@ -261,7 +267,7 @@ TEST(IEClassBasicTest, smoke_unregisterNonExistingPluginThrows) {
 TEST_P(IEClassBasicTestP, SetConfigAllThrows) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
     ASSERT_NO_THROW(ie.SetConfig({{"unsupported_key", "4"}}));
-    ASSERT_ANY_THROW(ie.GetVersions(deviceName));
+    ASSERT_ANY_THROW(ie.GetVersions(target_device));
 }

 TEST_P(IEClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) {
@@ -272,13 +278,13 @@ TEST_P(IEClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) {
 TEST_P(IEClassBasicTestP, SetConfigNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
     ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}},
-                    deviceName));
+                    target_device));
 }

 TEST_P(IEClassBasicTestP, SetConfigAllNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
     ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}));
-    ASSERT_NO_THROW(ie.GetVersions(deviceName));
+    ASSERT_NO_THROW(ie.GetVersions(target_device));
 }

 TEST(IEClassBasicTest, smoke_SetConfigHeteroThrows) {
@@ -291,17 +297,17 @@ TEST_P(IEClassBasicTestP, SetGetConfigForTbbTerminateThrows) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
     bool value = false;
     ASSERT_NO_THROW(ie.SetConfig({{CONFIG_KEY(FORCE_TBB_TERMINATE), CONFIG_VALUE(YES)}}));
-    ASSERT_NO_THROW(value = ie.GetConfig(deviceName, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
+    ASSERT_NO_THROW(value = ie.GetConfig(target_device, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
     ASSERT_TRUE(value);

     ASSERT_NO_THROW(ie.SetConfig({{CONFIG_KEY(FORCE_TBB_TERMINATE), CONFIG_VALUE(NO)}}));
-    ASSERT_NO_THROW(value = ie.GetConfig(deviceName, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
+    ASSERT_NO_THROW(value = ie.GetConfig(target_device, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
     ASSERT_FALSE(value);
 }

 TEST_P(IEClassBasicTestP, SetConfigHeteroTargetFallbackThrows) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-    ASSERT_NO_THROW(ie.SetConfig({{"TARGET_FALLBACK", deviceName}}, CommonTestUtils::DEVICE_HETERO));
+    ASSERT_NO_THROW(ie.SetConfig({{"TARGET_FALLBACK", target_device}}, CommonTestUtils::DEVICE_HETERO));
 }

 TEST(IEClassBasicTest, smoke_SetConfigHeteroNoThrow) {
@@ -322,23 +328,23 @@ TEST(IEClassBasicTest, smoke_SetConfigHeteroNoThrow) {
 TEST_P(IEClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-    std::string deviceID, clearDeviceName;
-    auto pos = deviceName.find('.');
+    std::string deviceID, clear_target_device;
+    auto pos = target_device.find('.');
     if (pos != std::string::npos) {
-        clearDeviceName = deviceName.substr(0, pos);
-        deviceID = deviceName.substr(pos + 1, deviceName.size());
+        clear_target_device = target_device.substr(0, pos);
+        deviceID = target_device.substr(pos + 1, target_device.size());
     }
-    if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
+    if (!supportsDeviceID(ie, clear_target_device) || !supportsAvaliableDevices(ie, clear_target_device)) {
         GTEST_SKIP();
     }
-    std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
+    std::vector<std::string> deviceIDs = ie.GetMetric(clear_target_device, METRIC_KEY(AVAILABLE_DEVICES));
     if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
         GTEST_SKIP();
     }

-    ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}, deviceName));
+    ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}, target_device));
     std::string value;
-    ASSERT_NO_THROW(value = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
+    ASSERT_NO_THROW(value = ie.GetConfig(target_device, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
     ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES);
 }

@@ -349,8 +355,8 @@ TEST_P(IEClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) {
 TEST_P(IEClassBasicTestP, ImportNetworkThrows) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-    if (deviceName == CommonTestUtils::DEVICE_GPU) {
-        ASSERT_THROW(ie.ImportNetwork("model", deviceName), InferenceEngine::NetworkNotRead);
+    if (target_device == CommonTestUtils::DEVICE_GPU) {
+        ASSERT_THROW(ie.ImportNetwork("model", target_device), InferenceEngine::NetworkNotRead);

         const std::string modelName = "compiled_blob.blob";
         {
@@ -358,7 +364,7 @@ TEST_P(IEClassBasicTestP, ImportNetworkThrows) {
             file << "content";
         }

-        EXPECT_THROW(ie.ImportNetwork(modelName, deviceName), InferenceEngine::NotImplemented);
+        EXPECT_THROW(ie.ImportNetwork(modelName, target_device), InferenceEngine::NotImplemented);
         ASSERT_EQ(0, std::remove(modelName.c_str()));
     }
 }
@@ -387,14 +393,14 @@ TEST_P(IEClassBasicTestP, ImportNetworkWithNullContextThrows) {

 TEST_P(IEClassNetworkTestP, QueryNetworkActualThrows) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-    ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName));
+    ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device));
 }

 TEST_P(IEClassNetworkTestP, QueryNetworkActualNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

     try {
-        ie.QueryNetwork(actualCnnNetwork, deviceName);
+        ie.QueryNetwork(actualCnnNetwork, target_device);
     } catch (const InferenceEngine::Exception& ex) {
         std::string message = ex.what();
         ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@@ -405,12 +411,12 @@ TEST_P(IEClassNetworkTestP, QueryNetworkWithKSO) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

     try {
-        auto rres = ie.QueryNetwork(ksoCnnNetwork, deviceName);
+        auto rres = ie.QueryNetwork(ksoCnnNetwork, target_device);
         auto rl_map = rres.supportedLayersMap;
         auto func = ksoCnnNetwork.getFunction();
         for (const auto & op : func->get_ops()) {
             if (!rl_map.count(op->get_friendly_name())) {
-                FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName;
+                FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device;
             }
         }
     } catch (const InferenceEngine::Exception& ex) {
@@ -422,26 +428,26 @@ TEST_P(IEClassNetworkTestP, QueryNetworkWithKSO) {
 TEST_P(IEClassSeveralDevicesTestQueryNetwork, QueryNetworkActualSeveralDevicesNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-    std::string clearDeviceName;
-    auto pos = deviceNames.begin()->find('.');
+    std::string clear_target_device;
+    auto pos = target_devices.begin()->find('.');
     if (pos != std::string::npos) {
-        clearDeviceName = deviceNames.begin()->substr(0, pos);
+        clear_target_device = target_devices.begin()->substr(0, pos);
     }
-    if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
+    if (!supportsDeviceID(ie, clear_target_device) || !supportsAvaliableDevices(ie, clear_target_device)) {
         GTEST_SKIP();
     }
-    std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
-    if (deviceIDs.size() < deviceNames.size())
+    std::vector<std::string> deviceIDs = ie.GetMetric(clear_target_device, METRIC_KEY(AVAILABLE_DEVICES));
+    if (deviceIDs.size() < target_devices.size())
         GTEST_SKIP();

-    std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":");
-    for (auto& dev_name : deviceNames) {
-        multiDeviceName += dev_name;
-        if (&dev_name != &(deviceNames.back())) {
-            multiDeviceName += ",";
+    std::string multi_target_device = CommonTestUtils::DEVICE_MULTI + std::string(":");
+    for (auto& dev_name : target_devices) {
+        multi_target_device += dev_name;
+        if (&dev_name != &(target_devices.back())) {
+            multi_target_device += ",";
         }
     }
-    ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, multiDeviceName));
+    ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, multi_target_device));
 }

 TEST_P(IEClassNetworkTestP, SetAffinityWithConstantBranches) {
@@ -477,18 +483,18 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithConstantBranches) {
         }

         InferenceEngine::CNNNetwork net(func);
-        auto rres = ie.QueryNetwork(net, deviceName);
+        auto rres = ie.QueryNetwork(net, target_device);
         auto rl_map = rres.supportedLayersMap;
         for (const auto & op : func->get_ops()) {
             if (!rl_map.count(op->get_friendly_name())) {
-                FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName;
+                FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device;
             }
         }
         for (const auto & op : net.getFunction()->get_ops()) {
             std::string affinity = rl_map[op->get_friendly_name()];
             op->get_rt_info()["affinity"] = affinity;
         }
-        InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, deviceName);
+        InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, target_device);
     } catch (const InferenceEngine::NotImplemented & ex) {
         std::string message = ex.what();
         ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@@ -499,19 +505,19 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithKSO) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

     try {
-        auto rres = ie.QueryNetwork(ksoCnnNetwork, deviceName);
+        auto rres = ie.QueryNetwork(ksoCnnNetwork, target_device);
         auto rl_map = rres.supportedLayersMap;
         auto func = ksoCnnNetwork.getFunction();
         for (const auto & op : func->get_ops()) {
             if (!rl_map.count(op->get_friendly_name())) {
-                FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName;
+                FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device;
             }
         }
         for (const auto & op : ksoCnnNetwork.getFunction()->get_ops()) {
             std::string affinity = rl_map[op->get_friendly_name()];
             op->get_rt_info()["affinity"] = affinity;
         }
-        InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, deviceName);
+        InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, target_device);
     } catch (const InferenceEngine::Exception& ex) {
         std::string message = ex.what();
         ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@@ -521,7 +527,7 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithKSO) {
 TEST_P(IEClassNetworkTestP, QueryNetworkHeteroActualNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
     InferenceEngine::QueryNetworkResult res;
-    ASSERT_NO_THROW(res = ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", deviceName}}));
+    ASSERT_NO_THROW(res = ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", target_device}}));
     ASSERT_LT(0, res.supportedLayersMap.size());
 }

@@ -533,9 +539,9 @@ TEST_P(IEClassNetworkTestP, QueryNetworkMultiThrows) {
 TEST(IEClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
     InferenceEngine::Parameter p;
-    std::string deviceName = CommonTestUtils::DEVICE_HETERO;
+    std::string target_device = CommonTestUtils::DEVICE_HETERO;
CommonTestUtils::DEVICE_HETERO; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector t = p; std::cout << "Supported HETERO config keys: " << std::endl; @@ -573,7 +579,7 @@ TEST_P(IEClassGetMetricTest_SUPPORTED_METRICS, GetMetricAndPrintNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS))); std::vector t = p; std::cout << "Supported metrics: " << std::endl; @@ -588,7 +594,7 @@ TEST_P(IEClassGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricAndPrintNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector t = p; std::cout << "Supported config values: " << std::endl; @@ -603,7 +609,7 @@ TEST_P(IEClassGetMetricTest_AVAILABLE_DEVICES, GetMetricAndPrintNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES))); std::vector t = p; std::cout << "Available devices: " << std::endl; @@ -618,7 +624,7 @@ TEST_P(IEClassGetMetricTest_FULL_DEVICE_NAME, GetMetricAndPrintNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(FULL_DEVICE_NAME))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(FULL_DEVICE_NAME))); std::string t = p; std::cout << "Full device name: " << std::endl << t << std::endl; @@ -629,7 +635,7 @@ TEST_P(IEClassGetMetricTest_OPTIMIZATION_CAPABILITIES, GetMetricAndPrintNoThrow) InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(OPTIMIZATION_CAPABILITIES))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(OPTIMIZATION_CAPABILITIES))); std::vector t = p; std::cout << "Optimization capabilities: " << std::endl; @@ -644,7 +650,7 @@ TEST_P(IEClassGetMetricTest_DEVICE_GOPS, GetMetricAndPrintNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(DEVICE_GOPS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(DEVICE_GOPS))); std::map t = p; std::cout << "Device GOPS: " << std::endl; @@ -659,7 +665,7 @@ TEST_P(IEClassGetMetricTest_DEVICE_TYPE, GetMetricAndPrintNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(DEVICE_TYPE))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(DEVICE_TYPE))); InferenceEngine::Metrics::DeviceType t = p; std::cout << "Device Type: " << t << std::endl; @@ -671,7 +677,7 @@ TEST_P(IEClassGetMetricTest_NUMBER_OF_WAITING_INFER_REQUESTS, GetMetricAndPrintN InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - 
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(NUMBER_OF_WAITING_INFER_REQUESTS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(NUMBER_OF_WAITING_INFER_REQUESTS))); unsigned int t = p; std::cout << "Number of waiting infer requests: " << std::endl << t << std::endl; @@ -683,7 +689,7 @@ TEST_P(IEClassGetMetricTest_NUMBER_OF_EXEC_INFER_REQUESTS, GetMetricAndPrintNoTh InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(NUMBER_OF_EXEC_INFER_REQUESTS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(NUMBER_OF_EXEC_INFER_REQUESTS))); unsigned int t = p; std::cout << "Number of executing infer requests: " << std::endl << t << std::endl; @@ -695,7 +701,7 @@ TEST_P(IEClassGetMetricTest_RANGE_FOR_ASYNC_INFER_REQUESTS, GetMetricAndPrintNoT InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS))); std::tuple t = p; unsigned int start = std::get<0>(t); @@ -717,7 +723,7 @@ TEST_P(IEClassGetMetricTest_RANGE_FOR_STREAMS, GetMetricAndPrintNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(RANGE_FOR_STREAMS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(RANGE_FOR_STREAMS))); std::tuple t = p; unsigned int start = std::get<0>(t); @@ -736,19 +742,19 @@ TEST_P(IEClassGetMetricTest_ThrowUnsupported, GetMetricThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_THROW(p = ie.GetMetric(deviceName, "unsupported_metric"), InferenceEngine::Exception); + ASSERT_THROW(p = ie.GetMetric(target_device, "unsupported_metric"), InferenceEngine::Exception); } TEST_P(IEClassGetConfigTest, GetConfigNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector configValues = p; for (auto &&confKey : configValues) { InferenceEngine::Parameter defaultValue; - ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey)); + ASSERT_NO_THROW(defaultValue = ie.GetConfig(target_device, confKey)); ASSERT_FALSE(defaultValue.empty()); } } @@ -757,11 +763,11 @@ TEST_P(IEClassGetConfigTest, GetConfigHeteroNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector configValues = p; for (auto &&confKey : configValues) { - ASSERT_NO_THROW(ie.GetConfig(deviceName, confKey)); + ASSERT_NO_THROW(ie.GetConfig(target_device, confKey)); } } @@ -776,7 +782,7 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigHeteroWithDeviceThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_THROW(p = ie.GetConfig(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName, HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)), + ASSERT_THROW(p = 
ie.GetConfig(CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device, HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)), InferenceEngine::Exception); } @@ -784,33 +790,33 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - ASSERT_THROW(p = ie.GetConfig(deviceName, "unsupported_config"), InferenceEngine::Exception); + ASSERT_THROW(p = ie.GetConfig(target_device, "unsupported_config"), InferenceEngine::Exception); } TEST_P(IEClassSpecificDeviceTestGetConfig, GetConfigSpecificDeviceNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Parameter p; - std::string deviceID, clearDeviceName; - auto pos = deviceName.find('.'); + std::string deviceID, cleartarget_device; + auto pos = target_device.find('.'); if (pos != std::string::npos) { - clearDeviceName = deviceName.substr(0, pos); - deviceID = deviceName.substr(pos + 1, deviceName.size()); + cleartarget_device = target_device.substr(0, pos); + deviceID = target_device.substr(pos + 1, target_device.size()); } - if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { + if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) { GTEST_SKIP(); } - std::vector deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES)); + std::vector deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES)); if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) { GTEST_SKIP(); } - ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector configValues = p; for (auto &&confKey : configValues) { InferenceEngine::Parameter defaultValue; - ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey)); + ASSERT_NO_THROW(defaultValue = ie.GetConfig(target_device, confKey)); ASSERT_FALSE(defaultValue.empty()); } } @@ -824,7 +830,7 @@ TEST_P(IEClassGetAvailableDevices, GetAvailableDevicesNoThrow) { bool deviceFound = false; std::cout << "Available devices: " << std::endl; for (auto &&device : devices) { - if (device.find(deviceName) != std::string::npos) { + if (device.find(target_device) != std::string::npos) { deviceFound = true; } @@ -842,12 +848,12 @@ TEST_P(IEClassGetAvailableDevices, GetAvailableDevicesNoThrow) { TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { - auto deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as>(); + if (supportsDeviceID(ie, target_device)) { + auto deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as>(); if (deviceIDs.empty()) GTEST_SKIP(); ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, - {{"TARGET_FALLBACK", deviceName + "." + deviceIDs[0] + "," + deviceName}})); + {{"TARGET_FALLBACK", target_device + "." 
+ deviceIDs[0] + "," + target_device}})); } else { GTEST_SKIP(); } @@ -856,9 +862,9 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) { TEST_P(IEClassQueryNetworkTest, QueryNetworkWithDeviceID) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { + if (supportsDeviceID(ie, target_device)) { try { - ie.QueryNetwork(simpleCnnNetwork, deviceName + ".0"); + ie.QueryNetwork(simpleCnnNetwork, target_device + ".0"); } catch (const InferenceEngine::Exception& ex) { std::string message = ex.what(); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); @@ -871,8 +877,8 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithDeviceID) { TEST_P(IEClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { - ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, deviceName + ".110"), InferenceEngine::Exception); + if (supportsDeviceID(ie, target_device)) { + ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, target_device + ".110"), InferenceEngine::Exception); } else { GTEST_SKIP(); } @@ -881,8 +887,8 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) { TEST_P(IEClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { - ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, deviceName + ".l0"), InferenceEngine::Exception); + if (supportsDeviceID(ie, target_device)) { + ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, target_device + ".l0"), InferenceEngine::Exception); } else { GTEST_SKIP(); } @@ -891,9 +897,9 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) { TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithBigDeviceIDThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { + if (supportsDeviceID(ie, target_device)) { ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, - {{"TARGET_FALLBACK", deviceName + ".100," + deviceName}}), InferenceEngine::Exception); + {{"TARGET_FALLBACK", target_device + ".100," + target_device}}), InferenceEngine::Exception); } else { GTEST_SKIP(); } @@ -915,22 +921,22 @@ TEST(IEClassBasicTest, smoke_LoadNetworkToDefaultDeviceNoThrow) { TEST_P(IEClassNetworkTestP, LoadNetworkActualNoThrow) { SKIP_IF_CURRENT_TEST_IS_DISABLED() InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName)); + ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, target_device)); } TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDeviceNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName)); + ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device)); } TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDevice2NoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", deviceName}})); + ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", target_device}})); } 
TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - auto net = ie.LoadNetwork(actualCnnNetwork, deviceName); + auto net = ie.LoadNetwork(actualCnnNetwork, target_device); auto exec_function = net.GetExecGraphInfo().getFunction(); ASSERT_NE(nullptr, exec_function); auto actual_parameters = exec_function->get_parameters(); @@ -960,32 +966,32 @@ TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) { TEST_P(IEClassLoadNetworkTestWithThrow, LoadNetworkActualWithThrow) { SKIP_IF_CURRENT_TEST_IS_DISABLED() InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName), InferenceEngine::Exception); + ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, target_device), InferenceEngine::Exception); } TEST_P(IEClassSeveralDevicesTestLoadNetwork, LoadNetworkActualSeveralDevicesNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - std::string clearDeviceName; - auto pos = deviceNames.begin()->find('.'); + std::string cleartarget_device; + auto pos = target_devices.begin()->find('.'); if (pos != std::string::npos) { - clearDeviceName = deviceNames.begin()->substr(0, pos); + cleartarget_device = target_devices.begin()->substr(0, pos); } - if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { + if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) { GTEST_SKIP(); } - std::vector deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES)); - if (deviceIDs.size() < deviceNames.size()) + std::vector deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES)); + if (deviceIDs.size() < target_devices.size()) GTEST_SKIP(); - std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":"); - for (auto& dev_name : deviceNames) { - multiDeviceName += dev_name; - if (&dev_name != &(deviceNames.back())) { - multiDeviceName += ","; + std::string multitarget_device = CommonTestUtils::DEVICE_MULTI + std::string(":"); + for (auto& dev_name : target_devices) { + multitarget_device += dev_name; + if (&dev_name != &(target_devices.back())) { + multitarget_device += ","; } } - ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, multiDeviceName)); + ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, multitarget_device)); } using IEClassLoadNetworkTest = IEClassQueryNetworkTest; @@ -995,11 +1001,11 @@ using IEClassLoadNetworkTest = IEClassQueryNetworkTest; TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithDeviceIDNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { - auto deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as>(); + if (supportsDeviceID(ie, target_device)) { + auto deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as>(); if (deviceIDs.empty()) GTEST_SKIP(); - std::string heteroDevice = CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + "." + deviceIDs[0] + "," + deviceName; + std::string heteroDevice = CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device + "." 
+ deviceIDs[0] + "," + target_device; ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, heteroDevice)); } else { GTEST_SKIP(); @@ -1009,11 +1015,11 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithDeviceIDNoThrow) { TEST_P(IEClassLoadNetworkTest, LoadNetworkWithDeviceIDNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { - auto deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as>(); + if (supportsDeviceID(ie, target_device)) { + auto deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as>(); if (deviceIDs.empty()) GTEST_SKIP(); - ASSERT_NO_THROW(ie.LoadNetwork(simpleCnnNetwork, deviceName + "." + deviceIDs[0])); + ASSERT_NO_THROW(ie.LoadNetwork(simpleCnnNetwork, target_device + "." + deviceIDs[0])); } else { GTEST_SKIP(); } @@ -1022,8 +1028,8 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithDeviceIDNoThrow) { TEST_P(IEClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { - ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName + ".10"), InferenceEngine::Exception); + if (supportsDeviceID(ie, target_device)) { + ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, target_device + ".10"), InferenceEngine::Exception); } else { GTEST_SKIP(); } @@ -1032,8 +1038,8 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) { TEST_P(IEClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { - ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName + ".l0"), InferenceEngine::Exception); + if (supportsDeviceID(ie, target_device)) { + ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, target_device + ".l0"), InferenceEngine::Exception); } else { GTEST_SKIP(); } @@ -1042,9 +1048,9 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) { TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { + if (supportsDeviceID(ie, target_device)) { ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, "HETERO", - {{"TARGET_FALLBACK", deviceName + ".100," + CommonTestUtils::DEVICE_CPU}}), InferenceEngine::Exception); + {{"TARGET_FALLBACK", target_device + ".100," + CommonTestUtils::DEVICE_CPU}}), InferenceEngine::Exception); } else { GTEST_SKIP(); } @@ -1053,9 +1059,9 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) { TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName)) { + if (supportsDeviceID(ie, target_device)) { ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, - {{"TARGET_FALLBACK", deviceName + "," + CommonTestUtils::DEVICE_CPU}, + {{"TARGET_FALLBACK", target_device + "," + CommonTestUtils::DEVICE_CPU}, {CONFIG_KEY(DEVICE_ID), "110"}}), InferenceEngine::Exception); } else { GTEST_SKIP(); @@ -1068,16 +1074,16 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) { TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) { + if (supportsDeviceID(ie, 
target_device) && supportsAvaliableDevices(ie, target_device)) { std::string devices; - auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as>(); + auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as>(); for (auto &&device : availableDevices) { - devices += deviceName + '.' + device; + devices += target_device + '.' + device; if (&device != &(availableDevices.back())) { devices += ','; } } - std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + deviceName); + std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + target_device); ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, { {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices}, {"TARGET_FALLBACK", targetFallback}})); @@ -1089,9 +1095,9 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) { TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) { + if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) { std::string devices; - auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as>(); + auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as>(); for (auto &&device : availableDevices) { devices += CommonTestUtils::DEVICE_HETERO + std::string(".") + device; if (&device != &(availableDevices.back())) { @@ -1100,7 +1106,7 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) { } ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_MULTI, { {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices}, - {"TARGET_FALLBACK", deviceName + "," + deviceName}})); + {"TARGET_FALLBACK", target_device + "," + target_device}})); } else { GTEST_SKIP(); } @@ -1113,11 +1119,11 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) { TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) { + if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) { std::string devices; - auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as>(); + auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as>(); for (auto &&device : availableDevices) { - devices += deviceName + '.' + device; + devices += target_device + '.' 
+ device; if (&device != &(availableDevices.back())) { devices += ','; } @@ -1129,7 +1135,7 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) { expectedLayers.emplace(node->get_friendly_name()); } InferenceEngine::QueryNetworkResult result; - std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + deviceName); + std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + target_device); ASSERT_NO_THROW(result = ie.QueryNetwork(multinputCnnNetwork, CommonTestUtils::DEVICE_HETERO, { {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices}, {"TARGET_FALLBACK", targetFallback}})); @@ -1147,9 +1153,9 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) { TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) { + if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) { std::string devices; - auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as>(); + auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as>(); for (auto &&device : availableDevices) { devices += "HETERO." + device; if (&device != &(availableDevices.back())) { @@ -1165,7 +1171,7 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) { InferenceEngine::QueryNetworkResult result; ASSERT_NO_THROW(result = ie.QueryNetwork(multinputCnnNetwork, CommonTestUtils::DEVICE_MULTI, { {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices}, - {"TARGET_FALLBACK", deviceName + "," + deviceName}})); + {"TARGET_FALLBACK", target_device + "," + target_device}})); std::unordered_set actualLayers; for (auto &&layer : result.supportedLayersMap) { @@ -1180,50 +1186,50 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) { TEST_P(IEClassLoadNetworkAfterCoreRecreateTest, LoadAfterRecreateCoresAndPlugins) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); { - auto versions = ie.GetVersions(std::string(CommonTestUtils::DEVICE_MULTI) + ":" + deviceName + "," + CommonTestUtils::DEVICE_CPU); + auto versions = ie.GetVersions(std::string(CommonTestUtils::DEVICE_MULTI) + ":" + target_device + "," + CommonTestUtils::DEVICE_CPU); ASSERT_EQ(3, versions.size()); } std::map config; - if (deviceName == CommonTestUtils::DEVICE_CPU) { + if (target_device == CommonTestUtils::DEVICE_CPU) { config.insert({"CPU_THREADS_NUM", "3"}); } ASSERT_NO_THROW({ InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); std::string name = actualCnnNetwork.getInputsInfo().begin()->first; actualCnnNetwork.getInputsInfo().at(name)->setPrecision(InferenceEngine::Precision::U8); - auto executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName, config); + auto executableNetwork = ie.LoadNetwork(actualCnnNetwork, target_device, config); }); }; TEST_P(IEClassSetDefaultDeviceIDTest, SetDefaultDeviceIDNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - std::vector deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)); + std::vector deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)); if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) { GTEST_SKIP(); } std::string value; ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, deviceID }, { 
InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, - deviceName)); - ASSERT_NO_THROW(value = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as()); + target_device)); + ASSERT_NO_THROW(value = ie.GetConfig(target_device, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as()); ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES); } TEST_P(IEClassSetGlobalConfigTest, SetGlobalConfigNoThrow) { InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); - std::vector deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)); + std::vector deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)); InferenceEngine::Parameter ref, src; for (auto& dev_id : deviceIDs) { ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::NO }}, - deviceName + "." + dev_id)); + target_device + "." + dev_id)); } - ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, deviceName)); - ASSERT_NO_THROW(ref = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT)); + ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, target_device)); + ASSERT_NO_THROW(ref = ie.GetConfig(target_device, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT)); for (auto& dev_id : deviceIDs) { - ASSERT_NO_THROW(src = ie.GetConfig(deviceName + "." + dev_id, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT)); + ASSERT_NO_THROW(src = ie.GetConfig(target_device + "." + dev_id, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT)); ASSERT_EQ(src, ref); } } @@ -1231,24 +1237,24 @@ TEST_P(IEClassSetGlobalConfigTest, SetGlobalConfigNoThrow) { TEST_P(IEClassSeveralDevicesTestDefaultCore, DefaultCoreSeveralDevicesNoThrow) { InferenceEngine::Core ie; - std::string clearDeviceName; - auto pos = deviceNames.begin()->find('.'); + std::string cleartarget_device; + auto pos = target_devices.begin()->find('.'); if (pos != std::string::npos) { - clearDeviceName = deviceNames.begin()->substr(0, pos); + cleartarget_device = target_devices.begin()->substr(0, pos); } - if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { + if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) { GTEST_SKIP(); } - std::vector deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES)); - if (deviceIDs.size() < deviceNames.size()) + std::vector deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES)); + if (deviceIDs.size() < target_devices.size()) GTEST_SKIP(); - for (size_t i = 0; i < deviceNames.size(); ++i) { - ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, std::to_string(i + 2) }}, deviceNames[i])); + for (size_t i = 0; i < target_devices.size(); ++i) { + ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, std::to_string(i + 2) }}, target_devices[i])); } std::string res; - for (size_t i = 0; i < deviceNames.size(); ++i) { - ASSERT_NO_THROW(res = ie.GetConfig(deviceNames[i], InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS).as()); + for (size_t i = 0; i < target_devices.size(); ++i) { + ASSERT_NO_THROW(res = ie.GetConfig(target_devices[i], 
InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS).as()); ASSERT_EQ(res, std::to_string(i + 2)); } } diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/core_threading.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/core_threading.hpp index 1d2227b2cca..5b54f28a1f8 100644 --- a/src/tests/functional/plugin/shared/include/behavior/plugin/core_threading.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/plugin/core_threading.hpp @@ -15,6 +15,7 @@ #include #include #include +#include "base/behavior_test_utils.hpp" #include #include @@ -23,6 +24,7 @@ #include #include #include +#include "base/ov_behavior_test_utils.hpp" using Device = std::string; using Config = std::map; @@ -49,7 +51,7 @@ public: } } - void safePluginUnregister(InferenceEngine::Core & ie) { + void safePluginUnregister(InferenceEngine::Core & ie, const std::string& deviceName) { try { ie.UnregisterPlugin(deviceName); } catch (const InferenceEngine::Exception & ex) { @@ -69,7 +71,6 @@ public: } } - Device deviceName; Config config; }; @@ -77,24 +78,27 @@ public: // Common threading plugin tests // -class CoreThreadingTests : public CoreThreadingTestsBase, - public ::testing::TestWithParam { +class CoreThreadingTests : public testing::WithParamInterface, + public BehaviorTestsUtils::IEPluginTestBase, + public CoreThreadingTestsBase { public: void SetUp() override { + std::tie(target_device, config) = GetParam(); + APIBaseTest::SetUp(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(deviceName, config) = GetParam(); } static std::string getTestCaseName(testing::TestParamInfo obj) { std::string deviceName; Config config; std::tie(deviceName, config) = obj.param; + std::replace(deviceName.begin(), deviceName.end(), ':', '.'); char separator('_'); std::ostringstream result; result << "targetDevice=" << deviceName << separator; result << "config="; for (auto& confItem : config) { - result << confItem.first << ":" << confItem.second << separator; + result << confItem.first << "=" << confItem.second << separator; } return result.str(); } @@ -104,9 +108,9 @@ public: TEST_P(CoreThreadingTests, smoke_GetVersions) { InferenceEngine::Core ie; runParallel([&] () { - auto versions = ie.GetVersions(deviceName); + auto versions = ie.GetVersions(target_device); ASSERT_LE(1u, versions.size()); - safePluginUnregister(ie); + safePluginUnregister(ie, target_device); }); } @@ -115,7 +119,7 @@ TEST_P(CoreThreadingTests, smoke_SetConfigPluginExists) { InferenceEngine::Core ie; ie.SetConfig(config); - auto versions = ie.GetVersions(deviceName); + auto versions = ie.GetVersions(target_device); runParallel([&] () { ie.SetConfig(config); @@ -129,8 +133,8 @@ TEST_P(CoreThreadingTests, smoke_GetConfig) { ie.SetConfig(config); runParallel([&] () { - ie.GetConfig(deviceName, configKey); - safePluginUnregister(ie); + ie.GetConfig(target_device, configKey); + safePluginUnregister(ie, target_device); }); } @@ -138,8 +142,8 @@ TEST_P(CoreThreadingTests, smoke_GetConfig) { TEST_P(CoreThreadingTests, smoke_GetMetric) { InferenceEngine::Core ie; runParallel([&] () { - ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)); - safePluginUnregister(ie); + ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)); + safePluginUnregister(ie, target_device); }); } @@ -148,12 +152,12 @@ TEST_P(CoreThreadingTests, smoke_QueryNetwork) { InferenceEngine::Core ie; InferenceEngine::CNNNetwork network(ngraph::builder::subgraph::make2InputSubtract()); - ie.SetConfig(config, deviceName); - 
InferenceEngine::QueryNetworkResult refResult = ie.QueryNetwork(network, deviceName); + ie.SetConfig(config, target_device); + InferenceEngine::QueryNetworkResult refResult = ie.QueryNetwork(network, target_device); runParallel([&] () { - const auto result = ie.QueryNetwork(network, deviceName); - safePluginUnregister(ie); + const auto result = ie.QueryNetwork(network, target_device); + safePluginUnregister(ie, target_device); // compare QueryNetworkResult with reference for (auto && r : refResult.supportedLayersMap) { @@ -179,12 +183,13 @@ enum struct ModelClass : unsigned { using CoreThreadingParams = std::tuple; -class CoreThreadingTestsWithIterations : public ::testing::TestWithParam, - public CoreThreadingTestsBase { +class CoreThreadingTestsWithIterations : public testing::WithParamInterface, + public BehaviorTestsUtils::IEPluginTestBase, + public CoreThreadingTestsBase { public: void SetUp() override { SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(deviceName, config) = std::get<0>(GetParam()); + std::tie(target_device, config) = std::get<0>(GetParam()); numThreads = std::get<1>(GetParam()); numIterations = std::get<2>(GetParam()); modelClass = std::get<3>(GetParam()); @@ -195,6 +200,7 @@ public: std::string deviceName; Config config; std::tie(deviceName, config) = std::get<0>(obj.param); + std::replace(deviceName.begin(), deviceName.end(), ':', '.'); numThreads = std::get<1>(obj.param); numIterations = std::get<2>(obj.param); char separator('_'); @@ -202,13 +208,15 @@ public: result << "targetDevice=" << deviceName << separator; result << "config="; for (auto& confItem : config) { - result << confItem.first << ":" << confItem.second << separator; + result << confItem.first << "=" << confItem.second << separator; } result << "numThreads=" << numThreads << separator; result << "numIter=" << numIterations; return result.str(); } + +protected: ModelClass modelClass; unsigned int numIterations; unsigned int numThreads; @@ -236,10 +244,10 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork) { SetupNetworks(); - ie.SetConfig(config, deviceName); + ie.SetConfig(config, target_device); runParallel([&] () { auto value = counter++; - (void)ie.LoadNetwork(networks[value % networks.size()], deviceName); + (void)ie.LoadNetwork(networks[value % networks.size()], target_device); }, numIterations, numThreads); } @@ -250,7 +258,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy_SingleIECore) SetupNetworks(); - ie.SetConfig(config, deviceName); + ie.SetConfig(config, target_device); runParallel([&] () { auto value = counter++; @@ -264,7 +272,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy_SingleIECore) } auto getOutputBlob = [&](InferenceEngine::Core & core) { - auto exec = core.LoadNetwork(network, deviceName); + auto exec = core.LoadNetwork(network, target_device); auto req = exec.CreateInferRequest(); req.SetInput(blobs); @@ -293,7 +301,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy) { SetupNetworks(); - ie.SetConfig(config, deviceName); + ie.SetConfig(config, target_device); runParallel([&] () { auto value = counter++; auto network = networks[value % networks.size()]; @@ -306,7 +314,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy) { } auto getOutputBlob = [&](InferenceEngine::Core & core) { - auto exec = core.LoadNetwork(network, deviceName); + auto exec = core.LoadNetwork(network, target_device); auto req = exec.CreateInferRequest(); req.SetInput(blobs); @@ -325,7 +333,7 @@ 
TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy) { // compare actual value using the second Core { InferenceEngine::Core ie2; - ie2.SetConfig(config, deviceName); + ie2.SetConfig(config, target_device); auto outputRef = getOutputBlob(ie2); FuncTestUtils::compareBlobs(outputActual, outputRef); @@ -342,8 +350,8 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork_SingleIECore) { runParallel([&] () { auto value = counter++; - ie.SetConfig(config, deviceName); - (void)ie.LoadNetwork(networks[value % networks.size()], deviceName); + ie.SetConfig(config, target_device); + (void)ie.LoadNetwork(networks[value % networks.size()], target_device); }, numIterations, numThreads); } @@ -356,7 +364,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork_MultipleIECores) { runParallel([&] () { auto value = counter++; InferenceEngine::Core ie; - ie.SetConfig(config, deviceName); - (void)ie.LoadNetwork(networks[value % networks.size()], deviceName); + ie.SetConfig(config, target_device); + (void)ie.LoadNetwork(networks[value % networks.size()], target_device); }, numIterations, numThreads); } diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/life_time.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/life_time.hpp index ca6c2ff4bdf..369e445965f 100644 --- a/src/tests/functional/plugin/shared/include/behavior/plugin/life_time.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/plugin/life_time.hpp @@ -15,10 +15,10 @@ #include #include #include "gtest/gtest.h" -#include "common_test_utils/test_common.hpp" #include "common_test_utils/crash_handler.hpp" #include "functional_test_utils/skip_tests_config.hpp" #include "functional_test_utils/precision_utils.hpp" +#include "base/behavior_test_utils.hpp" #include namespace BehaviorTestsDefinitions { @@ -27,29 +27,27 @@ typedef std::tuple< std::vector> // Order HoldersParams; -class HoldersTest : public CommonTestUtils::TestsCommon, - public ::testing::WithParamInterface { +class HoldersTest : public BehaviorTestsUtils::IEPluginTestBase, + public ::testing::WithParamInterface { public: static std::string getTestCaseName(testing::TestParamInfo obj); - void SetUp() override; +protected: std::vector order; std::shared_ptr function; - std::string targetDevice; }; using HoldersTestImportNetwork = HoldersTest; -class HoldersTestOnImportedNetwork : public CommonTestUtils::TestsCommon, +class HoldersTestOnImportedNetwork : public BehaviorTestsUtils::IEPluginTestBase, public ::testing::WithParamInterface { public: static std::string getTestCaseName(testing::TestParamInfo obj); - void SetUp() override; +protected: std::shared_ptr function; - std::string targetDevice; }; } // namespace BehaviorTestsDefinitions \ No newline at end of file diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/set_preprocess.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/set_preprocess.hpp index ca4cc587cbf..4bc3e090099 100644 --- a/src/tests/functional/plugin/shared/include/behavior/plugin/set_preprocess.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/plugin/set_preprocess.hpp @@ -25,7 +25,7 @@ TEST_P(InferRequestPreprocessTest, SetPreProcessToInputInfo) { auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, 
target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); { @@ -44,7 +44,7 @@ TEST_P(InferRequestPreprocessTest, SetPreProcessToInferRequest) { auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); InferenceEngine::ConstInputsDataMap inputsMap = execNet.GetInputsInfo(); @@ -96,7 +96,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanImagePreProcessGetBlob) { } preProcess.setVariant(InferenceEngine::MEAN_IMAGE); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); auto inBlob = req.GetBlob("param"); @@ -163,7 +163,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanImagePreProcessSetBlob) { } preProcess.setVariant(InferenceEngine::MEAN_IMAGE); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); @@ -225,7 +225,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanValuePreProcessGetBlob) { preProcess[2]->stdScale = 1; preProcess.setVariant(InferenceEngine::MEAN_VALUE); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); auto inBlob = req.GetBlob("param"); @@ -285,7 +285,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanValuePreProcessSetBlob) { preProcess[2]->stdScale = 1; preProcess.setVariant(InferenceEngine::MEAN_VALUE); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); @@ -340,7 +340,7 @@ TEST_P(InferRequestPreprocessTest, ReverseInputChannelsPreProcessGetBlob) { auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); auto inBlob = req.GetBlob("param"); @@ -401,7 +401,7 @@ TEST_P(InferRequestPreprocessTest, ReverseInputChannelsPreProcessSetBlob) { auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); @@ -472,7 +472,7 @@ TEST_P(InferRequestPreprocessTest, SetScalePreProcessGetBlob) { preProcess[2]->meanValue = 0; preProcess.setVariant(InferenceEngine::MEAN_VALUE); // Load CNNNetwork to target plugins - auto execNet = 
ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); auto inBlob = req.GetBlob("param"); @@ -532,7 +532,7 @@ TEST_P(InferRequestPreprocessTest, SetScalePreProcessSetBlob) { preProcess[2]->meanValue = 0; preProcess.setVariant(InferenceEngine::MEAN_VALUE); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); @@ -577,18 +577,19 @@ typedef std::tuple< > PreprocessConversionParams; class InferRequestPreprocessConversionTest : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public BehaviorTestsUtils::IEPluginTestBase { public: static std::string getTestCaseName(testing::TestParamInfo obj) { InferenceEngine::Precision netPrecision, iPrecision, oPrecision; InferenceEngine::Layout netLayout, iLayout, oLayout; bool setInputBlob, setOutputBlob; - std::string targetDevice; + std::string target_device; std::map configuration; std::tie(netPrecision, iPrecision, oPrecision, netLayout, iLayout, oLayout, setInputBlob, setOutputBlob, - targetDevice, configuration) = obj.param; + target_device, configuration) = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '_'); std::ostringstream result; result << "netPRC=" << netPrecision.name() << "_"; result << "iPRC=" << iPrecision.name() << "_"; @@ -598,7 +599,7 @@ public: result << "oLT=" << oLayout << "_"; result << "setIBlob=" << setInputBlob << "_"; result << "setOBlob=" << setOutputBlob << "_"; - result << "targetDevice=" << targetDevice; + result << "target_device=" << target_device; if (!configuration.empty()) { for (auto& configItem : configuration) { result << "configItem=" << configItem.first << "_" << configItem.second << "_"; @@ -626,25 +627,26 @@ public: } void SetUp() override { - // Skip test according to plugin specific disabledTestPatterns() (if any) - SKIP_IF_CURRENT_TEST_IS_DISABLED() std::tie(netPrecision, iPrecision, oPrecision, netLayout, iLayout, oLayout, setInputBlob, setOutputBlob, - targetDevice, configuration) = this->GetParam(); + target_device, configuration) = this->GetParam(); + // Skip test according to plugin specific disabledTestPatterns() (if any) + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); } void TearDown() override { if (!configuration.empty()) { PluginCache::get().reset(); } + APIBaseTest::TearDown(); } std::shared_ptr ie = PluginCache::get().ie(); InferenceEngine::Precision netPrecision, iPrecision, oPrecision; InferenceEngine::Layout netLayout, iLayout, oLayout; bool setInputBlob, setOutputBlob; - std::string targetDevice; std::map configuration; }; @@ -676,7 +678,7 @@ TEST_P(InferRequestPreprocessConversionTest, Infer) { cnnNet.getOutputsInfo().begin()->second->setLayout(oLayout); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); @@ -765,7 +767,7 @@ typedef std::tuple< > PreprocessSetBlobCheckParams; class InferRequestPreprocessDynamicallyInSetBlobTest : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public BehaviorTestsUtils::IEPluginTestBase { public: static std::string getTestCaseName(testing::TestParamInfo 
obj) { InferenceEngine::Precision netPrecision; @@ -773,12 +775,13 @@ public: bool changeIPrecision, changeOPrecision; bool changeILayout, changeOLayout; bool setInputBlob, setOutputBlob; - std::string targetDevice; + std::string target_device; std::map configuration; std::tie(netPrecision, changeIPrecision, changeOPrecision, netLayout, changeILayout, changeOLayout, setInputBlob, setOutputBlob, - targetDevice, configuration) = obj.param; + target_device, configuration) = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '_'); std::ostringstream result; result << "netPRC=" << netPrecision.name() << "_"; result << "iPRC=" << changeIPrecision << "_"; @@ -788,7 +791,7 @@ public: result << "oLT=" << changeOLayout << "_"; result << "setIBlob=" << setInputBlob << "_"; result << "setOBlob=" << setOutputBlob << "_"; - result << "targetDevice=" << targetDevice; + result << "target_device=" << target_device; if (!configuration.empty()) { for (auto& configItem : configuration) { result << "configItem=" << configItem.first << "_" << configItem.second << "_"; @@ -821,13 +824,15 @@ public: std::tie(netPrecision, changeIPrecision, changeOPrecision, netLayout, changeILayout, changeOLayout, setInputBlob, setOutputBlob, - targetDevice, configuration) = this->GetParam(); + target_device, configuration) = this->GetParam(); + APIBaseTest::SetUp(); } void TearDown() override { if (!configuration.empty()) { PluginCache::get().reset(); } + APIBaseTest::TearDown(); } std::shared_ptr ie = PluginCache::get().ie(); @@ -836,7 +841,6 @@ public: InferenceEngine::Layout netLayout; bool changeILayout, changeOLayout; bool setInputBlob, setOutputBlob; - std::string targetDevice; std::map configuration; }; @@ -863,7 +867,7 @@ TEST_P(InferRequestPreprocessDynamicallyInSetBlobTest, Infer) { InferenceEngine::CNNNetwork cnnNet(ngraph); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); auto req = execNet.CreateInferRequest(); InferenceEngine::Blob::Ptr inBlob = nullptr, outBlob = nullptr; @@ -997,7 +1001,7 @@ TEST_P(InferRequestPreprocessTest, InferWithRGB2BGRConversion) { auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); preProcess.setColorFormat(InferenceEngine::ColorFormat::BGR); // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration); // Create InferRequest auto req = execNet.CreateInferRequest(); diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/version.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/version.hpp index 1ea959b121a..59b4fdfe494 100644 --- a/src/tests/functional/plugin/shared/include/behavior/plugin/version.hpp +++ b/src/tests/functional/plugin/shared/include/behavior/plugin/version.hpp @@ -15,34 +15,35 @@ namespace BehaviorTestsDefinitions { class VersionTest : public testing::WithParamInterface, - public CommonTestUtils::TestsCommon { + public BehaviorTestsUtils::IEPluginTestBase { public: static std::string getTestCaseName(testing::TestParamInfo obj) { std::string targetDevice; std::map config; targetDevice = obj.param; + std::replace(targetDevice.begin(), targetDevice.end(), ':', '_'); std::ostringstream result; result << "targetDevice=" << targetDevice; return result.str(); } void SetUp() override { + target_device = this->GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED() - 
targetDevice = this->GetParam(); + APIBaseTest::SetUp(); } std::shared_ptr ie = PluginCache::get().ie(); - std::string targetDevice; }; // Load unsupported network type to the Plugin TEST_P(VersionTest, pluginCurrentVersionIsCorrect) { - if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && - targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { - std::map versions = ie->GetVersions(targetDevice); + if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && + target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { + std::map versions = ie->GetVersions(target_device); ASSERT_EQ(versions.size(), 1); - ASSERT_EQ(versions.begin()->first, targetDevice); + ASSERT_EQ(versions.begin()->first, target_device); auto version = versions.begin()->second; IE_SUPPRESS_DEPRECATED_START ASSERT_EQ(version.apiVersion.major, 2); diff --git a/src/tests/functional/plugin/shared/include/single_layer_tests/op_impl_check/op_impl_check.hpp b/src/tests/functional/plugin/shared/include/single_layer_tests/op_impl_check/op_impl_check.hpp index 7cacf4a11d2..13fec261902 100644 --- a/src/tests/functional/plugin/shared/include/single_layer_tests/op_impl_check/op_impl_check.hpp +++ b/src/tests/functional/plugin/shared/include/single_layer_tests/op_impl_check/op_impl_check.hpp @@ -9,7 +9,7 @@ #include "common_test_utils/test_common.hpp" #include "common_test_utils/common_utils.hpp" -#include "functional_test_utils/layer_test_utils/summary.hpp" +#include "functional_test_utils/summary/op_summary.hpp" #include "functional_test_utils/ov_plugin_cache.hpp" namespace ov { @@ -24,7 +24,7 @@ using OpImplParams = std::tuple< class OpImplCheckTest : public testing::WithParamInterface, public CommonTestUtils::TestsCommon { protected: - LayerTestsUtils::Summary& summary = LayerTestsUtils::Summary::getInstance(); + ov::test::utils::OpSummary& summary = ov::test::utils::OpSummary::getInstance(); std::shared_ptr core = ov::test::utils::PluginCache::get().core(); std::shared_ptr function; std::string targetDevice; diff --git a/src/tests/functional/plugin/shared/include/single_layer_tests/op_impl_check/single_op_graph.hpp b/src/tests/functional/plugin/shared/include/single_layer_tests/op_impl_check/single_op_graph.hpp index 24fa8e99d28..f31b4528022 100644 --- a/src/tests/functional/plugin/shared/include/single_layer_tests/op_impl_check/single_op_graph.hpp +++ b/src/tests/functional/plugin/shared/include/single_layer_tests/op_impl_check/single_op_graph.hpp @@ -4,7 +4,7 @@ #pragma once -#include +#include #include namespace ov { @@ -16,7 +16,7 @@ OpGenerator getOpGeneratorMap(); static const std::vector>> createFunctions() { std::vector>> res; - auto opsets = LayerTestsUtils::Summary::getInstance().getOpSets(); + auto opsets = ov::test::utils::OpSummary::getInstance().getOpSets(); auto opGenerator = getOpGeneratorMap(); std::set opsInfo; for (const auto& opset : opsets) { diff --git a/src/tests/functional/plugin/shared/src/behavior/executable_network/exec_graph_info.cpp b/src/tests/functional/plugin/shared/src/behavior/executable_network/exec_graph_info.cpp index 83438fd0794..055ac49504f 100644 --- a/src/tests/functional/plugin/shared/src/behavior/executable_network/exec_graph_info.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/executable_network/exec_graph_info.cpp @@ -244,12 +244,17 @@ const char 
expected_serialized_model[] = R"V0G0N( std::string ExecGraphSerializationTest::getTestCaseName(testing::TestParamInfo obj) { std::ostringstream result; - std::string targetDevice = obj.param; - result << "TargetDevice=" << targetDevice; + std::string target_device = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '.'); + result << "TargetDevice=" << target_device; return result.str(); } void ExecGraphSerializationTest::SetUp() { + target_device = this->GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); + const std::string XML_EXT = ".xml"; const std::string BIN_EXT = ".bin"; @@ -257,11 +262,10 @@ void ExecGraphSerializationTest::SetUp() { m_out_xml_path = model_name + XML_EXT; m_out_bin_path = model_name + BIN_EXT; - - deviceName = this->GetParam(); } void ExecGraphSerializationTest::TearDown() { + APIBaseTest::TearDown(); CommonTestUtils::removeIRFiles(m_out_xml_path, m_out_bin_path); } @@ -340,10 +344,10 @@ std::pair ExecGraphSerializationTest::compare_docs(const pugi } TEST_P(ExecGraphSerializationTest, ExecutionGraph) { - auto ie = PluginCache::get().ie(deviceName); + auto ie = PluginCache::get().ie(target_device); InferenceEngine::Blob::Ptr a; auto cnnNet = ie->ReadNetwork(serialize_test_model, a); - auto execNet = ie->LoadNetwork(cnnNet, deviceName); + auto execNet = ie->LoadNetwork(cnnNet, target_device); auto execGraph = execNet.GetExecGraphInfo(); InferenceEngine::InferRequest req = execNet.CreateInferRequest(); execGraph.serialize(m_out_xml_path, m_out_bin_path); @@ -365,6 +369,7 @@ std::string ExecGraphUniqueNodeNames::getTestCaseName(testing::TestParamInfo inputShape; InferenceEngine::Precision netPrecision; - std::tie(netPrecision, inputShape, targetDevice) = this->GetParam(); + std::tie(netPrecision, inputShape, target_device) = this->GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED(); + + APIBaseTest::SetUp(); auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision); auto params = ngraph::builder::makeParams(ngPrc, {inputShape}); @@ -390,15 +396,11 @@ void ExecGraphUniqueNodeNames::SetUp() { fnPtr = std::make_shared(results, params, "SplitConvConcat"); } -void ExecGraphUniqueNodeNames::TearDown() { - fnPtr.reset(); -} - TEST_P(ExecGraphUniqueNodeNames, CheckUniqueNodeNames) { InferenceEngine::CNNNetwork cnnNet(fnPtr); - auto ie = PluginCache::get().ie(targetDevice); - auto execNet = ie->LoadNetwork(cnnNet, targetDevice); + auto ie = PluginCache::get().ie(target_device); + auto execNet = ie->LoadNetwork(cnnNet, target_device); InferenceEngine::CNNNetwork execGraphInfo = execNet.GetExecGraphInfo(); diff --git a/src/tests/functional/plugin/shared/src/behavior/executable_network/locale.cpp b/src/tests/functional/plugin/shared/src/behavior/executable_network/locale.cpp index 9795a6fa490..d2fec300b2b 100644 --- a/src/tests/functional/plugin/shared/src/behavior/executable_network/locale.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/executable_network/locale.cpp @@ -3,6 +3,7 @@ // #include "behavior/executable_network/locale.hpp" +#include "functional_test_utils/summary/api_summary.hpp" namespace BehaviorTestsDefinitions { @@ -24,15 +25,19 @@ inline std::shared_ptr makeTestModel(std::vector input std::string CustomLocaleTest::getTestCaseName(const testing::TestParamInfo &obj) { std::ostringstream results; - std::string deviceName, localeName; - std::tie(localeName, deviceName) = obj.param; + std::string targetDevice, localeName; + std::tie(localeName, targetDevice) = obj.param; + 
std::replace(localeName.begin(), localeName.end(), '-', '.'); + std::replace(targetDevice.begin(), targetDevice.end(), ':', '.'); results << "locale=" << localeName << "_" - << "targetDevice=" << deviceName; + << "targetDevice=" << targetDevice; return results.str(); } void CustomLocaleTest::SetUp() { - std::tie(localeName, deviceName) = GetParam(); + std::tie(localeName, target_device) = GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); testName = ::testing::UnitTest::GetInstance()->current_test_info()->name(); function = makeTestModel(); } @@ -45,9 +50,9 @@ TEST_P(CustomLocaleTest, CanLoadNetworkWithCustomLocale) { GTEST_SKIP(); } - std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(deviceName); + std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(target_device); InferenceEngine::CNNNetwork cnnNet(function); - ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, deviceName)); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device)); std::locale::global(prev); } diff --git a/src/tests/functional/plugin/shared/src/behavior/infer_request/memory_states.cpp b/src/tests/functional/plugin/shared/src/behavior/infer_request/memory_states.cpp index cd8a558568d..759b7ece590 100644 --- a/src/tests/functional/plugin/shared/src/behavior/infer_request/memory_states.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/infer_request/memory_states.cpp @@ -25,9 +25,10 @@ std::string InferRequestVariableStateTest::getTestCaseName(const testing::TestPa } void InferRequestVariableStateTest::SetUp() { + std::tie(net, statesToQuery, deviceName, configuration) = GetParam(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(net, statesToQuery, deviceName, configuration) = GetParam(); + IEInferRequestTestBase::SetUp(); } InferenceEngine::ExecutableNetwork InferRequestVariableStateTest::PrepareNetwork() { diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_executable_network/properties.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_executable_network/properties.cpp index 7068000f690..3a01ed4f717 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_executable_network/properties.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_executable_network/properties.cpp @@ -11,21 +11,23 @@ namespace test { namespace behavior { std::string OVCompiledModelEmptyPropertiesTests::getTestCaseName(testing::TestParamInfo<std::string> obj) { - return "device_name=" + obj.param; + return "target_device=" + obj.param; } void OVCompiledModelEmptyPropertiesTests::SetUp() { + target_device = this->GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED() - device_name = this->GetParam(); - model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(device_name); + APIBaseTest::SetUp(); + model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); } std::string OVCompiledModelPropertiesTests::getTestCaseName(testing::TestParamInfo obj) { - std::string device_name; + std::string targetDevice; AnyMap properties; - std::tie(device_name, properties) = obj.param; + std::tie(targetDevice, properties) = obj.param; + std::replace(targetDevice.begin(), targetDevice.end(), ':', '.'); std::ostringstream result; - result << "device_name=" << device_name << "_"; + result << "targetDevice=" << targetDevice << "_"; if (!properties.empty()) { result << "properties=" << util::join(util::split(util::to_string(properties), ' '), "_"); } @@ -33,34 +35,36 @@ std::string OVCompiledModelPropertiesTests::getTestCaseName(testing::TestParamIn } void 
OVCompiledModelPropertiesTests::SetUp() { + std::tie(target_device, properties) = this->GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(device_name, properties) = this->GetParam(); - model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(device_name); + APIBaseTest::SetUp(); + model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); } void OVCompiledModelPropertiesTests::TearDown() { if (!properties.empty()) { utils::PluginCache::get().reset(); } + APIBaseTest::TearDown(); } TEST_P(OVCompiledModelEmptyPropertiesTests, CanCompileModelWithEmptyProperties) { - OV_ASSERT_NO_THROW(core->compile_model(model, device_name, AnyMap{})); + OV_ASSERT_NO_THROW(core->compile_model(model, target_device, AnyMap{})); } TEST_P(OVCompiledModelPropertiesTests, CanCompileModelWithCorrectProperties) { - OV_ASSERT_NO_THROW(core->compile_model(model, device_name, properties)); + OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties)); } TEST_P(OVCompiledModelPropertiesTests, CanUseCache) { core->set_property(ov::cache_dir("./test_cache")); - OV_ASSERT_NO_THROW(core->compile_model(model, device_name, properties)); - OV_ASSERT_NO_THROW(core->compile_model(model, device_name, properties)); + OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties)); + OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties)); CommonTestUtils::removeDir("./test_cache"); } TEST_P(OVCompiledModelPropertiesTests, canCompileModelWithPropertiesAndCheckGetProperty) { - auto compiled_model = core->compile_model(model, device_name, properties); + auto compiled_model = core->compile_model(model, target_device, properties); auto supported_properties = compiled_model.get_property(ov::supported_properties); for (const auto& property_item : properties) { if (util::contains(supported_properties, property_item.first)) { @@ -73,26 +77,26 @@ TEST_P(OVCompiledModelPropertiesTests, canCompileModelWithPropertiesAndCheckGetP } TEST_P(OVCompiledModelPropertiesIncorrectTests, CanNotCompileModelWithIncorrectProperties) { - ASSERT_THROW(core->compile_model(model, device_name, properties), ov::Exception); + ASSERT_THROW(core->compile_model(model, target_device, properties), ov::Exception); } TEST_P(OVCompiledModelPropertiesDefaultTests, CanCompileWithDefaultValueFromPlugin) { std::vector<ov::PropertyName> supported_properties; - OV_ASSERT_NO_THROW(supported_properties = core->get_property(device_name, ov::supported_properties)); + OV_ASSERT_NO_THROW(supported_properties = core->get_property(target_device, ov::supported_properties)); AnyMap default_rw_properties; for (auto& supported_property : supported_properties) { if (supported_property.is_mutable()) { Any property; - OV_ASSERT_NO_THROW(property = core->get_property(device_name, supported_property)); + OV_ASSERT_NO_THROW(property = core->get_property(target_device, supported_property)); default_rw_properties.emplace(supported_property, property); std::cout << supported_property << ":" << property.as<std::string>() << std::endl; } } - OV_ASSERT_NO_THROW(core->compile_model(model, device_name, default_rw_properties)); + OV_ASSERT_NO_THROW(core->compile_model(model, target_device, default_rw_properties)); } TEST_P(OVCompiledModelPropertiesDefaultTests, CheckDefaultValues) { - auto compiled_model = core->compile_model(model, device_name); + auto compiled_model = core->compile_model(model, target_device); std::vector<ov::PropertyName> supported_properties; OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties)); std::cout << 
"SUPPORTED PROPERTIES: " << std::endl; diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/batched_tensors.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/batched_tensors.cpp index 6d7af6a653c..e9b7c4b9aa0 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/batched_tensors.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/batched_tensors.cpp @@ -14,7 +14,7 @@ namespace test { namespace behavior { std::string OVInferRequestBatchedTests::getTestCaseName(const testing::TestParamInfo& obj) { - return "targetDevice=" + obj.param; + return "target_device=" + obj.param; } std::string OVInferRequestBatchedTests::generateCacheDirName(const std::string& test_name) { @@ -29,8 +29,9 @@ std::string OVInferRequestBatchedTests::generateCacheDirName(const std::string& } void OVInferRequestBatchedTests::SetUp() { + target_device = GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED() - targetDevice = GetParam(); + APIBaseTest::SetUp(); m_cache_dir = generateCacheDirName(GetTestName()); } @@ -42,6 +43,7 @@ void OVInferRequestBatchedTests::TearDown() { CommonTestUtils::removeFilesWithExt(m_cache_dir, "blob"); CommonTestUtils::removeDir(m_cache_dir); } + APIBaseTest::TearDown(); } std::shared_ptr OVInferRequestBatchedTests::create_n_inputs(size_t n, element::Type type, @@ -74,7 +76,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensorsBase) { auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N..."); // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks std::vector buffer(one_shape_size * batch * 2, 0); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); // Create InferRequest ov::InferRequest req; req = execNet.create_infer_request(); @@ -108,7 +110,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensorsAsync) { auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N..."); // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks std::vector buffer(one_shape_size * batch * 2, 0); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); // Create InferRequest ov::InferRequest req; req = execNet.create_infer_request(); @@ -143,7 +145,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_override_with_set) { auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N..."); std::vector buffer(one_shape_size * batch, 4); std::vector buffer2(one_shape_size * batch, 5); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); // Create InferRequest ov::InferRequest req; req = execNet.create_infer_request(); @@ -179,8 +181,8 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensorsBase_Caching) { auto one_shape_size = ov::shape_size(one_shape); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "N..."); ie->set_property({{CONFIG_KEY(CACHE_DIR), m_cache_dir}}); - auto execNet_no_cache = ie->compile_model(model, targetDevice); - auto execNet_cache = ie->compile_model(model, targetDevice); + auto execNet_no_cache = ie->compile_model(model, target_device); + auto execNet_cache = ie->compile_model(model, target_device); // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks std::vector buffer(one_shape_size * batch * 2, 0); @@ -219,7 +221,7 @@ TEST_P(OVInferRequestBatchedTests, 
SetInputTensors_Multiple_Infer) { auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N..."); // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks std::vector<float> buffer(one_shape_size * batch * 2, 0); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); // Create InferRequest ov::InferRequest req; req = execNet.create_infer_request(); @@ -256,7 +258,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Can_Infer_Dynamic) { auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, PartialShape({-1, 2, 2, 2}), "N..."); // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks std::vector<float> buffer(one_shape_size * batch * 2, 0); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); // Create InferRequest ov::InferRequest req; req = execNet.create_infer_request(); @@ -292,7 +294,7 @@ TEST_P(OVInferRequestBatchedTests, SetTensors_Batch1) { auto one_shape = Shape{1, 3, 10, 10}; auto one_shape_size = ov::shape_size(one_shape); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, one_shape, "N..."); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); // Create InferRequest ov::InferRequest req; req = execNet.create_infer_request(); @@ -323,7 +325,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Get_Tensor_Not_Allowed) { auto batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); const std::string tensor_name = "tensor_input0"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); @@ -337,7 +339,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Batch_No_Batch) { auto batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "DCHW"); const std::string tensor_name = "tensor_input0"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); @@ -350,7 +352,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_No_Name) { auto batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); const std::string tensor_name = "undefined"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); @@ -363,7 +365,7 @@ TEST_P(OVInferRequestBatchedTests, SetTensors_No_Name) { auto batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); const std::string tensor_name = "undefined"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); @@ -376,7 +378,7 @@ TEST_P(OVInferRequestBatchedTests, SetTensors_Friendly_Name) { auto 
batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); const std::string tensor_name = "input0"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); @@ -388,7 +390,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_No_index) { auto one_shape = Shape{1, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); @@ -400,7 +402,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_no_name_multiple_inputs) { auto one_shape = Shape{1, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW"); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); @@ -413,7 +415,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Incorrect_count) { auto batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); const std::string tensor_name = "tensor_input0"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch + 1, ov::Tensor(element::f32, one_shape)); @@ -425,7 +427,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Empty_Array) { auto batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); const std::string tensor_name = "tensor_input0"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors; @@ -436,7 +438,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_diff_batches) { auto batch_shape = Shape{3, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); const std::string tensor_name = "tensor_input0"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors; @@ -451,7 +453,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Correct_all) { auto batch_shape = Shape{2, 3, 3, 3}; std::vector<float> buffer(ov::shape_size(batch_shape), 1); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors; @@ -468,8 +470,8 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Cache_CheckDeepCopy) { std::vector<float> buffer_out(ov::shape_size(batch_shape), 1); auto model = 
OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW"); ie->set_property({{CONFIG_KEY(CACHE_DIR), m_cache_dir}}); - auto execNet_no_cache = ie->compile_model(model, targetDevice); - auto execNet = ie->compile_model(model, targetDevice); + auto execNet_no_cache = ie->compile_model(model, target_device); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); model->input(0).set_names({"updated_input0"}); // Change param name of original model @@ -490,7 +492,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Incorrect_tensor_element_type auto batch_shape = Shape{batch, 3, 3, 3}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); const std::string tensor_name = "tensor_input0"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape)); @@ -504,7 +506,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Incorrect_tensor_shape) { auto batch_shape = Shape{batch, 4, 4, 4}; auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); const std::string tensor_name = "tensor_input0"; - auto execNet = ie->compile_model(model, targetDevice); + auto execNet = ie->compile_model(model, target_device); ov::InferRequest req; req = execNet.create_infer_request(); std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape)); diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/callback.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/callback.cpp deleted file mode 100644 index acddd1365ab..00000000000 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/callback.cpp +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright (C) 2018-2022 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#include - -#include "shared_test_classes/subgraph/basic_lstm.hpp" -#include "behavior/ov_infer_request/callback.hpp" - -namespace ov { -namespace test { -namespace behavior { - -std::string OVInferRequestCallbackTests::getTestCaseName(const testing::TestParamInfo& obj) { - return OVInferRequestTests::getTestCaseName(obj); -} - -TEST_P(OVInferRequestCallbackTests, canCallAsyncWithCompletionCallback) { - ov::InferRequest req; - OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - bool is_called = false; - OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) { - // HSD_1805940120: Wait on starting callback return HDDL_ERROR_INVAL_TASK_HANDLE - ASSERT_EQ(exception_ptr, nullptr); - is_called = true; - })); - OV_ASSERT_NO_THROW(req.start_async()); - OV_ASSERT_NO_THROW(req.wait()); - ASSERT_TRUE(is_called); -} - -TEST_P(OVInferRequestCallbackTests, syncInferDoesNotCallCompletionCallback) { - ov::InferRequest req; - OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - bool is_called = false; - req.set_callback([&] (std::exception_ptr exception_ptr) { - ASSERT_EQ(nullptr, exception_ptr); - is_called = true; - }); - req.infer(); - ASSERT_FALSE(is_called); -} - -// test that can wait all callbacks on dtor -TEST_P(OVInferRequestCallbackTests, canStartSeveralAsyncInsideCompletionCallbackWithSafeDtor) { - const int NUM_ITER = 10; - struct TestUserData { - std::atomic<int> numIter = {0}; - std::promise<bool> promise; - }; - TestUserData data; - - ov::InferRequest req; - 
OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) { - if (exception_ptr) { - data.promise.set_exception(exception_ptr); - } else { - if (data.numIter.fetch_add(1) != NUM_ITER) { - req.start_async(); - } else { - data.promise.set_value(true); - } - } - })); - auto future = data.promise.get_future(); - OV_ASSERT_NO_THROW(req.start_async()); - OV_ASSERT_NO_THROW(req.wait()); - future.wait(); - auto callbackStatus = future.get(); - ASSERT_TRUE(callbackStatus); - auto dataNumIter = data.numIter - 1; - ASSERT_EQ(NUM_ITER, dataNumIter); -} - -TEST_P(OVInferRequestCallbackTests, returnGeneralErrorIfCallbackThrowException) { - ov::InferRequest req; - OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req.set_callback([] (std::exception_ptr) { - OPENVINO_UNREACHABLE("Throw"); - })); - OV_ASSERT_NO_THROW(req.start_async()); - ASSERT_THROW(req.wait(), ov::Exception); -} - -TEST_P(OVInferRequestCallbackTests, ReturnResultNotReadyFromWaitInAsyncModeForTooSmallTimeout) { - // GetNetwork(3000, 380) make inference around 20ms on GNA SW - // so increases chances for getting RESULT_NOT_READY - OV_ASSERT_NO_THROW(execNet = core->compile_model( - SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38), targetDevice, configuration)); - ov::InferRequest req; - OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - std::promise<std::chrono::system_clock::time_point> callbackTimeStamp; - auto callbackTimeStampFuture = callbackTimeStamp.get_future(); - // add a callback to the request and capture the timestamp - OV_ASSERT_NO_THROW(req.set_callback([&](std::exception_ptr exception_ptr) { - if (exception_ptr) { - callbackTimeStamp.set_exception(exception_ptr); - } else { - callbackTimeStamp.set_value(std::chrono::system_clock::now()); - } - })); - OV_ASSERT_NO_THROW(req.start_async()); - bool ready = false; - OV_ASSERT_NO_THROW(ready = req.wait_for({})); - // get timestamp taken AFTER return from the wait(STATUS_ONLY) - const auto afterWaitTimeStamp = std::chrono::system_clock::now(); - // IF the callback timestamp is larger than the afterWaitTimeStamp - // then we should observe false ready result - if (afterWaitTimeStamp < callbackTimeStampFuture.get()) { - ASSERT_FALSE(ready); - } - OV_ASSERT_NO_THROW(req.wait()); -} - -TEST_P(OVInferRequestCallbackTests, ImplDoesNotCopyCallback) { - ov::InferRequest req; - OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - { - auto somePtr = std::make_shared<int>(42); - OV_ASSERT_NO_THROW(req.set_callback([somePtr] (std::exception_ptr exception_ptr) { - ASSERT_EQ(nullptr, exception_ptr); - ASSERT_EQ(1, somePtr.use_count()); - })); - } - OV_ASSERT_NO_THROW(req.start_async()); - OV_ASSERT_NO_THROW(req.wait()); -} - -} // namespace behavior -} // namespace test -} // namespace ov \ No newline at end of file diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/cancellation.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/cancellation.cpp deleted file mode 100644 index da29df8558a..00000000000 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/cancellation.cpp +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (C) 2018-2022 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#include - -#include "behavior/ov_infer_request/cancellation.hpp" -#include "openvino/runtime/exception.hpp" -namespace ov { -namespace test { -namespace behavior { - -std::string OVInferRequestCancellationTests::getTestCaseName(const 
testing::TestParamInfo& obj) { - return OVInferRequestTests::getTestCaseName(obj); -} - -TEST_P(OVInferRequestCancellationTests, canCancelAsyncRequest) { - ov::InferRequest req; - OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req.start_async()); - OV_ASSERT_NO_THROW(req.cancel()); - try { - req.wait(); - } catch (const ov::Cancelled&) { - SUCCEED(); - } -} - -TEST_P(OVInferRequestCancellationTests, CanResetAfterCancelAsyncRequest) { - ov::InferRequest req; - OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req.start_async()); - OV_ASSERT_NO_THROW(req.cancel()); - try { - req.wait(); - } catch (const ov::Cancelled&) { - SUCCEED(); - } - OV_ASSERT_NO_THROW(req.start_async()); - OV_ASSERT_NO_THROW(req.wait()); -} - -TEST_P(OVInferRequestCancellationTests, canCancelBeforeAsyncRequest) { - ov::InferRequest req; - OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req.cancel()); -} - -TEST_P(OVInferRequestCancellationTests, canCancelInferRequest) { - ov::InferRequest req; - OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); - auto infer = std::async(std::launch::async, [&req]{req.infer();}); - while (!req.wait_for({})) { - } - OV_ASSERT_NO_THROW(req.cancel()); - try { - infer.get(); - } catch (const ov::Cancelled&) { - SUCCEED(); - } -} - -} // namespace behavior -} // namespace test -} // namespace ov diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/infer_request_dynamic.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/infer_request_dynamic.cpp index 36264777ea1..69d35450089 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/infer_request_dynamic.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/infer_request_dynamic.cpp @@ -37,9 +37,10 @@ namespace behavior { std::string OVInferRequestDynamicTests::getTestCaseName(testing::TestParamInfo obj) { std::shared_ptr<Model> func; std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>> inOutShapes; - std::string targetDevice; + std::string target_device; ov::AnyMap configuration; - std::tie(func, inOutShapes, targetDevice, configuration) = obj.param; + std::tie(func, inOutShapes, target_device, configuration) = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '.'); std::ostringstream result; result << "function=" << func->get_friendly_name() << "_"; result << "inOutShape=("; @@ -47,7 +48,7 @@ std::string OVInferRequestDynamicTests::getTestCaseName(testing::TestParamInfoGetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(function, inOutShapes, targetDevice, configuration) = this->GetParam(); + APIBaseTest::SetUp(); } bool OVInferRequestDynamicTests::checkOutput(const ov::runtime::Tensor& in, const ov::runtime::Tensor& actual) { @@ -81,13 +83,6 @@ bool OVInferRequestDynamicTests::checkOutput(const ov::runtime::Tensor& in, cons return result; } -void OVInferRequestDynamicTests::TearDown() { - if (!configuration.empty()) { - PluginCache::get().reset(); - } - function.reset(); -} - /* We have to check that we don't get a segmentation fault during inference if we set the first two times to the same shape and @@ -106,7 +101,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetwork) { }; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; 
const std::string outputname = function->outputs().back().get_any_name(); @@ -127,7 +122,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkSetUnexpectedOutputTensorB shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::runtime::Tensor tensor, otensor; @@ -152,7 +147,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkSetOutputTensorPreAllocate shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::runtime::Tensor tensor; @@ -177,7 +172,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkSetOutputShapeBeforeInfer) shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::runtime::Tensor tensor, otensor; @@ -199,7 +194,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithoutSetShape) { shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor; @@ -213,7 +208,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkBoundWithoutSetShape) { shapes[tensor_name] = {ov::Dimension(0, 5), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor; @@ -230,7 +225,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithGetTensor) { shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor, otensor; @@ -260,7 +255,7 @@ TEST_P(OVInferRequestDynamicTests, InferUpperBoundNetworkWithGetTensor) { shapes[tensor_name] = {ov::Dimension(0, 19), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor, otensor; @@ -288,7 +283,7 @@ TEST_P(OVInferRequestDynamicTests, InferFullyDynamicNetworkWithGetTensor) { shapes[tensor_name] = ov::PartialShape::dynamic(); OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - 
auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor, otensor; @@ -317,7 +312,7 @@ TEST_P(OVInferRequestDynamicTests, InferOutOfRangeShapeNetworkWithGetTensorLower shapes[tensor_name] = {ov::Dimension(2, 3), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor; @@ -336,7 +331,7 @@ TEST_P(OVInferRequestDynamicTests, InferOutOfRangeShapeNetworkWithGetTensorUpper shapes[tensor_name] = {ov::Dimension(1, 2), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor; @@ -357,7 +352,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithGetTensor2times) { shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor; @@ -392,7 +387,7 @@ TEST_P(OVInferRequestDynamicTests, GetSameTensor2times) { shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor; @@ -412,7 +407,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithSetTensor) { shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor(ov::element::f32, refShape); @@ -436,7 +431,7 @@ TEST_P(OVInferRequestDynamicTests, InferFullyDynamicNetworkWithSetTensor) { shapes[tensor_name] = ov::PartialShape::dynamic(); OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor(ov::element::f32, refShape), otensor; @@ -469,7 +464,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithSetTensor2times) { OV_ASSERT_NO_THROW(function->reshape(shapes)); const std::string outputName = function->outputs().back().get_any_name(); // Load ov::Model to target plugins - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; ov::Tensor tensor(ov::element::f32, refShape); @@ -504,7 +499,7 @@ TEST_P(OVInferRequestDynamicTests, 
InferDynamicNetworkWithLocalCore) { shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins - compiled_model = local_core.compile_model(function, targetDevice, configuration); + compiled_model = local_core.compile_model(function, target_device, configuration); } // Create InferRequest OV_ASSERT_NO_THROW(compiled_model.create_infer_request()); @@ -522,7 +517,7 @@ TEST_P(OVNotSupportRequestDynamicTests, InferDynamicNotSupported) { const std::string outputName = function->outputs().back().get_any_name(); // Load ov::Function to target plugins ov::CompiledModel execNet; - ASSERT_THROW((execNet = ie->compile_model(function, targetDevice, configuration)), ov::Exception); + ASSERT_THROW((execNet = ie->compile_model(function, target_device, configuration)), ov::Exception); } } // namespace behavior } // namespace test diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/inference.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/inference.cpp index 4996eafad29..71c4629bbb7 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/inference.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/inference.cpp @@ -17,12 +17,10 @@ std::string OVInferRequestInferenceTests::getTestCaseName( } void OVInferRequestInferenceTests::SetUp() { - SKIP_IF_CURRENT_TEST_IS_DISABLED() m_param = std::get<0>(GetParam()); - m_device_name = std::get<1>(GetParam()); -} - -void OVInferRequestInferenceTests::TearDown() { + target_device = std::get<1>(GetParam()); + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); } std::shared_ptr OVInferRequestInferenceTests::create_n_inputs(size_t n, @@ -50,7 +48,7 @@ std::shared_ptr OVInferRequestInferenceTests::create_n_inputs(size_t n, TEST_P(OVInferRequestInferenceTests, Inference_ROI_Tensor) { auto shape_size = ov::shape_size(m_param.m_shape); auto model = OVInferRequestInferenceTests::create_n_inputs(1, element::f32, m_param.m_shape); - auto execNet = ie->compile_model(model, m_device_name); + auto execNet = ie->compile_model(model, target_device); // Create InferRequest ov::InferRequest req; req = execNet.create_infer_request(); diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/inference_chaining.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/inference_chaining.cpp index 955ee1b10c6..4fb80bd6a4e 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/inference_chaining.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/inference_chaining.cpp @@ -83,9 +83,9 @@ std::shared_ptr OVInferenceChaining::getThirdStaticFunction(const ov: void OVInferenceChaining::Run() { ov::CompiledModel execNet0, execNet1, execNet2; - OV_ASSERT_NO_THROW(execNet0 = core->compile_model(function0, targetDevice, configuration)); - OV_ASSERT_NO_THROW(execNet1 = core->compile_model(function1, targetDevice, configuration)); - OV_ASSERT_NO_THROW(execNet2 = core->compile_model(function2, targetDevice, configuration)); + OV_ASSERT_NO_THROW(execNet0 = core->compile_model(function0, target_device, configuration)); + OV_ASSERT_NO_THROW(execNet1 = core->compile_model(function1, target_device, configuration)); + OV_ASSERT_NO_THROW(execNet2 = core->compile_model(function2, target_device, configuration)); ov::InferRequest r0, r1, r2; OV_ASSERT_NO_THROW(r0 = execNet0.create_infer_request()); diff --git 
a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/io_tensor.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/io_tensor.cpp index 519ce6c2de0..325e8dd4369 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/io_tensor.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/io_tensor.cpp @@ -16,10 +16,6 @@ namespace ov { namespace test { namespace behavior { -std::string OVInferRequestIOTensorTest::getTestCaseName(const testing::TestParamInfo& obj) { - return OVInferRequestTests::getTestCaseName(obj); -} - void OVInferRequestIOTensorTest::SetUp() { // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() @@ -187,7 +183,7 @@ TEST_P(OVInferRequestIOTensorTest, InferStaticNetworkSetInputTensor) { OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins std::shared_ptr ie = utils::PluginCache::get().core(); - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); @@ -208,7 +204,7 @@ TEST_P(OVInferRequestIOTensorTest, InferStaticNetworkSetOutputTensor) { OV_ASSERT_NO_THROW(function->reshape(shapes)); // Load ov::Model to target plugins std::shared_ptr ie = utils::PluginCache::get().core(); - auto execNet = ie->compile_model(function, targetDevice, configuration); + auto execNet = ie->compile_model(function, target_device, configuration); // Create InferRequest ov::InferRequest req; OV_ASSERT_NO_THROW(req = execNet.create_infer_request()); @@ -223,12 +219,13 @@ TEST_P(OVInferRequestIOTensorTest, InferStaticNetworkSetOutputTensor) { std::string OVInferRequestIOTensorSetPrecisionTest::getTestCaseName(const testing::TestParamInfo& obj) { element::Type type; - std::string targetDevice; + std::string target_device; ov::AnyMap configuration; - std::tie(type, targetDevice, configuration) = obj.param; + std::tie(type, target_device, configuration) = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '.'); std::ostringstream result; result << "type=" << type << "_"; - result << "targetDevice=" << targetDevice << "_"; + result << "target_device=" << target_device << "_"; if (!configuration.empty()) { using namespace CommonTestUtils; for (auto &configItem : configuration) { @@ -241,8 +238,9 @@ std::string OVInferRequestIOTensorSetPrecisionTest::getTestCaseName(const testin } void OVInferRequestIOTensorSetPrecisionTest::SetUp() { - SKIP_IF_CURRENT_TEST_IS_DISABLED() std::tie(element_type, target_device, config) = this->GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); function = ngraph::builder::subgraph::makeConvPoolRelu(); execNet = core->compile_model(function, target_device, config); req = execNet.create_infer_request(); @@ -251,6 +249,7 @@ void OVInferRequestIOTensorSetPrecisionTest::SetUp() { void OVInferRequestIOTensorSetPrecisionTest::TearDown() { execNet = {}; req = {}; + APIBaseTest::TearDown(); } TEST_P(OVInferRequestIOTensorSetPrecisionTest, CanSetInBlobWithDifferentPrecision) { @@ -277,12 +276,12 @@ TEST_P(OVInferRequestIOTensorSetPrecisionTest, CanSetOutBlobWithDifferentPrecisi std::string OVInferRequestCheckTensorPrecision::getTestCaseName(const testing::TestParamInfo& obj) { element::Type type; - std::string targetDevice; + std::string target_device; AnyMap configuration; - std::tie(type, 
targetDevice, configuration) = obj.param; + std::tie(type, target_device, configuration) = obj.param; std::ostringstream result; result << "type=" << type << "_"; - result << "targetDevice=" << targetDevice << "_"; + result << "target_device=" << target_device << "_"; if (!configuration.empty()) { using namespace CommonTestUtils; for (auto &configItem : configuration) { @@ -295,8 +294,9 @@ std::string OVInferRequestCheckTensorPrecision::getTestCaseName(const testing::T } void OVInferRequestCheckTensorPrecision::SetUp() { - SKIP_IF_CURRENT_TEST_IS_DISABLED() std::tie(element_type, target_device, config) = this->GetParam(); + SKIP_IF_CURRENT_TEST_IS_DISABLED() + APIBaseTest::SetUp(); { auto parameter1 = std::make_shared(element_type, ov::PartialShape{1, 3, 2, 2}); auto parameter2 = std::make_shared(element_type, ov::PartialShape{1, 3, 2, 2}); @@ -311,6 +311,7 @@ void OVInferRequestCheckTensorPrecision::SetUp() { void OVInferRequestCheckTensorPrecision::TearDown() { compModel = {}; req = {}; + APIBaseTest::TearDown(); } void OVInferRequestCheckTensorPrecision::Run() { diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/multithreading.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/multithreading.cpp deleted file mode 100644 index 61c5d95ce84..00000000000 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/multithreading.cpp +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright (C) 2018-2022 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#include - -#include "behavior/ov_infer_request/multithreading.hpp" - -namespace ov { -namespace test { -namespace behavior { - -std::string OVInferRequestMultithreadingTests::getTestCaseName(const testing::TestParamInfo& obj) { - return OVInferRequestTests::getTestCaseName(obj); -} - -TEST_P(OVInferRequestMultithreadingTests, canRun3SyncRequestsConsistentlyFromThreads) { - ov::InferRequest req1, req2, req3; - OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request()); - - auto f1 = std::async(std::launch::async, [&] { req1.infer(); }); - auto f2 = std::async(std::launch::async, [&] { req2.infer(); }); - auto f3 = std::async(std::launch::async, [&] { req3.infer(); }); - - f1.wait(); - f2.wait(); - f3.wait(); - - OV_ASSERT_NO_THROW(f1.get()); - OV_ASSERT_NO_THROW(f2.get()); - OV_ASSERT_NO_THROW(f3.get()); -} - -TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsConsistentlyFromThreadsWithoutWait) { - ov::InferRequest req1, req2, req3; - OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request()); - - OV_ASSERT_NO_THROW(req1.infer()); - OV_ASSERT_NO_THROW(req2.infer()); - OV_ASSERT_NO_THROW(req3.infer()); - - auto f1 = std::async(std::launch::async, [&] { req1.start_async(); }); - auto f2 = std::async(std::launch::async, [&] { req2.start_async(); }); - auto f3 = std::async(std::launch::async, [&] { req3.start_async(); }); - - f1.wait(); - f2.wait(); - f3.wait(); - - OV_ASSERT_NO_THROW(f1.get()); - OV_ASSERT_NO_THROW(f2.get()); - OV_ASSERT_NO_THROW(f3.get()); -} - -TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsConsistentlyWithWait) { - ov::InferRequest req1, req2, req3; - OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req3 = 
execNet.create_infer_request()); - - req1.start_async(); - OV_ASSERT_NO_THROW(req1.wait()); - - req2.start_async(); - OV_ASSERT_NO_THROW(req2.wait()); - - req3.start_async(); - OV_ASSERT_NO_THROW(req3.wait()); -} - -TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsParallelWithWait) { - ov::InferRequest req1, req2, req3; - OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request()); - OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request()); - - req1.start_async(); - req2.start_async(); - req3.start_async(); - - OV_ASSERT_NO_THROW(req2.wait()); - OV_ASSERT_NO_THROW(req1.wait()); - OV_ASSERT_NO_THROW(req3.wait()); -} - -} // namespace behavior -} // namespace test -} // namespace ov diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/perf_counters.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/perf_counters.cpp index 1edca5eceeb..46029b8ae49 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/perf_counters.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/perf_counters.cpp @@ -9,17 +9,13 @@ namespace ov { namespace test { namespace behavior { - -std::string OVInferRequestPerfCountersTest::getTestCaseName(const testing::TestParamInfo& obj) { - return OVInferRequestTests::getTestCaseName(obj); -} - void OVInferRequestPerfCountersTest::SetUp() { + std::tie(target_device, configuration) = this->GetParam(); SKIP_IF_CURRENT_TEST_IS_DISABLED() - std::tie(targetDevice, configuration) = this->GetParam(); - function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); + APIBaseTest::SetUp(); + function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device); configuration.insert(ov::enable_profiling(true)); - execNet = core->compile_model(function, targetDevice, configuration); + execNet = core->compile_model(function, target_device, configuration); req = execNet.create_infer_request(); } diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/wait.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/wait.cpp index b41cb539d18..9b21e1b87e6 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/wait.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_infer_request/wait.cpp @@ -8,15 +8,10 @@ namespace ov { namespace test { namespace behavior { - -std::string OVInferRequestWaitTests::getTestCaseName(const testing::TestParamInfo& obj) { - return OVInferRequestTests::getTestCaseName(obj); -} - void OVInferRequestWaitTests::SetUp() { + OVInferRequestTests::SetUp(); // Skip test according to plugin specific disabledTestPatterns() (if any) SKIP_IF_CURRENT_TEST_IS_DISABLED() - OVInferRequestTests::SetUp(); req = execNet.create_infer_request(); input = execNet.input(); output = execNet.output(); diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_plugin/caching_tests.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_plugin/caching_tests.cpp index d7096d9338d..682aa58cb54 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_plugin/caching_tests.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_plugin/caching_tests.cpp @@ -9,6 +9,7 @@ #include "common_test_utils/file_utils.hpp" #include "functional_test_utils/skip_tests_config.hpp" +#include "functional_test_utils/summary/api_summary.hpp" #include "ngraph_functions/builders.hpp" #include "ngraph_functions/subgraph_builders.hpp" @@ -133,12 
+134,15 @@ std::string CompileModelCacheTestBase::getTestCaseName(testing::TestParamInfo(param); auto batchSize = std::get<2>(param); auto deviceName = std::get<3>(param); + std::replace(deviceName.begin(), deviceName.end(), ':', '.'); return funcName + "_" + ngraph::element::Type(precision).get_type_name() + "_batch" + std::to_string(batchSize) + "_" + deviceName; } void CompileModelCacheTestBase::SetUp() { ovModelWithName funcPair; std::tie(funcPair, m_precision, m_batchSize, targetDevice, configuration) = GetParam(); + target_device = targetDevice; + APIBaseTest::SetUp(); auto fGen = std::get<0>(funcPair); m_functionName = std::get<1>(funcPair); try { @@ -148,7 +152,7 @@ void CompileModelCacheTestBase::SetUp() { } std::stringstream ss; - auto hash = std::hash<std::string>()(GetTestName()); + auto hash = std::hash<std::string>()(SubgraphBaseTest::GetTestName()); ss << "testCache_" << std::to_string(hash) << "_" << std::this_thread::get_id() << "_" << GetTimestamp(); for (auto& iter : configuration) { ss << "_" << iter.first << "_" << iter.second.as<std::string>() << "_"; } @@ -161,6 +165,7 @@ void CompileModelCacheTestBase::TearDown() { CommonTestUtils::removeFilesWithExt(m_cacheFolderName, "blob"); std::remove(m_cacheFolderName.c_str()); core->set_property(ov::cache_dir()); + APIBaseTest::TearDown(); } void CompileModelCacheTestBase::run() { @@ -219,6 +224,7 @@ std::string CompiledKernelsCacheTest::getTestCaseName(testing::TestParamInfo userConfig; std::tie(deviceName, userConfig) = obj.param; + std::replace(deviceName.begin(), deviceName.end(), ':', '.'); auto properties = userConfig.first; std::ostringstream result; result << "device_name=" << deviceName << "_"; @@ -229,6 +235,32 @@ std::string CompiledKernelsCacheTest::getTestCaseName(testing::TestParamInfo userConfig; + std::tie(targetDevice, userConfig) = GetParam(); + target_device = targetDevice; + APIBaseTest::SetUp(); + configuration = userConfig.first; + std::string ext = userConfig.second; + std::string::size_type pos = 0; + if ((pos = ext.find(",", pos)) != std::string::npos) { + m_extList.push_back(ext.substr(0, pos)); + m_extList.push_back(ext.substr(pos + 1)); +} else { + m_extList.push_back(ext); +} + std::replace(test_name.begin(), test_name.end(), '/', '_'); + std::replace(test_name.begin(), test_name.end(), '\\', '_'); + cache_path = "compiledModel" + test_name + "_cache"; +} + +void CompiledKernelsCacheTest::TearDown() { + std::remove(cache_path.c_str()); + core->set_property(ov::cache_dir()); + APIBaseTest::TearDown(); +} + TEST_P(CompiledKernelsCacheTest, CanCreateCacheDirAndDumpBinaries) { core->set_property(ov::cache_dir(cache_path)); try { diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_plugin/life_time.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_plugin/life_time.cpp index e358a0adde5..ad92daa8745 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_plugin/life_time.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_plugin/life_time.cpp @@ -12,12 +12,15 @@ namespace ov { namespace test { namespace behavior { std::string OVHoldersTest::getTestCaseName(testing::TestParamInfo<std::string> obj) { - return "targetDevice=" + obj.param; + std::string target_device = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '.'); + return "target_device=" + target_device; } void OVHoldersTest::SetUp() { + target_device = this->GetParam(); + APIBaseTest::SetUp(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - targetDevice = this->GetParam(); deathTestStyle = ::testing::GTEST_FLAG(death_test_style); if (deathTestStyle 
== "fast") { ::testing::GTEST_FLAG(death_test_style) = "threadsafe"; @@ -27,6 +30,7 @@ void OVHoldersTest::SetUp() { void OVHoldersTest::TearDown() { ::testing::GTEST_FLAG(death_test_style) = deathTestStyle; + APIBaseTest::TearDown(); } #define EXPECT_NO_CRASH(_statement) \ @@ -56,7 +60,7 @@ TEST_P(OVHoldersTest, Orders) { for (auto&& i : order) { order_str << objects.at(i) << " "; } - EXPECT_NO_CRASH(release_order_test(order, targetDevice, function)) << "for order: " << order_str.str(); + EXPECT_NO_CRASH(release_order_test(order, target_device, function)) << "for order: " << order_str.str(); } while (std::next_permutation(order.begin(), order.end())); } @@ -64,7 +68,7 @@ TEST_P(OVHoldersTest, LoadedState) { std::vector states; { ov::Core core = createCoreWithTemplate(); - auto compiled_model = core.compile_model(function, targetDevice); + auto compiled_model = core.compile_model(function, target_device); auto request = compiled_model.create_infer_request(); try { states = request.query_state(); @@ -76,7 +80,7 @@ TEST_P(OVHoldersTest, LoadedTensor) { ov::Tensor tensor; { ov::Core core = createCoreWithTemplate(); - auto compiled_model = core.compile_model(function, targetDevice); + auto compiled_model = core.compile_model(function, target_device); auto request = compiled_model.create_infer_request(); tensor = request.get_input_tensor(); } @@ -86,7 +90,7 @@ TEST_P(OVHoldersTest, LoadedAny) { ov::Any any; { ov::Core core = createCoreWithTemplate(); - auto compiled_model = core.compile_model(function, targetDevice); + auto compiled_model = core.compile_model(function, target_device); any = compiled_model.get_property(ov::supported_properties.name()); } } @@ -97,7 +101,7 @@ TEST_P(OVHoldersTest, LoadedRemoteContext) { ov::RemoteContext ctx; { ov::Core core = createCoreWithTemplate(); - auto compiled_model = core.compile_model(function, targetDevice); + auto compiled_model = core.compile_model(function, target_device); try { ctx = compiled_model.get_context(); } catch(...) 
{} @@ -106,12 +110,15 @@ TEST_P(OVHoldersTest, LoadedRemoteContext) { std::string OVHoldersTestOnImportedNetwork::getTestCaseName(testing::TestParamInfo<std::string> obj) { - return "targetDevice=" + obj.param; + std::string target_device = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '.'); + return "target_device=" + target_device; } void OVHoldersTestOnImportedNetwork::SetUp() { + target_device = this->GetParam(); + APIBaseTest::SetUp(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - targetDevice = this->GetParam(); deathTestStyle = ::testing::GTEST_FLAG(death_test_style); if (deathTestStyle == "fast") { ::testing::GTEST_FLAG(death_test_style) = "threadsafe"; @@ -121,16 +128,17 @@ void OVHoldersTestOnImportedNetwork::SetUp() { void OVHoldersTestOnImportedNetwork::TearDown() { ::testing::GTEST_FLAG(death_test_style) = deathTestStyle; + APIBaseTest::TearDown(); } TEST_P(OVHoldersTestOnImportedNetwork, LoadedTensor) { ov::Core core = createCoreWithTemplate(); std::stringstream stream; { - auto compiled_model = core.compile_model(function, targetDevice); + auto compiled_model = core.compile_model(function, target_device); compiled_model.export_model(stream); } - auto compiled_model = core.import_model(stream, targetDevice); + auto compiled_model = core.import_model(stream, target_device); auto request = compiled_model.create_infer_request(); ov::Tensor tensor = request.get_input_tensor(); } @@ -139,10 +147,10 @@ TEST_P(OVHoldersTestOnImportedNetwork, CreateRequestWithCoreRemoved) { ov::Core core = createCoreWithTemplate(); std::stringstream stream; { - auto compiled_model = core.compile_model(function, targetDevice); + auto compiled_model = core.compile_model(function, target_device); compiled_model.export_model(stream); } - auto compiled_model = core.import_model(stream, targetDevice); + auto compiled_model = core.import_model(stream, target_device); core = ov::Core{}; auto request = compiled_model.create_infer_request(); } diff --git a/src/tests/functional/plugin/shared/src/behavior/ov_plugin/properties_tests.cpp b/src/tests/functional/plugin/shared/src/behavior/ov_plugin/properties_tests.cpp index 24d2fb86ee1..b7203bc9eee 100644 --- a/src/tests/functional/plugin/shared/src/behavior/ov_plugin/properties_tests.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/ov_plugin/properties_tests.cpp @@ -11,21 +11,25 @@ namespace test { namespace behavior { std::string OVEmptyPropertiesTests::getTestCaseName(testing::TestParamInfo<std::string> obj) { - return "device_name=" + obj.param; + std::string target_device = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '.'); + return "target_device=" + target_device; } void OVEmptyPropertiesTests::SetUp() { + target_device = this->GetParam(); + APIBaseTest::SetUp(); SKIP_IF_CURRENT_TEST_IS_DISABLED() - device_name = this->GetParam(); model = ngraph::builder::subgraph::makeConvPoolRelu(); } std::string OVPropertiesTests::getTestCaseName(testing::TestParamInfo obj) { - std::string device_name; + std::string target_device; AnyMap properties; - std::tie(device_name, properties) = obj.param; + std::tie(target_device, properties) = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '.'); std::ostringstream result; - result << "device_name=" << device_name << "_"; + result << "target_device=" << target_device << "_"; if (!properties.empty()) { result << "properties=" << util::join(util::split(util::to_string(properties), ' '), "_"); } @@ -33,8 +37,9 @@ std::string 
OVPropertiesTests::getTestCaseName(testing::TestParamInfoGetParam(); + APIBaseTest::SetUp(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(device_name, properties) = this->GetParam(); model = ngraph::builder::subgraph::makeConvPoolRelu(); } @@ -42,15 +47,17 @@ void OVPropertiesTests::TearDown() { if (!properties.empty()) { utils::PluginCache::get().reset(); } + APIBaseTest::TearDown(); } std::string OVSetPropComplieModleGetPropTests::getTestCaseName(testing::TestParamInfo obj) { - std::string device_name; + std::string target_device; AnyMap properties; AnyMap compileModelProperties; - std::tie(device_name, properties, compileModelProperties) = obj.param; + std::tie(target_device, properties, compileModelProperties) = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '.'); std::ostringstream result; - result << "device_name=" << device_name << "_"; + result << "target_device=" << target_device << "_"; if (!properties.empty()) { result << "properties=" << util::join(util::split(util::to_string(properties), ' '), "_"); } @@ -62,67 +69,67 @@ std::string OVSetPropComplieModleGetPropTests::getTestCaseName(testing::TestPara void OVSetPropComplieModleGetPropTests::SetUp() { SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(device_name, properties, compileModelProperties) = this->GetParam(); + std::tie(target_device, properties, compileModelProperties) = this->GetParam(); model = ngraph::builder::subgraph::makeConvPoolRelu(); } TEST_P(OVEmptyPropertiesTests, SetEmptyProperties) { - OV_ASSERT_NO_THROW(core->get_property(device_name, ov::supported_properties)); - OV_ASSERT_NO_THROW(core->set_property(device_name, AnyMap{})); + OV_ASSERT_NO_THROW(core->get_property(target_device, ov::supported_properties)); + OV_ASSERT_NO_THROW(core->set_property(target_device, AnyMap{})); } // Setting correct properties doesn't throw TEST_P(OVPropertiesTests, SetCorrectProperties) { - OV_ASSERT_NO_THROW(core->set_property(device_name, properties)); + OV_ASSERT_NO_THROW(core->set_property(target_device, properties)); } TEST_P(OVPropertiesTests, canSetPropertyAndCheckGetProperty) { - core->set_property(device_name, properties); + core->set_property(target_device, properties); for (const auto& property_item : properties) { Any property; - OV_ASSERT_NO_THROW(property = core->get_property(device_name, property_item.first)); + OV_ASSERT_NO_THROW(property = core->get_property(target_device, property_item.first)); ASSERT_FALSE(property.empty()); std::cout << property_item.first << ":" << property.as() << std::endl; } } TEST_P(OVPropertiesIncorrectTests, SetPropertiesWithIncorrectKey) { - ASSERT_THROW(core->set_property(device_name, properties), ov::Exception); + ASSERT_THROW(core->set_property(target_device, properties), ov::Exception); } TEST_P(OVPropertiesIncorrectTests, CanNotCompileModelWithIncorrectProperties) { - ASSERT_THROW(core->compile_model(model, device_name, properties), ov::Exception); + ASSERT_THROW(core->compile_model(model, target_device, properties), ov::Exception); } TEST_P(OVPropertiesDefaultTests, CanSetDefaultValueBackToPlugin) { std::vector supported_properties; - OV_ASSERT_NO_THROW(supported_properties = core->get_property(device_name, ov::supported_properties)); + OV_ASSERT_NO_THROW(supported_properties = core->get_property(target_device, ov::supported_properties)); for (auto& supported_property : supported_properties) { Any property; - OV_ASSERT_NO_THROW(property = core->get_property(device_name, supported_property)); + OV_ASSERT_NO_THROW(property = 
core->get_property(target_device, supported_property)); if (supported_property.is_mutable()) { - OV_ASSERT_NO_THROW(core->set_property(device_name, {{ supported_property, property}})); + OV_ASSERT_NO_THROW(core->set_property(target_device, {{ supported_property, property}})); } } } TEST_P(OVPropertiesDefaultTests, CheckDefaultValues) { std::vector supported_properties; - OV_ASSERT_NO_THROW(supported_properties = core->get_property(device_name, ov::supported_properties)); + OV_ASSERT_NO_THROW(supported_properties = core->get_property(target_device, ov::supported_properties)); for (auto&& default_property : properties) { auto supported = util::contains(supported_properties, default_property.first); ASSERT_TRUE(supported) << "default_property=" << default_property.first; Any property; - OV_ASSERT_NO_THROW(property = core->get_property(device_name, default_property.first)); + OV_ASSERT_NO_THROW(property = core->get_property(target_device, default_property.first)); ASSERT_EQ(default_property.second, property); } } TEST_P(OVSetPropComplieModleGetPropTests, SetPropertyComplieModelGetProperty) { - OV_ASSERT_NO_THROW(core->set_property(device_name, properties)); + OV_ASSERT_NO_THROW(core->set_property(target_device, properties)); ov::CompiledModel exeNetWork; - OV_ASSERT_NO_THROW(exeNetWork = core->compile_model(model, device_name, compileModelProperties)); + OV_ASSERT_NO_THROW(exeNetWork = core->compile_model(model, target_device, compileModelProperties)); for (const auto& property_item : compileModelProperties) { Any exeNetProperty; @@ -133,7 +140,7 @@ TEST_P(OVSetPropComplieModleGetPropTests, SetPropertyComplieModelGetProperty) { //the value of get property should be the same as set property for (const auto& property_item : properties) { Any property; - OV_ASSERT_NO_THROW(property = core->get_property(device_name, property_item.first)); + OV_ASSERT_NO_THROW(property = core->get_property(target_device, property_item.first)); ASSERT_EQ(property_item.second.as(), property.as()); } } diff --git a/src/tests/functional/plugin/shared/src/behavior/plugin/caching_tests.cpp b/src/tests/functional/plugin/shared/src/behavior/plugin/caching_tests.cpp index 94912565efc..2437a25789c 100644 --- a/src/tests/functional/plugin/shared/src/behavior/plugin/caching_tests.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/plugin/caching_tests.cpp @@ -131,12 +131,15 @@ std::string LoadNetworkCacheTestBase::getTestCaseName(testing::TestParamInfo(param); auto batchSize = std::get<2>(param); auto deviceName = std::get<3>(param); + std::replace(deviceName.begin(), deviceName.end(), ':', '.'); return funcName + "_" + ngraph::element::Type(precision).get_type_name() + "_batch" + std::to_string(batchSize) + "_" + deviceName; } void LoadNetworkCacheTestBase::SetUp() { nGraphFunctionWithName funcPair; std::tie(funcPair, m_precision, m_batchSize, targetDevice) = GetParam(); + target_device = targetDevice; + APIBaseTest::SetUp(); auto fGen = std::get<0>(funcPair); m_functionName = std::get<1>(funcPair); try { @@ -146,7 +149,7 @@ void LoadNetworkCacheTestBase::SetUp() { } std::stringstream ss; - auto hash = std::hash()(GetTestName()); + auto hash = std::hash()(LayerTestsUtils::LayerTestsCommon::GetTestName()); ss << "testCache_" << std::to_string(hash) << "_" << std::this_thread::get_id() << "_" << GetTimestamp(); for (auto& iter : configuration) { ss << "_" << iter.first << "_" << iter.second << "_"; @@ -159,6 +162,7 @@ void LoadNetworkCacheTestBase::TearDown() { CommonTestUtils::removeFilesWithExt(m_cacheFolderName, 
"blob"); std::remove(m_cacheFolderName.c_str()); core->SetConfig({{CONFIG_KEY(CACHE_DIR), {}}}); + APIBaseTest::TearDown(); } void LoadNetworkCacheTestBase::Run() { @@ -221,6 +225,7 @@ std::string LoadNetworkCompiledKernelsCacheTest::getTestCaseName(testing::TestPa std::string deviceName; std::pair, std::string> userConfig; std::tie(deviceName, userConfig) = obj.param; + std::replace(deviceName.begin(), deviceName.end(), ':', '.'); std::map confstr = userConfig.first; std::ostringstream result; result << "device_name=" << deviceName << "_"; diff --git a/src/tests/functional/plugin/shared/src/behavior/plugin/configuration_tests.cpp b/src/tests/functional/plugin/shared/src/behavior/plugin/configuration_tests.cpp index c2ff8c4c1a3..6424bba29fd 100644 --- a/src/tests/functional/plugin/shared/src/behavior/plugin/configuration_tests.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/plugin/configuration_tests.cpp @@ -11,6 +11,7 @@ std::string DefaultConfigurationTest::getTestCaseName(const ::testing::TestParam std::string targetName; DefaultParameter defaultParameter; std::tie(targetName, defaultParameter) = obj.param; + std::replace(targetName.begin(), targetName.end(), ':', '.'); std::ostringstream result; result << "configKey=" << defaultParameter._key << "_"; result << "targetDevice=" << targetName; @@ -18,49 +19,49 @@ std::string DefaultConfigurationTest::getTestCaseName(const ::testing::TestParam } TEST_P(DefaultConfigurationTest, checkDeviceDefaultConfigurationValue) { - targetDevice = std::get(GetParam()); + target_device = std::get(GetParam()); std::string key; InferenceEngine::Parameter parameter; CustomComparator customComparator; defaultParameter = std::get(GetParam()); if (defaultParameter._comparator) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key); + auto expected = _core->GetConfig(target_device, defaultParameter._key); auto &actual = parameter; ASSERT_TRUE(defaultParameter._comparator(expected, actual)) << "For Key: " << defaultParameter._key; } else if (defaultParameter._parameter.is()) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key).as(); + auto expected = _core->GetConfig(target_device, defaultParameter._key).as(); auto actual = defaultParameter._parameter.as(); ASSERT_EQ(expected, actual); } else if (defaultParameter._parameter.is()) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key).as(); + auto expected = _core->GetConfig(target_device, defaultParameter._key).as(); auto actual = defaultParameter._parameter.as(); ASSERT_EQ(expected, actual); } else if (defaultParameter._parameter.is()) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key).as(); + auto expected = _core->GetConfig(target_device, defaultParameter._key).as(); auto actual = defaultParameter._parameter.as(); ASSERT_EQ(expected, actual); } else if (defaultParameter._parameter.is()) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key).as(); + auto expected = _core->GetConfig(target_device, defaultParameter._key).as(); auto actual = defaultParameter._parameter.as(); ASSERT_EQ(expected, actual); } else if (defaultParameter._parameter.is()) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key).as(); + auto expected = _core->GetConfig(target_device, defaultParameter._key).as(); auto actual = defaultParameter._parameter.as(); ASSERT_EQ(expected, actual); } else if (defaultParameter._parameter.is>()) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key).as>(); 
+ auto expected = _core->GetConfig(target_device, defaultParameter._key).as>(); auto actual = defaultParameter._parameter.as>(); ASSERT_EQ(expected, actual); } else if (defaultParameter._parameter.is>()) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key).as>(); + auto expected = _core->GetConfig(target_device, defaultParameter._key).as>(); auto actual = defaultParameter._parameter.as>(); ASSERT_EQ(expected, actual); } else if (defaultParameter._parameter.is>()) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key).as>(); + auto expected = _core->GetConfig(target_device, defaultParameter._key).as>(); auto actual = defaultParameter._parameter.as>(); ASSERT_EQ(expected, actual); } else if (defaultParameter._parameter.is>()) { - auto expected = _core->GetConfig(targetDevice, defaultParameter._key).as>(); + auto expected = _core->GetConfig(target_device, defaultParameter._key).as>(); auto actual = defaultParameter._parameter.as>(); ASSERT_EQ(expected, actual); } else { @@ -73,129 +74,129 @@ TEST_P(DefaultConfigurationTest, checkDeviceDefaultConfigurationValue) { // Setting empty config doesn't throw TEST_P(EmptyConfigTests, SetEmptyConfig) { std::map config; - ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); - ASSERT_NO_THROW(ie->SetConfig(config, targetDevice)); + ASSERT_NO_THROW(ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(ie->SetConfig(config, target_device)); } TEST_P(EmptyConfigTests, CanLoadNetworkWithEmptyConfig) { std::map config; - ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); - ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, config)); + ASSERT_NO_THROW(ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, config)); } TEST_P(CorrectSingleOptionDefaultValueConfigTests, CheckDefaultValueOfConfig) { - ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); - ASSERT_EQ(ie->GetConfig(targetDevice, key), value); + ASSERT_NO_THROW(ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_EQ(ie->GetConfig(target_device, key), value); } // Setting correct config doesn't throw TEST_P(CorrectConfigTests, SetCorrectConfig) { - ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); - ASSERT_NO_THROW(ie->SetConfig(configuration, targetDevice)); + ASSERT_NO_THROW(ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(ie->SetConfig(configuration, target_device)); } TEST_P(CorrectConfigTests, CanLoadNetworkWithCorrectConfig) { - ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration)); } TEST_P(CorrectConfigTests, CanUseCache) { // Create CNNNetwork from ngrpah::Function InferenceEngine::CNNNetwork cnnNet(function); ie->SetConfig({ { CONFIG_KEY(CACHE_DIR), "./test_cache" } }); - ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); - ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration)); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration)); CommonTestUtils::removeDir("./test_cache"); } TEST_P(CorrectConfigCheck, canSetConfigAndCheckGetConfig) { - ie->SetConfig(configuration, targetDevice); + ie->SetConfig(configuration, target_device); for (const auto& configItem : 
configuration) { InferenceEngine::Parameter param; - ASSERT_NO_THROW(param = ie->GetConfig(targetDevice, configItem.first)); + ASSERT_NO_THROW(param = ie->GetConfig(target_device, configItem.first)); ASSERT_FALSE(param.empty()); ASSERT_EQ(param, InferenceEngine::Parameter(configItem.second)); } } TEST_P(CorrectConfigCheck, canSetConfigTwiceAndCheckGetConfig) { - ie->SetConfig({}, targetDevice); - ie->SetConfig(configuration, targetDevice); + ie->SetConfig({}, target_device); + ie->SetConfig(configuration, target_device); for (const auto& configItem : configuration) { InferenceEngine::Parameter param; - ASSERT_NO_THROW(param = ie->GetConfig(targetDevice, configItem.first)); + ASSERT_NO_THROW(param = ie->GetConfig(target_device, configItem.first)); ASSERT_FALSE(param.empty()); ASSERT_EQ(param, InferenceEngine::Parameter(configItem.second)); } } TEST_P(CorrectSingleOptionCustomValueConfigTests, CheckCustomValueOfConfig) { - ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::map configuration = {{key, value}}; - ASSERT_NO_THROW(ie->SetConfig(configuration, targetDevice)); - ASSERT_EQ(ie->GetConfig(targetDevice, key), reference); + ASSERT_NO_THROW(ie->SetConfig(configuration, target_device)); + ASSERT_EQ(ie->GetConfig(target_device, key), reference); } TEST_P(CorrectConfigPublicOptionsTests, CanSeePublicOption) { InferenceEngine::Parameter metric; - ASSERT_NO_THROW(metric = ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(metric = ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); const auto& supportedOptions = metric.as>(); ASSERT_NE(std::find(supportedOptions.cbegin(), supportedOptions.cend(), key), supportedOptions.cend()); } TEST_P(CorrectConfigPrivateOptionsTests, CanNotSeePrivateOption) { InferenceEngine::Parameter metric; - ASSERT_NO_THROW(metric = ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(metric = ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); const auto& supportedOptions = metric.as>(); ASSERT_EQ(std::find(supportedOptions.cbegin(), supportedOptions.cend(), key), supportedOptions.cend()); } TEST_P(IncorrectConfigTests, SetConfigWithIncorrectKey) { - ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); - ASSERT_THROW(ie->SetConfig(configuration, targetDevice), InferenceEngine::Exception); + ASSERT_NO_THROW(ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_THROW(ie->SetConfig(configuration, target_device), InferenceEngine::Exception); } TEST_P(IncorrectConfigTests, CanNotLoadNetworkWithIncorrectConfig) { - ASSERT_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration), + ASSERT_THROW(auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration), InferenceEngine::Exception); } TEST_P(IncorrectConfigSingleOptionTests, CanNotGetConfigWithIncorrectConfig) { - ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); - ASSERT_THROW(ie->GetConfig(targetDevice, key), InferenceEngine::Exception); + ASSERT_NO_THROW(ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_THROW(ie->GetConfig(target_device, key), InferenceEngine::Exception); } TEST_P(IncorrectConfigAPITests, SetConfigWithNoExistingKey) { - ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); - ASSERT_THROW(ie->SetConfig(configuration, targetDevice), 
InferenceEngine::Exception); + ASSERT_NO_THROW(ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_THROW(ie->SetConfig(configuration, target_device), InferenceEngine::Exception); } TEST_P(DefaultValuesConfigTests, CanSetDefaultValueBackToPlugin) { InferenceEngine::Parameter metric; - ASSERT_NO_THROW(metric = ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); + ASSERT_NO_THROW(metric = ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); std::vector keys = metric; for (auto& key : keys) { InferenceEngine::Parameter configValue; - ASSERT_NO_THROW(configValue = ie->GetConfig(targetDevice, key)); + ASSERT_NO_THROW(configValue = ie->GetConfig(target_device, key)); - ASSERT_NO_THROW(ie->SetConfig({{ key, configValue.as()}}, targetDevice)) - << "device=" << targetDevice << " " + ASSERT_NO_THROW(ie->SetConfig({{ key, configValue.as()}}, target_device)) + << "device=" << target_device << " " << "config key=" << key << " " << "value=" << configValue.as(); } } TEST_P(ExclusiveAsyncReqTests, excluAsyncReqTests) { - ASSERT_NO_THROW(ie->SetConfig(configuration, targetDevice)); - ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); + ASSERT_NO_THROW(ie->SetConfig(configuration, target_device)); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration)); } TEST_P(SetPropLoadNetWorkGetPropTests, SetPropLoadNetWorkGetProperty) { - ASSERT_NO_THROW(ie->SetConfig(configuration, targetDevice)); + ASSERT_NO_THROW(ie->SetConfig(configuration, target_device)); InferenceEngine::ExecutableNetwork exeNetWork; - ASSERT_NO_THROW(exeNetWork = ie->LoadNetwork(cnnNet, targetDevice, loadNetWorkConfig)); + ASSERT_NO_THROW(exeNetWork = ie->LoadNetwork(cnnNet, target_device, loadNetWorkConfig)); for (const auto& property_item : loadNetWorkConfig) { InferenceEngine::Parameter exeNetProperty; ASSERT_NO_THROW(exeNetProperty = exeNetWork.GetConfig(property_item.first)); @@ -205,7 +206,7 @@ TEST_P(SetPropLoadNetWorkGetPropTests, SetPropLoadNetWorkGetProperty) { // the value of GetConfig should be the same as SetConfig for (const auto& property_item : configuration) { InferenceEngine::Parameter property; - ASSERT_NO_THROW(property = ie->GetConfig(targetDevice, property_item.first)); + ASSERT_NO_THROW(property = ie->GetConfig(target_device, property_item.first)); ASSERT_EQ(property_item.second, property.as()); } } diff --git a/src/tests/functional/plugin/shared/src/behavior/plugin/life_time.cpp b/src/tests/functional/plugin/shared/src/behavior/plugin/life_time.cpp index 748e7b7fabc..5f81dd865a5 100644 --- a/src/tests/functional/plugin/shared/src/behavior/plugin/life_time.cpp +++ b/src/tests/functional/plugin/shared/src/behavior/plugin/life_time.cpp @@ -12,13 +12,14 @@ namespace BehaviorTestsDefinitions { std::string HoldersTest::getTestCaseName(testing::TestParamInfo obj) { - std::string targetDevice; + std::string target_device; std::vector order; - std::tie(targetDevice, order) = obj.param; + std::tie(target_device, order) = obj.param; + std::replace(target_device.begin(), target_device.end(), ':', '.'); std::ostringstream result; - result << "targetDevice=" << targetDevice << "_"; + result << "target_device=" << target_device << "_"; if (!order.empty()) { - std::string objects[] = { "core", "exec-net", "request", "state" }; + std::string objects[] = { "core", "exec.net", "request", "state" }; for (auto &Item : order) { result << objects[Item] << "_"; } @@ -27,16 +28,17 @@ namespace BehaviorTestsDefinitions { } void HoldersTest::SetUp() { + 
std::tie(target_device, order) = this->GetParam(); + APIBaseTest::SetUp(); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - std::tie(targetDevice, order) = this->GetParam(); function = ngraph::builder::subgraph::makeConvPoolRelu(); } - void release_order_test(std::vector order, const std::string &deviceName, + void release_order_test(std::vector order, const std::string &target_device, std::shared_ptr function) { InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::Core core = BehaviorTestsUtils::createIECoreWithTemplate(); - auto exe_net = core.LoadNetwork(cnnNet, deviceName); + auto exe_net = core.LoadNetwork(cnnNet, target_device); auto request = exe_net.CreateInferRequest(); std::vector states; try { @@ -78,7 +80,7 @@ namespace BehaviorTestsDefinitions { #else if (sigsetjmp(CommonTestUtils::env, 1) == CommonTestUtils::JMP_STATUS::ok) { #endif - EXPECT_NO_THROW(release_order_test(order, targetDevice, function)); + EXPECT_NO_THROW(release_order_test(order, target_device, function)); } else { IE_THROW() << "Crash happens"; } @@ -94,20 +96,21 @@ namespace BehaviorTestsDefinitions { #else if (sigsetjmp(CommonTestUtils::env, 1) == CommonTestUtils::JMP_STATUS::ok) { #endif - EXPECT_NO_THROW(release_order_test(order, targetDevice, function)); + EXPECT_NO_THROW(release_order_test(order, target_device, function)); } else { IE_THROW() << "Crash happens"; } } std::string HoldersTestOnImportedNetwork::getTestCaseName(testing::TestParamInfo obj) { - return "targetDevice=" + obj.param; + return "target_device=" + obj.param; } void HoldersTestOnImportedNetwork::SetUp() { - SKIP_IF_CURRENT_TEST_IS_DISABLED(); - targetDevice = this->GetParam(); + target_device = this->GetParam(); + APIBaseTest::SetUp(); function = ngraph::builder::subgraph::makeConvPoolRelu(); + SKIP_IF_CURRENT_TEST_IS_DISABLED(); } TEST_P(HoldersTestOnImportedNetwork, CreateRequestWithCoreRemoved) { @@ -115,10 +118,10 @@ namespace BehaviorTestsDefinitions { InferenceEngine::Core core = BehaviorTestsUtils::createIECoreWithTemplate(); std::stringstream stream; { - auto exe_net = core.LoadNetwork(cnnNet, targetDevice); + auto exe_net = core.LoadNetwork(cnnNet, target_device); exe_net.Export(stream); } - auto exe_net = core.ImportNetwork(stream, targetDevice); + auto exe_net = core.ImportNetwork(stream, target_device); core = InferenceEngine::Core(); auto request = exe_net.CreateInferRequest(); } diff --git a/src/tests/functional/plugin/shared/src/main.cpp b/src/tests/functional/plugin/shared/src/main.cpp index 493765ae9f4..74bc7f548fc 100644 --- a/src/tests/functional/plugin/shared/src/main.cpp +++ b/src/tests/functional/plugin/shared/src/main.cpp @@ -4,8 +4,8 @@ #include "gtest/gtest.h" -#include "functional_test_utils/layer_test_utils/environment.hpp" -#include "functional_test_utils/layer_test_utils/summary.hpp" +#include "functional_test_utils/summary/environment.hpp" +#include "functional_test_utils/summary/op_summary.hpp" #include "functional_test_utils/skip_tests_config.hpp" int main(int argc, char *argv[]) { @@ -16,14 +16,14 @@ int main(int argc, char *argv[]) { if (std::string(argv[i]) == "--disable_tests_skipping") { FuncTestUtils::SkipTestsConfig::disable_tests_skipping = true; } else if (std::string(argv[i]) == "--extract_body") { - LayerTestsUtils::Summary::setExtractBody(true); + ov::test::utils::OpSummary::setExtractBody(true); } else if (std::string(argv[i]) == "--help") { print_custom_help = true; } else if (std::string(argv[i]).find("--output_folder") != std::string::npos) { outputFolderPath = 
std::string(argv[i]).substr(std::string("--output_folder").length() + 1); - LayerTestsUtils::Summary::setOutputFolder(outputFolderPath); + ov::test::utils::OpSummary::setOutputFolder(outputFolderPath); } else if (std::string(argv[i]).find("--report_unique_name") != std::string::npos) { - LayerTestsUtils::Summary::setSaveReportWithUniqueName(true); + ov::test::utils::OpSummary::setSaveReportWithUniqueName(true); } else if (std::string(argv[i]).find("--save_report_timeout") != std::string::npos) { size_t timeout; try { @@ -31,7 +31,7 @@ int main(int argc, char *argv[]) { } catch (...) { throw std::runtime_error("Incorrect value of \"--save_report_timeout\" argument"); } - LayerTestsUtils::Summary::setSaveReportTimeout(timeout); + ov::test::utils::OpSummary::setSaveReportTimeout(timeout); } } @@ -56,13 +56,13 @@ int main(int argc, char *argv[]) { std::cout << std::endl; } - if (LayerTestsUtils::Summary::getSaveReportWithUniqueName() && - LayerTestsUtils::Summary::getExtendReport()) { + if (ov::test::utils::OpSummary::getSaveReportWithUniqueName() && + ov::test::utils::OpSummary::getExtendReport()) { throw std::runtime_error("Using mutually exclusive arguments: --extend_report and --report_unique_name"); } ::testing::InitGoogleTest(&argc, argv); - ::testing::AddGlobalTestEnvironment(new LayerTestsUtils::TestEnvironment); + ::testing::AddGlobalTestEnvironment(new ov::test::utils::TestEnvironment); auto retcode = RUN_ALL_TESTS(); return retcode; diff --git a/src/tests/functional/shared_test_classes/include/shared_test_classes/base/layer_test_utils.hpp b/src/tests/functional/shared_test_classes/include/shared_test_classes/base/layer_test_utils.hpp index 3ead1299065..53941dad822 100644 --- a/src/tests/functional/shared_test_classes/include/shared_test_classes/base/layer_test_utils.hpp +++ b/src/tests/functional/shared_test_classes/include/shared_test_classes/base/layer_test_utils.hpp @@ -27,16 +27,14 @@ #include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/blob_utils.hpp" #include "functional_test_utils/precision_utils.hpp" -#include "functional_test_utils/layer_test_utils/summary.hpp" -#include "functional_test_utils/layer_test_utils/environment.hpp" +#include "functional_test_utils/summary/op_summary.hpp" +#include "functional_test_utils/summary/environment.hpp" #include "ngraph_functions/utils/ngraph_helpers.hpp" #include "ngraph_functions/pass/convert_prc.hpp" namespace LayerTestsUtils { - - using TargetDevice = std::string; typedef std::tuple< diff --git a/src/tests/functional/shared_test_classes/include/shared_test_classes/base/ov_subgraph.hpp b/src/tests/functional/shared_test_classes/include/shared_test_classes/base/ov_subgraph.hpp index b5b6713307c..2d1038d272b 100644 --- a/src/tests/functional/shared_test_classes/include/shared_test_classes/base/ov_subgraph.hpp +++ b/src/tests/functional/shared_test_classes/include/shared_test_classes/base/ov_subgraph.hpp @@ -8,7 +8,7 @@ #include "common_test_utils/test_common.hpp" #include "functional_test_utils/ov_plugin_cache.hpp" -#include "functional_test_utils/layer_test_utils/summary.hpp" +#include "functional_test_utils/summary/op_summary.hpp" namespace ov { namespace test { @@ -55,7 +55,7 @@ protected: constexpr static const double disable_threshold = std::numeric_limits::max(); double abs_threshold = disable_threshold, rel_threshold = disable_threshold; - LayerTestsUtils::Summary& summary = LayerTestsUtils::Summary::getInstance(); + ov::test::utils::OpSummary& summary = ov::test::utils::OpSummary::getInstance(); 
virtual std::vector calculate_refs(); virtual std::vector get_plugin_outputs(); diff --git a/src/tests/functional/shared_test_classes/src/base/layer_test_utils.cpp b/src/tests/functional/shared_test_classes/src/base/layer_test_utils.cpp index 25f8783808d..70af7a50060 100644 --- a/src/tests/functional/shared_test_classes/src/base/layer_test_utils.cpp +++ b/src/tests/functional/shared_test_classes/src/base/layer_test_utils.cpp @@ -31,14 +31,14 @@ void LayerTestsCommon::Run() { // in case of crash jump will be made and work will be continued auto crashHandler = std::unique_ptr(new CommonTestUtils::CrashHandler()); - auto &s = Summary::getInstance(); + auto &s = ov::test::utils::OpSummary::getInstance(); s.setDeviceName(targetDevice); if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) { - s.updateOPsStats(functionRefs, PassRate::Statuses::SKIPPED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::SKIPPED); GTEST_SKIP() << "Disabled test due to configuration" << std::endl; } else { - s.updateOPsStats(functionRefs, PassRate::Statuses::CRASHED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::CRASHED); } // place to jump in case of a crash @@ -55,22 +55,22 @@ void LayerTestsCommon::Run() { GenerateInputs(); Infer(); Validate(); - s.updateOPsStats(functionRefs, PassRate::Statuses::PASSED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::PASSED); } catch (const std::runtime_error &re) { - s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED); GTEST_FATAL_FAILURE_(re.what()); } catch (const std::exception &ex) { - s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED); GTEST_FATAL_FAILURE_(ex.what()); } catch (...) { - s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED); GTEST_FATAL_FAILURE_("Unknown failure occurred."); } } else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) { IE_THROW() << "Crash happens"; } else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) { - s.updateOPsStats(functionRefs, PassRate::Statuses::HANGED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::HANGED); IE_THROW() << "Crash happens"; } } diff --git a/src/tests/functional/shared_test_classes/src/base/ov_subgraph.cpp b/src/tests/functional/shared_test_classes/src/base/ov_subgraph.cpp index 57654ceccfd..41b0ce45d5f 100644 --- a/src/tests/functional/shared_test_classes/src/base/ov_subgraph.cpp +++ b/src/tests/functional/shared_test_classes/src/base/ov_subgraph.cpp @@ -39,9 +39,9 @@ std::ostream& operator <<(std::ostream& os, const InputShape& inputShape) { void SubgraphBaseTest::run() { bool isCurrentTestDisabled = FuncTestUtils::SkipTestsConfig::currentTestIsDisabled(); - LayerTestsUtils::PassRate::Statuses status = isCurrentTestDisabled ? - LayerTestsUtils::PassRate::Statuses::SKIPPED : - LayerTestsUtils::PassRate::Statuses::CRASHED; + ov::test::utils::PassRate::Statuses status = isCurrentTestDisabled ? 
+ ov::test::utils::PassRate::Statuses::SKIPPED : + ov::test::utils::PassRate::Statuses::CRASHED; summary.setDeviceName(targetDevice); summary.updateOPsStats(function, status); @@ -81,22 +81,22 @@ void SubgraphBaseTest::run() { infer(); validate(); } - status = LayerTestsUtils::PassRate::Statuses::PASSED; + status = ov::test::utils::PassRate::Statuses::PASSED; } catch (const std::exception& ex) { - status = LayerTestsUtils::PassRate::Statuses::FAILED; + status = ov::test::utils::PassRate::Statuses::FAILED; errorMessage = ex.what(); } catch (...) { - status = LayerTestsUtils::PassRate::Statuses::FAILED; + status = ov::test::utils::PassRate::Statuses::FAILED; errorMessage = "Unknown failure occurred."; } summary.updateOPsStats(function, status); - if (status != LayerTestsUtils::PassRate::Statuses::PASSED) { + if (status != ov::test::utils::PassRate::Statuses::PASSED) { GTEST_FATAL_FAILURE_(errorMessage.c_str()); } } else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) { IE_THROW() << "Crash happens"; } else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) { - summary.updateOPsStats(function, LayerTestsUtils::PassRate::Statuses::HANGED); + summary.updateOPsStats(function, ov::test::utils::PassRate::Statuses::HANGED); IE_THROW() << "Crash happens"; } } diff --git a/src/tests/functional/shared_test_classes/src/single_layer/memory.cpp b/src/tests/functional/shared_test_classes/src/single_layer/memory.cpp index 08259c59bb4..cdc93db7412 100644 --- a/src/tests/functional/shared_test_classes/src/single_layer/memory.cpp +++ b/src/tests/functional/shared_test_classes/src/single_layer/memory.cpp @@ -58,20 +58,20 @@ namespace LayerTestsDefinitions { SKIP_IF_CURRENT_TEST_IS_DISABLED() using namespace LayerTestsUtils; auto crashHandler = [](int errCode) { - auto &s = Summary::getInstance(); + auto &s = ov::test::utils::OpSummary::getInstance(); s.saveReport(); std::cout << "Unexpected application crash!" << std::endl; std::abort(); }; signal(SIGSEGV, crashHandler); - auto &s = LayerTestsUtils::Summary::getInstance(); + auto &s = ov::test::utils::OpSummary::getInstance(); s.setDeviceName(targetDevice); if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) { - s.updateOPsStats(function, PassRate::Statuses::SKIPPED); + s.updateOPsStats(function, ov::test::utils::PassRate::Statuses::SKIPPED); GTEST_SKIP() << "Disabled test due to configuration" << std::endl; } else { - s.updateOPsStats(function, PassRate::Statuses::CRASHED); + s.updateOPsStats(function, ov::test::utils::PassRate::Statuses::CRASHED); } try { @@ -88,16 +88,16 @@ namespace LayerTestsDefinitions { Infer(); Validate(); } - s.updateOPsStats(functionRefs, PassRate::Statuses::PASSED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::PASSED); } catch (const std::runtime_error &re) { - s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED); GTEST_FATAL_FAILURE_(re.what()); } catch (const std::exception &ex) { - s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED); GTEST_FATAL_FAILURE_(ex.what()); } catch (...) 
{ - s.updateOPsStats(functionRefs, PassRate::Statuses::FAILED); + s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED); GTEST_FATAL_FAILURE_("Unknown failure occurred."); } } diff --git a/src/tests/ie_test_utils/common_test_utils/test_constants.hpp b/src/tests/ie_test_utils/common_test_utils/test_constants.hpp index f538c5319e4..2a63db71ddb 100644 --- a/src/tests/ie_test_utils/common_test_utils/test_constants.hpp +++ b/src/tests/ie_test_utils/common_test_utils/test_constants.hpp @@ -18,7 +18,8 @@ const char DEVICE_MULTI[] = "MULTI"; const char DEVICE_TEMPLATE[] = "TEMPLATE"; const char DEVICE_HETERO[] = "HETERO"; -const char REPORT_FILENAME[] = "report"; +const char OP_REPORT_FILENAME[] = "report_op"; +const char API_REPORT_FILENAME[] = "report_api"; const char REPORT_EXTENSION[] = ".xml"; const unsigned int maxFileNameLength = 140; diff --git a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/blob_utils.hpp b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/blob_utils.hpp index 0375387e480..bdfd8231cb9 100644 --- a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/blob_utils.hpp +++ b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/blob_utils.hpp @@ -735,7 +735,7 @@ enum class BlobType { Memory, Batched, Compound, -// Remote, + Remote, I420, NV12, }; @@ -748,8 +748,8 @@ inline std::ostream& operator<<(std::ostream& os, BlobType type) { return os << "Batched"; case BlobType::Compound: return os << "Compound"; -// case BlobType::Remote: -// return os << "Remote"; + case BlobType::Remote: + return os << "Remote"; case BlobType::I420: return os << "I40"; case BlobType::NV12: diff --git a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/layer_test_utils/environment.hpp b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/layer_test_utils/environment.hpp deleted file mode 100644 index a2994060b49..00000000000 --- a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/layer_test_utils/environment.hpp +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (C) 2018-2022 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#pragma once - -#include - -#include "ngraph/ngraph.hpp" - -#include "functional_test_utils/layer_test_utils/summary.hpp" - -namespace LayerTestsUtils { - -class TestEnvironment : public ::testing::Environment { -public: - void TearDown() override { - Summary::getInstance().saveReport(); - }; -}; -} // namespace LayerTestsUtils diff --git a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/layer_test_utils/summary.hpp b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/layer_test_utils/summary.hpp deleted file mode 100644 index 9737cdf97ec..00000000000 --- a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/layer_test_utils/summary.hpp +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright (C) 2018-2022 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#pragma once - -#include -#include - -#include "ngraph/ngraph.hpp" - -#include "common_test_utils/test_constants.hpp" - -namespace LayerTestsUtils { - -class Summary; - -class SummaryDestroyer { -private: - Summary *p_instance; -public: - ~SummaryDestroyer(); - - void initialize(Summary *p); -}; - - -struct PassRate { - enum Statuses { - PASSED, - FAILED, - SKIPPED, - CRASHED, - HANGED - }; - unsigned long passed = 0; - unsigned long failed = 0; 
- unsigned long skipped = 0; - unsigned long crashed = 0; - unsigned long hanged = 0; - bool isImplemented = false; - - PassRate() = default; - - PassRate(unsigned long p, unsigned long f, unsigned long s, unsigned long c, unsigned long h) { - passed = p; - failed = f; - skipped = s; - crashed = c; - hanged = h; - if (!isImplemented && passed > 0) { - isImplemented = true; - } - } - - void setImplementationStatus(bool implStatus) { - isImplemented = implStatus; - } - - float getPassrate() const { - if (passed + failed + crashed + hanged == 0) { - return 0.f; - } else { - return passed * 100.f / (passed + failed + skipped + crashed + hanged); - } - } -}; - -class Summary { -private: - static Summary *p_instance; - static SummaryDestroyer destroyer; - std::map opsStats = {}; - std::string deviceName; - bool isReported = false; - static size_t saveReportTimeout; - static bool extendReport; - static bool extractBody; - static bool saveReportWithUniqueName; - static const char *outputFolder; - std::vector opsets; - - friend class SummaryDestroyer; - - std::string getOpVersion(const ngraph::NodeTypeInfo &type_info); - -protected: - Summary(); - - ~Summary() = default; - -public: - void setDeviceName(std::string device) { deviceName = device; } - - std::map getOpStatisticFromReport(); - - std::string getDeviceName() const { return deviceName; } - - std::map getOPsStats() { return opsStats; } - - void updateOPsStats(const std::shared_ptr &function, const PassRate::Statuses &status); - void updateOPsImplStatus(const std::shared_ptr &function, const bool implStatus); - - void updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Statuses &status); - void updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool implStatus); - - static Summary &getInstance(); - std::vector getOpSets() { - return opsets; - } - - // #define IE_TEST_DEBUG - - #ifdef IE_TEST_DEBUG - void saveDebugReport(const char* className, const char* opName, unsigned long passed, unsigned long failed, - unsigned long skipped, unsigned long crashed, unsigned long hanged); - #endif //IE_TEST_DEBUG - - void saveReport(); - - static void setExtractBody(bool val) { extractBody = val; } - static bool getExtractBody() { return extractBody; } - - static void setExtendReport(bool val) { extendReport = val; } - static bool getExtendReport() { return extendReport; } - - static void setSaveReportWithUniqueName(bool val) { saveReportWithUniqueName = val; } - static bool getSaveReportWithUniqueName() { return saveReportWithUniqueName; } - - static void setSaveReportTimeout(size_t val) { saveReportTimeout = val; } - static size_t getSaveReportTimeout() { return saveReportTimeout; } - - static void setOutputFolder(const std::string &val) { outputFolder = val.c_str(); } -}; - -} // namespace LayerTestsUtils diff --git a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/api_summary.hpp b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/api_summary.hpp new file mode 100644 index 00000000000..e86595b7f04 --- /dev/null +++ b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/api_summary.hpp @@ -0,0 +1,58 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "summary.hpp" + +namespace ov { +namespace test { +namespace utils { + +enum class ov_entity { + ie_plugin, + ie_executable_network, + ie_infer_request, + ov_plugin, + ov_compiled_model, + ov_infer_request, + undefined 
+}; + +class ApiSummary; + +class ApiSummaryDestroyer { +private: + ApiSummary *p_instance; + +public: + ~ApiSummaryDestroyer(); + + void initialize(ApiSummary *p); +}; + +class ApiSummary : public virtual Summary { +private: + static ApiSummary *p_instance; + std::map> apiStats; + static const std::map apiInfo; + ov_entity getOvEntityByName(const std::string& name); + +protected: + ApiSummary(); + static ApiSummaryDestroyer destroyer; + friend class ApiSummaryDestroyer; + +public: + static ApiSummary &getInstance(); + inline void getStatisticFromReport(const std::string& filePath); + std::map> getApiStats() { return apiStats; } + void updateStat(ov_entity, const std::string& device, PassRate::Statuses); + void saveReport() override; +}; + + +} // namespace utils +} // namespace test +} // namespace ov \ No newline at end of file diff --git a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/environment.hpp b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/environment.hpp new file mode 100644 index 00000000000..9dce0982f6f --- /dev/null +++ b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/environment.hpp @@ -0,0 +1,28 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include + +#include "ngraph/ngraph.hpp" + +#include "functional_test_utils/summary/op_summary.hpp" +#include "functional_test_utils/summary/api_summary.hpp" + +namespace ov { +namespace test { +namespace utils { + +class TestEnvironment : public ::testing::Environment { +public: + void TearDown() override { + OpSummary::getInstance().saveReport(); + ApiSummary::getInstance().saveReport(); + }; +}; + +} // namespace utils +} // namespace test +} // namespace ov diff --git a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/op_info.hpp similarity index 100% rename from src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp rename to src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/op_info.hpp diff --git a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/op_summary.hpp b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/op_summary.hpp new file mode 100644 index 00000000000..f9eed73e159 --- /dev/null +++ b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/op_summary.hpp @@ -0,0 +1,62 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "summary.hpp" + +namespace ov { +namespace test { +namespace utils { + +class OpSummary; + +class OpSummaryDestroyer { +private: + OpSummary *p_instance; +public: + ~OpSummaryDestroyer(); + + void initialize(OpSummary *p); +}; + +class OpSummary : public virtual Summary { +private: + static OpSummary *p_instance; + static bool extractBody; + std::vector opsets; + std::map opsStats = {}; + + std::string getOpVersion(const ngraph::NodeTypeInfo &type_info); + +protected: + OpSummary(); + static OpSummaryDestroyer destroyer; + friend class OpSummaryDestroyer; + +public: + static OpSummary &getInstance(); + + std::map getOPsStats() { return opsStats; } + + std::vector getOpSets() { + return opsets; + } + + static void setExtractBody(bool val) { 
extractBody = val; } + static bool getExtractBody() { return extractBody; } + + std::map getStatisticFromReport(); + void saveReport() override; + + void updateOPsStats(const std::shared_ptr &function, const PassRate::Statuses &status); + void updateOPsImplStatus(const std::shared_ptr &function, const bool implStatus); + + void updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Statuses &status); + void updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool implStatus); +}; + +} // namespace utils +} // namespace test +} // namespace ov diff --git a/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/summary.hpp b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/summary.hpp new file mode 100644 index 00000000000..4713a0e3ab5 --- /dev/null +++ b/src/tests/ie_test_utils/functional_test_utils/include/functional_test_utils/summary/summary.hpp @@ -0,0 +1,109 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include + +#include "ngraph/ngraph.hpp" + +#include "common_test_utils/test_constants.hpp" +#include "common_test_utils/common_utils.hpp" + +namespace ov { +namespace test { +namespace utils { + +struct PassRate { + enum Statuses { + PASSED, + FAILED, + SKIPPED, + CRASHED, + HANGED + }; + unsigned long passed = 0; + unsigned long failed = 0; + unsigned long skipped = 0; + unsigned long crashed = 0; + unsigned long hanged = 0; + bool isImplemented = false; + + PassRate() = default; + + PassRate(unsigned long p, unsigned long f, unsigned long s, unsigned long c, unsigned long h) { + passed = p; + failed = f; + skipped = s; + crashed = c; + hanged = h; + if (!isImplemented && passed > 0) { + isImplemented = true; + } + } + + void setImplementationStatus(bool implStatus) { + isImplemented = implStatus; + } + + float getPassrate() const { + if (passed + failed + crashed + hanged == 0) { + return 0.f; + } else { + return passed * 100.f / (passed + failed + skipped + crashed + hanged); + } + } +}; + +class Summary { +protected: + std::string deviceName; + const char* reportFilename; + bool isReported = false; + std::string ts = CommonTestUtils::GetTimestamp(); + + static size_t saveReportTimeout; + static bool isCrashReported; + static bool isHangReported; + static bool extendReport; + static bool saveReportWithUniqueName; + static const char *outputFolder; + + Summary() = default; + + virtual ~Summary() = default; + +public: + void setDeviceName(std::string device); + + std::string getDeviceName() const; + + + // #define IE_TEST_DEBUG + +#ifdef IE_TEST_DEBUG + void saveDebugReport(const char* className, const char* opName, unsigned long passed, unsigned long failed, + unsigned long skipped, unsigned long crashed, unsigned long hanged); +#endif //IE_TEST_DEBUG + + virtual void saveReport() {} + + void setReportFilename(const std::string& val); + + static void setExtendReport(bool val); + static bool getExtendReport(); + + static void setSaveReportWithUniqueName(bool val); + static bool getSaveReportWithUniqueName(); + + static void setSaveReportTimeout(size_t val); + static size_t getSaveReportTimeout(); + + static void setOutputFolder(const std::string &val); +}; + +} // namespace utils +} // namespace test +} // namespace ov diff --git a/src/tests/ie_test_utils/functional_test_utils/layer_tests_summary/merge_xmls.py b/src/tests/ie_test_utils/functional_test_utils/layer_tests_summary/merge_xmls.py index 713bf8ffb5b..5e029467f93 100644 --- 
a/src/tests/ie_test_utils/functional_test_utils/layer_tests_summary/merge_xmls.py +++ b/src/tests/ie_test_utils/functional_test_utils/layer_tests_summary/merge_xmls.py @@ -17,16 +17,33 @@ def parse_arguments(): input_folders_help = "Paths to folders with reports to merge" output_folders_help = "Path to folder to save report" output_filename_help = "Output report filename" + report_type_help = "Report type: OP or API" parser.add_argument("-i", "--input_folders", help=input_folders_help, nargs="*", required=True) - parser.add_argument("-o", "--output_folder", help=output_folders_help, default="") + parser.add_argument("-o", "--output_folder", help=output_folders_help, default=".") parser.add_argument("-f", "--output_filename", help=output_filename_help, default="report") + parser.add_argument("-t", "--report_type", help=report_type_help, default="OP") return parser.parse_args() -def aggregate_test_results(results: ET.SubElement, xml_reports: list): - timestamp = None +def update_result_node(xml_node: ET.SubElement, aggregated_res: ET.SubElement): + for attr_name in xml_node.attrib: + if attr_name == "passrate": + continue + if attr_name == "implemented": + xml_value = xml_node.attrib.get(attr_name) == "true" + aggregated_value = aggregated_res.attrib.get(attr_name) == "true" + str_value = "true" if xml_value or aggregated_value else "false" + aggregated_res.set(attr_name, str_value) + continue + xml_value = int(xml_node.attrib.get(attr_name)) + aggregated_value = int(aggregated_res.attrib.get(attr_name)) + aggregated_res.set(attr_name, str(xml_value + aggregated_value)) + + +def aggregate_test_results(aggregated_results: ET.SubElement, xml_reports: list, report_type: str): + aggregated_timestamp = None for xml in xml_reports: logger.info(f" Processing: {xml}") try: @@ -34,41 +51,47 @@ def aggregate_test_results(results: ET.SubElement, xml_reports: list): except ET.ParseError: logger.error(f' {xml} is corrupted and skipped') continue + xml_results = xml_root.find("results") xml_timestamp = xml_root.get("timestamp") - if (timestamp is None) or (xml_timestamp < timestamp): - timestamp = xml_timestamp - for device in xml_root.find("results"): - device_results = results.find(device.tag) - if device_results is None: - results.append(device) - else: - device_results_report = xml_root.find("results").find(device.tag) - for op in device_results_report: - if device_results.find(op.tag) is not None: - entry = device_results.find(op.tag) - for attr_name in device_results.find(op.tag).attrib: - if attr_name == "passrate": - continue - if attr_name == "implemented": - xml_value = op.attrib.get(attr_name) == "true" - aggregated_value = entry.attrib.get(attr_name) == "true" - str_value = "true" if xml_value or aggregated_value else "false" - device_results.find(entry.tag).set(attr_name, str_value) - continue - xml_value = int(op.attrib.get(attr_name)) - aggregated_value = int(entry.attrib.get(attr_name)) - device_results.find(entry.tag).set(attr_name, str(xml_value + aggregated_value)) - else: - device_results.append(op) - return timestamp + if aggregated_timestamp is None or xml_timestamp < aggregated_timestamp: + aggregated_timestamp = xml_timestamp + for xml_device_entry in xml_results: + aggregated_device_results = aggregated_results.find(xml_device_entry.tag) + if aggregated_device_results is None: + aggregated_results.append(xml_device_entry) + continue + # op or api_type + for xml_results_entry in xml_device_entry: +
aggregated_results_entry = aggregated_device_results.find(xml_results_entry.tag) + if aggregated_results_entry is None: + aggregated_device_results.append(xml_results_entry) + continue + if report_type == "OP": + update_result_node(xml_results_entry, aggregated_results_entry) + else: + for xml_real_device_entry in xml_results_entry: + aggregated_real_device_api_report = aggregated_results_entry.find(xml_real_device_entry.tag) + if aggregated_real_device_api_report is None: + aggregated_results_entry.append(xml_real_device_entry) + continue + update_result_node(xml_real_device_entry, aggregated_real_device_api_report) + return aggregated_timestamp -def merge_xml(input_folder_paths: list, output_folder_paths: str, output_filename: str): +def merge_xml(input_folder_paths: list, output_folder_paths: str, output_filename: str, report_type: str): logger.info(f" Processing is finished") summary = ET.Element("report") results = ET.SubElement(summary, "results") - ops_list = ET.SubElement(summary, "ops_list") + entity_name = None + if report_type == "OP": + entity_name = "ops_list" + elif report_type == "API": + entity_name = "api_list" + else: + raise Exception(f"Failed to create aggregated report. Incorrect report type: {report_type}") + + entity_list = ET.SubElement(summary, entity_name) for folder_path in input_folder_paths: if not os.path.exists(folder_path): @@ -78,7 +101,11 @@ logger.error(f" {folder_path} is not a directory!") continue - xml_reports = glob.glob(os.path.join(folder_path, 'report*.xml')) + xml_reports = None + if report_type == "OP": + xml_reports = glob.glob(os.path.join(folder_path, 'report_op*.xml')) + elif report_type == "API": + xml_reports = glob.glob(os.path.join(folder_path, 'report_api*.xml')) xml_root = None for xml_report in xml_reports: @@ -89,12 +116,15 @@ logger.error(f'{xml_report} is incorrect!
Error to get a xml root') if xml_root is None: logger.error(f'{folder_path} does not contain the correct xml files') - for op in xml_root.find("ops_list"): - if ops_list.find(op.tag) is None: - ET.SubElement(ops_list, op.tag) - - timestamp = aggregate_test_results(results, xml_reports) - utils.update_passrates(results) + for entity in xml_root.find(entity_name): + if entity_list.find(entity.tag) is None: + ET.SubElement(entity_list, entity.tag) + timestamp = aggregate_test_results(results, xml_reports, report_type) + if report_type == "OP": + utils.update_passrates(results) + else: + for sub_result in results: + utils.update_passrates(sub_result) summary.set("timestamp", timestamp) logger.info(f" Processing is finished") @@ -110,4 +140,4 @@ if __name__ == "__main__": arguments = parse_arguments() - merge_xml(arguments.input_folders, arguments.output_folder, arguments.output_filename) + merge_xml(arguments.input_folders, arguments.output_folder, arguments.output_filename, arguments.report_type) \ No newline at end of file diff --git a/src/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/update_skip_test_config.py b/src/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/update_skip_test_config.py index 2b696cfc4df..8b93d4563b8 100644 --- a/src/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/update_skip_test_config.py +++ b/src/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/update_skip_test_config.py @@ -58,7 +58,8 @@ def get_conformance_hung_test(test_log_dirs: list): for log_file in glob.glob(os.path.join(test_log_dir, '*/*')): with open(log_file) as log: content = log.read() if not (is_hung_test(content) and is_conformance(content)): + print(log_file) continue device = get_device_name(content) if 'arm' in content or 'arm' in log_file: diff --git a/src/tests/ie_test_utils/functional_test_utils/src/plugin_cache.cpp b/src/tests/ie_test_utils/functional_test_utils/src/plugin_cache.cpp index 23cd9a21095..62596d39f26 100644 --- a/src/tests/ie_test_utils/functional_test_utils/src/plugin_cache.cpp +++ b/src/tests/ie_test_utils/functional_test_utils/src/plugin_cache.cpp @@ -2,6 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // +#include "common_test_utils/test_constants.hpp" #include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/ov_plugin_cache.hpp" #include "common_test_utils/file_utils.hpp" @@ -64,8 +65,13 @@ std::shared_ptr PluginCache::ie(const std::string &device } catch (...)
diff --git a/src/tests/ie_test_utils/functional_test_utils/src/plugin_cache.cpp b/src/tests/ie_test_utils/functional_test_utils/src/plugin_cache.cpp
index 23cd9a21095..62596d39f26 100644
--- a/src/tests/ie_test_utils/functional_test_utils/src/plugin_cache.cpp
+++ b/src/tests/ie_test_utils/functional_test_utils/src/plugin_cache.cpp
@@ -2,6 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 //

+#include "common_test_utils/test_constants.hpp"
 #include "functional_test_utils/plugin_cache.hpp"
 #include "functional_test_utils/ov_plugin_cache.hpp"
 #include "common_test_utils/file_utils.hpp"
@@ -64,8 +65,13 @@ std::shared_ptr<InferenceEngine::Core> PluginCache::ie(const std::string &device
     } catch (...) {}

     if (!deviceToCheck.empty()) {
-        std::vector<std::string> metrics = ie_core->GetMetric(deviceToCheck, METRIC_KEY(SUPPORTED_METRICS));
-
+        std::vector<std::string> metrics;
+        if (deviceToCheck.find(':') != std::string::npos) {
+            std::string realDevice = deviceToCheck.substr(0, deviceToCheck.find(':'));
+            metrics = {ie_core->GetMetric(realDevice, METRIC_KEY(SUPPORTED_METRICS))};
+        } else {
+            metrics = {ie_core->GetMetric(deviceToCheck, METRIC_KEY(SUPPORTED_METRICS))};
+        }
         if (std::find(metrics.begin(), metrics.end(), METRIC_KEY(AVAILABLE_DEVICES)) != metrics.end()) {
             std::vector<std::string> availableDevices = ie_core->GetMetric(deviceToCheck, METRIC_KEY(AVAILABLE_DEVICES));
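
The plugin_cache.cpp change queries SUPPORTED_METRICS on the real plugin when a compound device such as "HETERO:TEMPLATE" is passed, since the part before ':' names the plugin to load. The same parsing convention, sketched in Python:

    # Sketch of the compound-device-name convention used above.
    def real_plugin_name(device: str) -> str:
        # "HETERO:TEMPLATE" -> "HETERO"; a plain "CPU" is returned unchanged.
        return device.split(":", 1)[0]

    assert real_plugin_name("HETERO:TEMPLATE") == "HETERO"
    assert real_plugin_name("CPU") == "CPU"
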
diff --git a/src/tests/ie_test_utils/functional_test_utils/src/summary/api_summary.cpp b/src/tests/ie_test_utils/functional_test_utils/src/summary/api_summary.cpp
new file mode 100644
index 00000000000..6a85fb32951
--- /dev/null
+++ b/src/tests/ie_test_utils/functional_test_utils/src/summary/api_summary.cpp
@@ -0,0 +1,225 @@
+// Copyright (C) 2018-2022 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include
+
+#include "functional_test_utils/summary/api_summary.hpp"
+#include "common_test_utils/file_utils.hpp"
+
+using namespace ov::test::utils;
+
+#ifdef _WIN32
+# define getpid _getpid
+#endif
+
+ApiSummary *ApiSummary::p_instance = nullptr;
+ApiSummaryDestroyer ApiSummary::destroyer;
+const std::map<ov_entity, std::string> ApiSummary::apiInfo({
+    { ov_entity::ov_infer_request, "ov_infer_request"},
+    { ov_entity::ov_plugin, "ov_plugin"},
+    { ov_entity::ov_compiled_model, "ov_compiled_model"},
+    { ov_entity::ie_infer_request, "ie_infer_request"},
+    { ov_entity::ie_plugin, "ie_plugin"},
+    { ov_entity::ie_executable_network, "ie_executable_network"},
+    { ov_entity::undefined, "undefined"},
+});
+
+ApiSummaryDestroyer::~ApiSummaryDestroyer() {
+    delete p_instance;
+}
+
+void ApiSummaryDestroyer::initialize(ApiSummary *p) {
+    p_instance = p;
+}
+
+ApiSummary::ApiSummary() : apiStats() {
+    reportFilename = CommonTestUtils::API_REPORT_FILENAME;
+    isCrashReported = false;
+    isHangReported = false;
+}
+
+ApiSummary &ApiSummary::getInstance() {
+    if (!p_instance) {
+        p_instance = new ApiSummary();
+        destroyer.initialize(p_instance);
+    }
+    return *p_instance;
+}
+
+void ApiSummary::updateStat(ov_entity entity, const std::string& target_device, PassRate::Statuses status) {
+    if (apiStats.empty()) {
+        std::string outputFilePath = outputFolder + std::string(CommonTestUtils::FileSeparator) + reportFilename + CommonTestUtils::REPORT_EXTENSION;
+        const bool fileExists = CommonTestUtils::fileExists(outputFilePath);
+        if (extendReport && !isReported && fileExists) {
+            getStatisticFromReport(outputFilePath);
+        }
+    }
+    std::string real_device = target_device.substr(0, target_device.find(':'));
+    if (deviceName.empty()) {
+        deviceName = real_device == target_device ? target_device : target_device.substr(target_device.find(':') + 1);
+    }
+    if (apiStats.find(entity) == apiStats.end()) {
+        apiStats.insert({entity, {{real_device, PassRate()}}});
+    }
+    auto& cur_stat = apiStats[entity];
+    if (cur_stat.find(real_device) == cur_stat.end()) {
+        cur_stat.insert({real_device, PassRate()});
+    }
+    if (isCrashReported) {
+        cur_stat[real_device].crashed--;
+        isCrashReported = false;
+    }
+    if (isHangReported) {
+        isHangReported = false;
+        return;
+    }
+    switch (status) {
+    case PassRate::Statuses::SKIPPED: {
+        cur_stat[real_device].skipped++;
+        break;
+    }
+    case PassRate::Statuses::PASSED: {
+        if (!cur_stat[real_device].isImplemented) {
+            cur_stat[real_device].isImplemented = true;
+        }
+        cur_stat[real_device].passed++;
+        break;
+    }
+    case PassRate::Statuses::HANGED: {
+        cur_stat[real_device].hanged++;
+        isHangReported = true;
+        break;
+    }
+    case PassRate::Statuses::FAILED: {
+        cur_stat[real_device].failed++;
+        break;
+    }
+    case PassRate::Statuses::CRASHED:
+        cur_stat[real_device].crashed++;
+        isCrashReported = true;
+        break;
+    }
+}
+
+ov_entity ApiSummary::getOvEntityByName(const std::string& name) {
+    for (const auto& api : apiInfo) {
+        if (name == api.second) {
+            return api.first;
+        }
+    }
+    return ov_entity::undefined;
+}
+
+void ApiSummary::getStatisticFromReport(const std::string& filePath) {
+    pugi::xml_document doc;
+
+    doc.load_file(filePath.c_str());
+    pugi::xml_node root = doc.child("report");
+
+    pugi::xml_node resultsNode = root.child("results");
+    pugi::xml_node currentDeviceNode = resultsNode.child(deviceName.c_str());
+    for (auto &entityNode : currentDeviceNode.children()) {
+        std::string entityName = entityNode.name();
+        ov_entity entity = getOvEntityByName(entityName);
+        for (const auto& realDeviceNode : entityNode.children()) {
+            std::string realDeviceName = realDeviceNode.name();
+            auto p = std::stoi(realDeviceNode.attribute("passed").value());
+            auto f = std::stoi(realDeviceNode.attribute("failed").value());
+            auto s = std::stoi(realDeviceNode.attribute("skipped").value());
+            auto c = std::stoi(realDeviceNode.attribute("crashed").value());
+            auto h = std::stoi(realDeviceNode.attribute("hanged").value());
+            PassRate entity_stat(p, f, s, c, h);
+            if (apiStats.find(entity) == apiStats.end()) {
+                apiStats.insert({entity, {}});
+            }
+            apiStats[entity].insert({realDeviceName, entity_stat});
+        }
+    }
+}
+
+void ApiSummary::saveReport() {
+    std::string filename = reportFilename;
+    if (saveReportWithUniqueName) {
+        auto processId = std::to_string(getpid());
+        filename += "_" + processId + "_" + ts;
+    }
+    filename += CommonTestUtils::REPORT_EXTENSION;
+
+    if (!CommonTestUtils::directoryExists(outputFolder)) {
+        CommonTestUtils::createDirectoryRecursive(outputFolder);
+    }
+
+    std::string outputFilePath = outputFolder + std::string(CommonTestUtils::FileSeparator) + filename;
+
+    auto &summary = ApiSummary::getInstance();
+    auto stats = summary.getApiStats();
+
+    pugi::xml_document doc;
+
+    const bool fileExists = CommonTestUtils::fileExists(outputFilePath);
+
+    time_t rawtime;
+    struct tm *timeinfo;
+    char timeNow[80];
+
+    time(&rawtime);
+    // cpplint requires localtime_r instead, which is not available in C++11
+    timeinfo = localtime(&rawtime);  // NOLINT
+
+    strftime(timeNow, sizeof(timeNow), "%d-%m-%Y %H:%M:%S", timeinfo);
+
+    pugi::xml_node root;
+    if (fileExists) {
+        doc.load_file(outputFilePath.c_str());
+        root = doc.child("report");
+        // Ugly, but shorter than writing a predicate for find_attribute() to update the existing one
+        root.remove_attribute("timestamp");
+        root.append_attribute("timestamp").set_value(timeNow);
+
+        root.remove_child("api_list");
+        root.child("results").remove_child(deviceName.c_str());
+    } else {
+        root = doc.append_child("report");
+        root.append_attribute("timestamp").set_value(timeNow);
+        root.append_child("results");
+    }
+
+    pugi::xml_node opsNode = root.append_child("api_list");
+    for (const auto &api : apiInfo) {
+        std::string name = api.second;
+        pugi::xml_node entry = opsNode.append_child(name.c_str());
+        (void) entry;
+    }
+
+    pugi::xml_node resultsNode = root.child("results");
+    pugi::xml_node currentDeviceNode = resultsNode.append_child(summary.deviceName.c_str());
+    std::unordered_set<std::string> opList;
+    for (const auto &stat_entity : stats) {
+        pugi::xml_node currentEntity = currentDeviceNode.append_child(apiInfo.at(stat_entity.first).c_str());
+        for (const auto& stat_device : stat_entity.second) {
+            pugi::xml_node entry = currentEntity.append_child(stat_device.first.c_str());
+            entry.append_attribute("implemented").set_value(stat_device.second.isImplemented);
+            entry.append_attribute("passed").set_value(stat_device.second.passed);
+            entry.append_attribute("failed").set_value(stat_device.second.failed);
+            entry.append_attribute("skipped").set_value(stat_device.second.skipped);
+            entry.append_attribute("crashed").set_value(stat_device.second.crashed);
+            entry.append_attribute("hanged").set_value(stat_device.second.hanged);
+            entry.append_attribute("passrate").set_value(stat_device.second.getPassrate());
+        }
+    }
+
+    auto exitTime = std::chrono::system_clock::now() + std::chrono::seconds(saveReportTimeout);
+    bool result = false;
+    do {
+        result = doc.save_file(outputFilePath.c_str());
+    } while (!result && std::chrono::system_clock::now() < exitTime);
+
+    if (!result) {
+        std::string errMessage = "Failed to write report to " + outputFilePath;
+        throw std::runtime_error(errMessage);
+    } else {
+        isReported = true;
+    }
+}
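
saveReport() above writes results nested as results/device/entity/real-device, with the counters stored as XML attributes. A sketch of reading one device's entries back with ElementTree; the file name and device name are illustrative:

    # Assumes a report produced by ApiSummary::saveReport(); names are illustrative.
    import xml.etree.ElementTree as ET

    root = ET.parse("report_api.xml").getroot()
    for entity in root.find("results").find("TEMPLATE"):
        for real_device in entity:
            print(entity.tag, real_device.tag,
                  real_device.attrib["passed"], real_device.attrib["passrate"])
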
diff --git a/src/tests/ie_test_utils/functional_test_utils/src/layer_test_utils/summary.cpp b/src/tests/ie_test_utils/functional_test_utils/src/summary/op_summary.cpp
similarity index 77%
rename from src/tests/ie_test_utils/functional_test_utils/src/layer_test_utils/summary.cpp
rename to src/tests/ie_test_utils/functional_test_utils/src/summary/op_summary.cpp
index d1e47158cff..2f0cc99f58f 100644
--- a/src/tests/ie_test_utils/functional_test_utils/src/layer_test_utils/summary.cpp
+++ b/src/tests/ie_test_utils/functional_test_utils/src/summary/op_summary.cpp
@@ -4,32 +4,29 @@

 #include

-#include "functional_test_utils/layer_test_utils/summary.hpp"
+#include "functional_test_utils/summary/op_summary.hpp"
 #include "common_test_utils/file_utils.hpp"

-using namespace LayerTestsUtils;
+using namespace ov::test::utils;

 #ifdef _WIN32
 # define getpid _getpid
 #endif

-Summary *Summary::p_instance = nullptr;
-bool Summary::extendReport = false;
-bool Summary::extractBody = false;
-bool Summary::saveReportWithUniqueName = false;
-size_t Summary::saveReportTimeout = 0;
-const char* Summary::outputFolder = ".";
-SummaryDestroyer Summary::destroyer;
+OpSummary *OpSummary::p_instance = nullptr;
+bool OpSummary::extractBody = false;
+OpSummaryDestroyer OpSummary::destroyer;

-SummaryDestroyer::~SummaryDestroyer() {
+OpSummaryDestroyer::~OpSummaryDestroyer() {
     delete p_instance;
 }

-void SummaryDestroyer::initialize(Summary *p) {
+void OpSummaryDestroyer::initialize(OpSummary *p) {
     p_instance = p;
 }

-Summary::Summary() {
+OpSummary::OpSummary() {
+    reportFilename = CommonTestUtils::OP_REPORT_FILENAME;
     opsets.push_back(ngraph::get_opset1());
     opsets.push_back(ngraph::get_opset2());
     opsets.push_back(ngraph::get_opset3());
@@ -41,75 +38,67 @@ Summary::Summary() {
     opsets.push_back(ngraph::get_opset9());
 }

-Summary &Summary::getInstance() {
+OpSummary &OpSummary::getInstance() {
     if (!p_instance) {
-        p_instance = new Summary();
+        p_instance = new OpSummary();
         destroyer.initialize(p_instance);
     }
     return *p_instance;
 }

-void Summary::updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Statuses &status) {
+void OpSummary::updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Statuses &status) {
     auto it = opsStats.find(op);
-    if (it != opsStats.end()) {
-        auto &passrate = it->second;
-        switch (status) {
-            case PassRate::PASSED:
-                if (!passrate.isImplemented) {
-                    passrate.isImplemented = true;
-                }
-                passrate.passed++;
-                passrate.crashed--;
-                break;
-            case PassRate::FAILED:
-                passrate.failed++;
-                passrate.crashed--;
-                break;
-            case PassRate::SKIPPED:
-                passrate.skipped++;
-                break;
-            case PassRate::CRASHED:
-                passrate.crashed++;
-                break;
-            case PassRate::HANGED:
-                passrate.hanged++;
-                passrate.crashed--;
-                break;
+    if (opsStats.find(op) == opsStats.end()) {
+        opsStats.insert({op, PassRate()});
+    }
+    auto &passrate = opsStats[op];
+    if (isCrashReported) {
+        isCrashReported = false;
+        passrate.crashed--;
+    }
+    if (isHangReported) {
+        isHangReported = false;
+        return;
+    }
+    switch (status) {
+        case PassRate::PASSED:
+            if (!passrate.isImplemented) {
+                passrate.isImplemented = true;
+            }
+            passrate.passed++;
+            break;
+        case PassRate::FAILED:
+            passrate.failed++;
+            break;
+        case PassRate::SKIPPED:
+            passrate.skipped++;
+            break;
+        case PassRate::CRASHED: {
+            passrate.crashed++;
+            isCrashReported = true;
+            return;
         }
-    } else {
-        switch (status) {
-            case PassRate::PASSED:
-                opsStats[op] = PassRate(1, 0, 0, 0, 0);
-                break;
-            case PassRate::FAILED:
-                opsStats[op] = PassRate(0, 1, 0, 0, 0);
-                break;
-            case PassRate::SKIPPED:
-                opsStats[op] = PassRate(0, 0, 1, 0, 0);
-                break;
-            case PassRate::CRASHED:
-                opsStats[op] = PassRate(0, 0, 0, 1, 0);
-                break;
-            case PassRate::HANGED:
-                opsStats[op] = PassRate(0, 0, 0, 0, 1);
-                break;
+        case PassRate::HANGED: {
+            passrate.hanged++;
+            isHangReported = true;
+            break;
         }
     }
 }

-void Summary::updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool implStatus) {
+void OpSummary::updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool implStatus) {
     auto it = opsStats.find(op);
     if (it != opsStats.end()) {
         if (!it->second.isImplemented && implStatus) {
             it->second.isImplemented = true;
         }
     } else {
-        opsStats[op] = PassRate(0, 0, 0, 0, 0);
+        opsStats[op] = PassRate();
         opsStats[op].isImplemented = implStatus;
     }
 }

-std::string Summary::getOpVersion(const ngraph::NodeTypeInfo &type_info) {
+std::string OpSummary::getOpVersion(const ngraph::NodeTypeInfo &type_info) {
     for (size_t i = 0; i < opsets.size(); i++) {
         if (opsets[i].contains_type(type_info)) {
             return std::to_string(i+1);
@@ -118,14 +107,14 @@ std::string Summary::getOpVersion(const ngraph::NodeTypeInfo &type_info) {
     return "undefined";
 }
-std::map<std::string, PassRate> Summary::getOpStatisticFromReport() {
+std::map<std::string, PassRate> OpSummary::getStatisticFromReport() {
     pugi::xml_document doc;

     std::ifstream file;
-    file.open(CommonTestUtils::REPORT_FILENAME);
+    file.open(reportFilename);

     pugi::xml_node root;
-    doc.load_file(CommonTestUtils::REPORT_FILENAME);
+    doc.load_file(reportFilename);
     root = doc.child("report");

     pugi::xml_node resultsNode = root.child("results");
@@ -144,7 +133,7 @@ std::map<std::string, PassRate> Summary::getOpStatisticFromReport() {
     return oldOpsStat;
 }

-void Summary::updateOPsStats(const std::shared_ptr<ngraph::Function> &function, const PassRate::Statuses &status) {
+void OpSummary::updateOPsStats(const std::shared_ptr<ngraph::Function> &function, const PassRate::Statuses &status) {
     if (function->get_parameters().empty()) {
         return;
     }
@@ -160,8 +149,8 @@ void Summary::updateOPsStats(const std::shared_ptr<ngraph::Function> &function,

     for (const auto &op : function->get_ordered_ops()) {
         if ((ngraph::is_type(op) ||
-             ngraph::is_type(op) ||
-             ngraph::is_type(op)) && isFunctionalGraph) {
+             ngraph::is_type(op) ||
+             ngraph::is_type(op)) && isFunctionalGraph) {
             continue;
         }
         if (extractBody) {
@@ -189,7 +178,7 @@ void Summary::updateOPsStats(const std::shared_ptr<ngraph::Function> &function,
     }
 }

-void Summary::updateOPsImplStatus(const std::shared_ptr<ngraph::Function> &function, const bool implStatus) {
+void OpSummary::updateOPsImplStatus(const std::shared_ptr<ngraph::Function> &function, const bool implStatus) {
     if (function->get_parameters().empty()) {
         return;
     }
@@ -235,15 +224,19 @@ void Summary::saveDebugReport(const char* className, const char* opName, unsigne
 }
 #endif  //IE_TEST_DEBUG

-void Summary::saveReport() {
+void OpSummary::saveReport() {
     if (isReported) {
         return;
     }

-    std::string filename = CommonTestUtils::REPORT_FILENAME;
+    if (opsStats.empty()) {
+        return;
+    }
+
+    std::string filename = reportFilename;
     if (saveReportWithUniqueName) {
         auto processId = std::to_string(getpid());
-        filename += "_" + processId + "_" + std::string(CommonTestUtils::GetTimestamp());
+        filename += "_" + processId + "_" + ts;
     }
     filename += CommonTestUtils::REPORT_EXTENSION;

@@ -259,7 +252,7 @@ void Summary::saveReport() {
         opsInfo.insert(type_info_set.begin(), type_info_set.end());
     }

-    auto &summary = Summary::getInstance();
+    auto &summary = OpSummary::getInstance();
     auto stats = summary.getOPsStats();

     pugi::xml_document doc;
@@ -316,7 +309,7 @@ void Summary::saveReport() {
     }

     if (extendReport && fileExists) {
-        auto opStataFromReport = summary.getOpStatisticFromReport();
+        auto opStataFromReport = summary.getStatisticFromReport();
         for (auto &item : opStataFromReport) {
             pugi::xml_node entry;
             if (opList.find(item.first) == opList.end()) {
@@ -339,8 +332,8 @@ void Summary::saveReport() {
             PassRate obj(p, f, s, c, h);

             (implStatus || obj.isImplemented)
-                ? entry.attribute("implemented").set_value(true)
-                : entry.attribute("implemented").set_value(false);
+                ? entry.attribute("implemented").set_value(true)
+                : entry.attribute("implemented").set_value(false);
             entry.attribute("passed").set_value(obj.passed);
             entry.attribute("failed").set_value(obj.failed);
             entry.attribute("skipped").set_value(obj.skipped);
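
The reworked updateOPsStats() above records CRASHED provisionally: the crash counter is bumped up front, and taken back on the next update if the test turns out to finish with a real status (HANGED is handled similarly, with the follow-up status swallowed). The crash part of that protocol, restated as a small Python sketch:

    # Sketch of the provisional-crash bookkeeping used by OpSummary/ApiSummary.
    class PassRateSketch:
        def __init__(self):
            self.passed = self.failed = self.skipped = self.crashed = self.hanged = 0
            self.crash_pending = False

        def update(self, status: str):
            if self.crash_pending:
                self.crashed -= 1  # the provisionally reported crash did not stand
                self.crash_pending = False
            if status == "CRASHED":
                self.crashed += 1  # provisional until the next update arrives
                self.crash_pending = True
            else:
                name = status.lower()  # "passed", "failed", "skipped" or "hanged"
                setattr(self, name, getattr(self, name) + 1)
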
entry.attribute("implemented").set_value(true) + : entry.attribute("implemented").set_value(false); entry.attribute("passed").set_value(obj.passed); entry.attribute("failed").set_value(obj.failed); entry.attribute("skipped").set_value(obj.skipped); diff --git a/src/tests/ie_test_utils/functional_test_utils/src/summary/summary.cpp b/src/tests/ie_test_utils/functional_test_utils/src/summary/summary.cpp new file mode 100644 index 00000000000..c290e234f4e --- /dev/null +++ b/src/tests/ie_test_utils/functional_test_utils/src/summary/summary.cpp @@ -0,0 +1,37 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "functional_test_utils/summary/summary.hpp" + +using namespace ov::test::utils; + +bool Summary::extendReport = false; +bool Summary::saveReportWithUniqueName = false; +bool Summary::isCrashReported = false; +bool Summary::isHangReported = false; +size_t Summary::saveReportTimeout = 0; +const char* Summary::outputFolder = "."; + +void Summary::setDeviceName(std::string device) { + deviceName = device; +} + +std::string Summary::getDeviceName() const { + return deviceName; +} + +void Summary::setReportFilename(const std::string& val) { + reportFilename = val.c_str(); +} + +void Summary::setExtendReport(bool val) { extendReport = val; } +bool Summary::getExtendReport() { return extendReport; } + +void Summary::setSaveReportWithUniqueName(bool val) { saveReportWithUniqueName = val; } +bool Summary::getSaveReportWithUniqueName() { return saveReportWithUniqueName; } + +void Summary::setSaveReportTimeout(size_t val) { saveReportTimeout = val; } +size_t Summary::getSaveReportTimeout() { return saveReportTimeout; } + +void Summary::setOutputFolder(const std::string &val) { outputFolder = val.c_str(); } \ No newline at end of file