API Conformance report: C++ & Merge XML Part (#11245)

* Separate Summary, OpSummary, ApiSummary

* final separation

* ChangeNamespaces

* git status

* filename

* link error

* Fix linking

* Fix compilation

* Report - ieplugin

* Next step

* Fix build

* Changing inheritance + fix build

* Fix run

* Summary

* Fix comments

* Fix the run

* fix build

* Extend report

* Fix build

* fix template

* api_report flag

* rebase to master branch

* fix

* fix build

* myriad

* fix problem with crash

* Fix some mistakes

* python merge

* fix tests

* tmp

* Update Merge_xml script

* Fix op

* fix build

* Fix bug with --report_unique_name

* build

* remove extra

* gg

* gpu build

* c

* Fix issue with win

* infer_req

* compiled + exec net

* ov_plugin

* ie_plugin

* Fix comments

* ff

* fix last comment

* fix build

* fix template func

* Apply comments

* Apply comments

* fix ci

* build

* build

* build

* inl

* Remove extra

* fix merge_xml

* fix build

* remarks

* skip one test

Co-authored-by: Alexander Zhogov <alexander.zhogov@intel.com>
This commit is contained in:
Irina Efode 2022-08-15 18:57:10 +04:00 committed by GitHub
parent dd55f434c3
commit d04521a7c3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
132 changed files with 2561 additions and 2015 deletions

View File

@ -17,7 +17,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Batch_Non_0) {
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "CNHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "CNHW");
const std::string tensor_name = "tensor_input0"; const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -31,7 +31,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_remote_tensor_default) {
auto batch_shape = Shape{batch, 4, 4, 4}; auto batch_shape = Shape{batch, 4, 4, 4};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0"; const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape));
@ -49,7 +49,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Strides) {
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW");
std::vector<float> buffer1(one_shape_size_stride, 10); std::vector<float> buffer1(one_shape_size_stride, 10);
std::vector<float> buffer2(one_shape_size_stride, 20); std::vector<float> buffer2(one_shape_size_stride, 20);
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();

View File

@ -69,7 +69,13 @@ INSTANTIATE_TEST_SUITE_P(
// IE Class SetConfig // IE Class SetConfig
// //
using IEClassSetConfigTestHETERO = BehaviorTestsUtils::IEClassNetworkTest; class IEClassSetConfigTestHETERO : public BehaviorTestsUtils::IEClassNetworkTest,
public BehaviorTestsUtils::IEPluginTestBase {
void SetUp() override {
IEClassNetworkTest::SetUp();
IEPluginTestBase::SetUp();
}
};
TEST_F(IEClassSetConfigTestHETERO, smoke_SetConfigNoThrow) { TEST_F(IEClassSetConfigTestHETERO, smoke_SetConfigNoThrow) {
{ {
@ -115,7 +121,13 @@ INSTANTIATE_TEST_SUITE_P(
smoke_IEClassGetConfigTest, IEClassGetConfigTest, smoke_IEClassGetConfigTest, IEClassGetConfigTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)); ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
using IEClassGetConfigTestTEMPLATE = BehaviorTestsUtils::IEClassNetworkTest; class IEClassGetConfigTestTEMPLATE : public BehaviorTestsUtils::IEClassNetworkTest,
public BehaviorTestsUtils::IEPluginTestBase {
void SetUp() override {
IEClassNetworkTest::SetUp();
IEPluginTestBase::SetUp();
}
};
TEST_F(IEClassGetConfigTestTEMPLATE, smoke_GetConfigNoThrow) { TEST_F(IEClassGetConfigTestTEMPLATE, smoke_GetConfigNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

View File

@ -9,7 +9,7 @@
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "pugixml.hpp" #include "pugixml.hpp"
#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp" #include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"
namespace SubgraphsDumper { namespace SubgraphsDumper {

View File

@ -10,7 +10,7 @@
#include <memory> #include <memory>
#include <ngraph/ngraph.hpp> #include <ngraph/ngraph.hpp>
#include "matchers/matchers_manager.hpp" #include "matchers/matchers_manager.hpp"
#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp" #include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"
namespace SubgraphsDumper { namespace SubgraphsDumper {

View File

@ -5,7 +5,7 @@
#include "gtest/gtest.h" #include "gtest/gtest.h"
#include "matchers/convolutions.hpp" #include "matchers/convolutions.hpp"
#include "ngraph/ops.hpp" #include "ngraph/ops.hpp"
#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp" #include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"
class ConvolutionMatcherTest : public ::testing::Test { class ConvolutionMatcherTest : public ::testing::Test {
protected: protected:

View File

@ -5,7 +5,7 @@
#include "gtest/gtest.h" #include "gtest/gtest.h"
#include "matchers/single_op.hpp" #include "matchers/single_op.hpp"
#include "ngraph/ops.hpp" #include "ngraph/ops.hpp"
#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp" #include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"
class SingleOpMatcherTest : public ::testing::Test { class SingleOpMatcherTest : public ::testing::Test {
protected: protected:

View File

@ -71,7 +71,7 @@ inline const std::vector<std::map<std::string, std::string>> generate_configs(co
return resultConfig; return resultConfig;
} }
inline const std::string generate_complex_device_name(const std::string& deviceName) { inline const std::string generate_complex_device_name(const std::string deviceName) {
return deviceName + ":" + ov::test::conformance::targetDevice; return deviceName + ":" + ov::test::conformance::targetDevice;
} }
@ -85,9 +85,27 @@ inline const std::vector<std::string> return_all_possible_device_combination() {
return res; return res;
} }
const std::vector<std::map<std::string, std::string>> empty_config = { inline std::vector<std::pair<std::string, std::string>> generate_pairs_plugin_name_by_device() {
{}, std::vector<std::pair<std::string, std::string>> res;
}; for (const auto& device : return_all_possible_device_combination()) {
std::string real_device = device.substr(0, device.find(':'));
res.push_back(std::make_pair(get_plugin_lib_name_by_device(ov::test::conformance::targetDevice),
real_device));
}
return res;
}
inline std::map<std::string, std::string> AnyMap2StringMap(const AnyMap& config) {
if (config.empty())
return {};
std::map<std::string, std::string> result;
for (const auto& configItem : config) {
result.insert({configItem.first, configItem.second.as<std::string>()});
}
return result;
}
const std::map<std::string, std::string> ie_config = AnyMap2StringMap(ov::test::conformance::pluginConfig);
} // namespace conformance } // namespace conformance
} // namespace test } // namespace test

View File

@ -34,9 +34,7 @@ inline const std::vector<ov::AnyMap> generate_ov_configs(const std::string& targ
return resultConfig; return resultConfig;
} }
const std::vector<ov::AnyMap> empty_ov_config = { const ov::AnyMap ov_config = ov::test::conformance::pluginConfig;
{},
};
} // namespace conformance } // namespace conformance
} // namespace test } // namespace test

View File

@ -11,7 +11,7 @@ namespace {
using namespace ExecutionGraphTests; using namespace ExecutionGraphTests;
INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecGraphSerializationTest, INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecGraphSerializationTest,
::testing::Values(ov::test::conformance::targetDevice), ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination()),
ExecGraphSerializationTest::getTestCaseName); ExecGraphSerializationTest::getTestCaseName);
const std::vector<InferenceEngine::Precision> execGraphInfoElemTypes = { const std::vector<InferenceEngine::Precision> execGraphInfoElemTypes = {
@ -22,7 +22,7 @@ INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecGraphUniqueNodeNames,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(execGraphInfoElemTypes), ::testing::ValuesIn(execGraphInfoElemTypes),
::testing::Values(InferenceEngine::SizeVector({1, 2, 5, 5})), ::testing::Values(InferenceEngine::SizeVector({1, 2, 5, 5})),
::testing::Values(ov::test::conformance::targetDevice)), ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination())),
ExecGraphUniqueNodeNames::getTestCaseName); ExecGraphUniqueNodeNames::getTestCaseName);
} // namespace } // namespace

View File

@ -14,7 +14,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecutableNetworkBaseTest, INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecutableNetworkBaseTest,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
ExecutableNetworkBaseTest::getTestCaseName); ExecutableNetworkBaseTest::getTestCaseName);
const std::vector<InferenceEngine::Precision> execNetBaseElemTypes = { const std::vector<InferenceEngine::Precision> execNetBaseElemTypes = {
@ -28,6 +28,6 @@ namespace {
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(execNetBaseElemTypes), ::testing::ValuesIn(execNetBaseElemTypes),
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
ExecNetSetPrecision::getTestCaseName); ExecNetSetPrecision::getTestCaseName);
} // namespace } // namespace

View File

@ -61,7 +61,7 @@ INSTANTIATE_TEST_SUITE_P(
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
smoke_IEClassHeteroExecutableNetworkGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS, smoke_IEClassHeteroExecutableNetworkGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS,
::testing::Values(ov::test::conformance::targetDevice)); ::testing::ValuesIn(return_all_possible_device_combination()));
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
ie_executable_network, IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME, ie_executable_network, IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME,

View File

@ -12,6 +12,6 @@ using namespace ov::test::conformance;
INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestCallbackTests, INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestCallbackTests,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestCallbackTests::getTestCaseName); InferRequestCallbackTests::getTestCaseName);
} // namespace } // namespace

View File

@ -12,6 +12,6 @@ using namespace ov::test::conformance;
INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestCancellationTests, INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestCancellationTests,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestCancellationTests::getTestCaseName); InferRequestCancellationTests::getTestCaseName);
} // namespace } // namespace

View File

@ -15,6 +15,6 @@ using namespace ov::test::conformance;
INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestIOBBlobTest, INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestIOBBlobTest,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestIOBBlobTest::getTestCaseName); InferRequestIOBBlobTest::getTestCaseName);
} // namespace } // namespace

View File

@ -16,7 +16,7 @@ using namespace BehaviorTestsDefinitions;
INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestMultithreadingTests, INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestMultithreadingTests,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestMultithreadingTests::getTestCaseName); InferRequestMultithreadingTests::getTestCaseName);
} // namespace } // namespace

View File

@ -12,7 +12,7 @@ using namespace BehaviorTestsDefinitions;
INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestPerfCountersTest, INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestPerfCountersTest,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestPerfCountersTest::getTestCaseName); InferRequestPerfCountersTest::getTestCaseName);
} // namespace } // namespace

View File

@ -13,16 +13,14 @@ const std::vector<FuncTestUtils::BlobType> setBlobTypes = {
FuncTestUtils::BlobType::Compound, FuncTestUtils::BlobType::Compound,
FuncTestUtils::BlobType::Batched, FuncTestUtils::BlobType::Batched,
FuncTestUtils::BlobType::Memory, FuncTestUtils::BlobType::Memory,
// FuncTestUtils::BlobType::Remote, FuncTestUtils::BlobType::Remote,
FuncTestUtils::BlobType::I420, FuncTestUtils::BlobType::I420,
FuncTestUtils::BlobType::NV12 FuncTestUtils::BlobType::NV12
}; };
const std::map<std::string, std::string> ConfigBlobType{}; //nothing special
INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestSetBlobByType, INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestSetBlobByType,
::testing::Combine(::testing::ValuesIn(setBlobTypes), ::testing::Combine(::testing::ValuesIn(setBlobTypes),
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestSetBlobByType::getTestCaseName); InferRequestSetBlobByType::getTestCaseName);
} // namespace } // namespace

View File

@ -15,6 +15,6 @@ using namespace BehaviorTestsDefinitions;
INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestWaitTests, INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestWaitTests,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestWaitTests::getTestCaseName); InferRequestWaitTests::getTestCaseName);
} // namespace } // namespace

View File

@ -30,6 +30,6 @@ INSTANTIATE_TEST_SUITE_P(ov_compiled_model,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(ovExecGraphInfoElemTypes), ::testing::ValuesIn(ovExecGraphInfoElemTypes),
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVExecGraphImportExportTest::getTestCaseName); OVExecGraphImportExportTest::getTestCaseName);
} // namespace } // namespace

View File

@ -14,6 +14,6 @@ using namespace ov::test::conformance;
INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVExecutableNetworkBaseTest, INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVExecutableNetworkBaseTest,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVExecutableNetworkBaseTest::getTestCaseName); OVExecutableNetworkBaseTest::getTestCaseName);
} // namespace } // namespace

View File

@ -18,7 +18,7 @@ using namespace InferenceEngine::PluginConfigParams;
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
ov_compiled_model, OVClassImportExportTestP, ov_compiled_model, OVClassExecutableNetworkImportExportTestP,
::testing::ValuesIn(return_all_possible_device_combination())); ::testing::ValuesIn(return_all_possible_device_combination()));
// //
@ -55,7 +55,7 @@ INSTANTIATE_TEST_SUITE_P(
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
ov_compiled_model, OVClassExecutableNetworkSetConfigTest, ov_compiled_model, OVClassExecutableNetworkSetConfigTest,
::testing::Values(ov::test::conformance::targetDevice)); ::testing::ValuesIn(return_all_possible_device_combination()));
//// ////
//// Hetero Executable Network GetMetric //// Hetero Executable Network GetMetric

View File

@ -16,34 +16,15 @@ const std::vector<ov::AnyMap> inproperties = {
}; };
const std::vector<ov::AnyMap> auto_batch_inproperties = { const std::vector<ov::AnyMap> auto_batch_inproperties = {
{{ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}},
{{ov::auto_batch_timeout(-1)}}, {{ov::auto_batch_timeout(-1)}},
}; };
INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesIncorrectTests, INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesIncorrectTests,
::testing::Combine( ::testing::Combine(
::testing::Values(ov::test::conformance::targetDevice), ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination()),
::testing::ValuesIn(inproperties)), ::testing::ValuesIn(inproperties)),
OVCompiledModelPropertiesIncorrectTests::getTestCaseName); OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Hetero, OVCompiledModelPropertiesIncorrectTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_HETERO),
::testing::ValuesIn(generate_ov_configs(CommonTestUtils::DEVICE_HETERO, inproperties))),
OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Multi, OVCompiledModelPropertiesIncorrectTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_MULTI),
::testing::ValuesIn(generate_ov_configs(CommonTestUtils::DEVICE_MULTI, inproperties))),
OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Auto, OVCompiledModelPropertiesIncorrectTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(generate_ov_configs(CommonTestUtils::DEVICE_AUTO, inproperties))),
OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_compiled_model_AutoBatch, OVCompiledModelPropertiesIncorrectTests, INSTANTIATE_TEST_SUITE_P(ov_compiled_model_AutoBatch, OVCompiledModelPropertiesIncorrectTests,
::testing::Combine( ::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_BATCH), ::testing::Values(CommonTestUtils::DEVICE_BATCH),
@ -63,35 +44,16 @@ INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesDefaultTest
OVCompiledModelPropertiesDefaultTests::getTestCaseName); OVCompiledModelPropertiesDefaultTests::getTestCaseName);
const std::vector<ov::AnyMap> auto_batch_properties = { const std::vector<ov::AnyMap> auto_batch_properties = {
{},
{{CONFIG_KEY(AUTO_BATCH_TIMEOUT) , "1"}}, {{CONFIG_KEY(AUTO_BATCH_TIMEOUT) , "1"}},
{{ov::auto_batch_timeout(10)}}, {{ov::auto_batch_timeout(10)}},
}; };
INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesTests, INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesTests,
::testing::Combine( ::testing::Combine(
::testing::Values(ov::test::conformance::targetDevice), ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination()),
::testing::ValuesIn(default_properties)), ::testing::ValuesIn(default_properties)),
OVCompiledModelPropertiesTests::getTestCaseName); OVCompiledModelPropertiesTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Hetero, OVCompiledModelPropertiesTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_HETERO),
::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_HETERO, default_properties))),
OVCompiledModelPropertiesTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Multi, OVCompiledModelPropertiesTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_MULTI),
::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_MULTI, default_properties))),
OVCompiledModelPropertiesTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Auto, OVCompiledModelPropertiesTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_AUTO, default_properties))),
OVCompiledModelPropertiesTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_compiled_model_AutoBatch, OVCompiledModelPropertiesTests, INSTANTIATE_TEST_SUITE_P(ov_compiled_model_AutoBatch, OVCompiledModelPropertiesTests,
::testing::Combine( ::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_BATCH), ::testing::Values(CommonTestUtils::DEVICE_BATCH),

View File

@ -15,7 +15,7 @@ using namespace ov::test::conformance;
INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestCallbackTests, INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestCallbackTests,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferRequestCallbackTests::getTestCaseName); OVInferRequestCallbackTests::getTestCaseName);
} // namespace } // namespace

View File

@ -12,6 +12,6 @@ using namespace ov::test::conformance;
INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestCancellationTests, INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestCancellationTests,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferRequestCancellationTests::getTestCaseName); OVInferRequestCancellationTests::getTestCaseName);
} // namespace } // namespace

View File

@ -61,7 +61,7 @@ INSTANTIATE_TEST_SUITE_P(ov_infer_request_1, OVInferRequestDynamicTests,
{{1, 4, 20, 20}, {1, 4, 20, 20}}, {{1, 4, 20, 20}, {1, 4, 20, 20}},
{{2, 4, 20, 20}, {2, 4, 20, 20}}}), {{2, 4, 20, 20}, {2, 4, 20, 20}}}),
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferRequestDynamicTests::getTestCaseName); OVInferRequestDynamicTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_infer_request_2, OVInferRequestDynamicTests, INSTANTIATE_TEST_SUITE_P(ov_infer_request_2, OVInferRequestDynamicTests,
@ -71,6 +71,6 @@ INSTANTIATE_TEST_SUITE_P(ov_infer_request_2, OVInferRequestDynamicTests,
{{1, 4, 20, 20}, {1, 2, 20, 40}}, {{1, 4, 20, 20}, {1, 2, 20, 40}},
{{2, 4, 20, 20}, {2, 2, 20, 40}}}), {{2, 4, 20, 20}, {2, 2, 20, 40}}}),
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferRequestDynamicTests::getTestCaseName); OVInferRequestDynamicTests::getTestCaseName);
} // namespace } // namespace

View File

@ -13,6 +13,6 @@ using namespace ov::test::conformance;
INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferenceChaining, INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferenceChaining,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferenceChaining::getTestCaseName); OVInferenceChaining::getTestCaseName);
} // namespace } // namespace

View File

@ -15,7 +15,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestIOTensorTest, INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestIOTensorTest,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferRequestIOTensorTest::getTestCaseName); OVInferRequestIOTensorTest::getTestCaseName);
std::vector<ov::element::Type> ovIOTensorElemTypes = { std::vector<ov::element::Type> ovIOTensorElemTypes = {
@ -41,6 +41,6 @@ INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestIOTensorSetPrecisionTes
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(ovIOTensorElemTypes), ::testing::ValuesIn(ovIOTensorElemTypes),
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferRequestIOTensorSetPrecisionTest::getTestCaseName); OVInferRequestIOTensorSetPrecisionTest::getTestCaseName);
} // namespace } // namespace

View File

@ -16,7 +16,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestMultithreadingTests, INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestMultithreadingTests,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferRequestMultithreadingTests::getTestCaseName); OVInferRequestMultithreadingTests::getTestCaseName);
} // namespace } // namespace

View File

@ -13,7 +13,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestPerfCountersTest, INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestPerfCountersTest,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferRequestPerfCountersTest::getTestCaseName); OVInferRequestPerfCountersTest::getTestCaseName);
} // namespace } // namespace

View File

@ -16,7 +16,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestWaitTests, INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestWaitTests,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_ov_config)), ::testing::Values(ov_config)),
OVInferRequestWaitTests::getTestCaseName); OVInferRequestWaitTests::getTestCaseName);
} // namespace } // namespace

View File

@ -17,14 +17,14 @@ namespace {
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
ov_plugin, OVClassBasicTestP, ov_plugin, OVClassBasicTestP,
::testing::Values(std::make_pair(get_plugin_lib_name_by_device(ov::test::conformance::targetDevice), ov::test::conformance::targetDevice))); ::testing::ValuesIn(generate_pairs_plugin_name_by_device()));
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
ov_plugin, OVClassNetworkTestP, ov_plugin, OVClassNetworkTestP,
::testing::ValuesIn(return_all_possible_device_combination())); ::testing::ValuesIn(return_all_possible_device_combination()));
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
ov_plugin, OVClassImportExportTestP, smoke_OVClassImportExportTestP, OVClassImportExportTestP,
::testing::ValuesIn(return_all_possible_device_combination())); ::testing::ValuesIn(return_all_possible_device_combination()));
// //

View File

@ -3,10 +3,12 @@
// //
#include "behavior/ov_plugin/properties_tests.hpp" #include "behavior/ov_plugin/properties_tests.hpp"
#include "base/ov_behavior_test_utils.hpp"
#include "openvino/runtime/properties.hpp" #include "openvino/runtime/properties.hpp"
#include "ov_api_conformance_helpers.hpp" #include "ov_api_conformance_helpers.hpp"
using namespace ov::test::behavior; using namespace ov::test::behavior;
using namespace ov::test::conformance;
namespace { namespace {
@ -15,34 +17,15 @@ const std::vector<ov::AnyMap> inproperties = {
}; };
const std::vector<ov::AnyMap> auto_batch_inproperties = { const std::vector<ov::AnyMap> auto_batch_inproperties = {
{{ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}},
{{ov::auto_batch_timeout(-1)}}, {{ov::auto_batch_timeout(-1)}},
}; };
INSTANTIATE_TEST_SUITE_P(ov_plugin, OVPropertiesIncorrectTests, INSTANTIATE_TEST_SUITE_P(ov_plugin, OVPropertiesIncorrectTests,
::testing::Combine( ::testing::Combine(
::testing::Values(ov::test::conformance::targetDevice), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(inproperties)), ::testing::ValuesIn(inproperties)),
OVPropertiesIncorrectTests::getTestCaseName); OVPropertiesIncorrectTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_plugin_Hetero, OVPropertiesIncorrectTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_HETERO),
::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_HETERO, inproperties))),
OVPropertiesIncorrectTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_plugin_Multi, OVPropertiesIncorrectTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_MULTI),
::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_MULTI, inproperties))),
OVPropertiesIncorrectTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_plugin_Auto, OVPropertiesIncorrectTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_AUTO, inproperties))),
OVPropertiesIncorrectTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_plugin_AutoBatch, OVPropertiesIncorrectTests, INSTANTIATE_TEST_SUITE_P(ov_plugin_AutoBatch, OVPropertiesIncorrectTests,
::testing::Combine( ::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_BATCH), ::testing::Values(CommonTestUtils::DEVICE_BATCH),
@ -62,28 +45,10 @@ const std::vector<ov::AnyMap> auto_batch_properties = {
INSTANTIATE_TEST_SUITE_P(ov_plugin, OVPropertiesTests, INSTANTIATE_TEST_SUITE_P(ov_plugin, OVPropertiesTests,
::testing::Combine( ::testing::Combine(
::testing::Values(ov::test::conformance::targetDevice), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(default_properties)), ::testing::ValuesIn(default_properties)),
OVPropertiesTests::getTestCaseName); OVPropertiesTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_plugin_Hetero, OVPropertiesTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_HETERO),
::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_HETERO, default_properties))),
OVPropertiesTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_plugin_Multi, OVPropertiesTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_MULTI),
::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_MULTI, default_properties))),
OVPropertiesTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_plugin_Auto, OVPropertiesTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_AUTO, default_properties))),
OVPropertiesTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ov_plugin_AutoBatch, OVPropertiesTests, INSTANTIATE_TEST_SUITE_P(ov_plugin_AutoBatch, OVPropertiesTests,
::testing::Combine( ::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_BATCH), ::testing::Values(CommonTestUtils::DEVICE_BATCH),

View File

@ -24,8 +24,16 @@ namespace {
#else #else
auto defaultBindThreadParameter = InferenceEngine::Parameter{std::string{CONFIG_VALUE(YES)}}; auto defaultBindThreadParameter = InferenceEngine::Parameter{std::string{CONFIG_VALUE(YES)}};
#endif #endif
INSTANTIATE_TEST_SUITE_P(
ie_plugin,
DefaultConfigurationTest,
::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()),
::testing::Values(DefaultParameter{CONFIG_KEY(PERF_COUNT), CONFIG_VALUE(YES)})),
DefaultConfigurationTest::getTestCaseName);
const std::vector<std::map<std::string, std::string>> pluginConfigs = { const std::vector<std::map<std::string, std::string>> pluginConfigs = {
{}, {{}},
{{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::THROUGHPUT}}, {{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::THROUGHPUT}},
{{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY}}, {{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY}},
{{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY}, {{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY},
@ -161,7 +169,7 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin_Hetero, CorrectConfigTests,
{{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "10"}} {{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "10"}}
}; };
INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, CorrectConfigCheck, INSTANTIATE_TEST_SUITE_P(ie_plugin, CorrectConfigCheck,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(pluginConfigsCheck)), ::testing::ValuesIn(pluginConfigsCheck)),

View File

@ -16,7 +16,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
ie_plugin, IEClassBasicTestP, ie_plugin, IEClassBasicTestP,
::testing::Values(std::make_pair(get_plugin_lib_name_by_device(ov::test::conformance::targetDevice), ov::test::conformance::targetDevice))); ::testing::ValuesIn(generate_pairs_plugin_name_by_device()));
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
ie_plugin, IEClassNetworkTestP, ie_plugin, IEClassNetworkTestP,

View File

@ -16,7 +16,10 @@ const Params coreThreadingParams[] = {
std::tuple<Device, Config>{ CommonTestUtils::DEVICE_BATCH, generate_configs(CommonTestUtils::DEVICE_BATCH).front() }, std::tuple<Device, Config>{ CommonTestUtils::DEVICE_BATCH, generate_configs(CommonTestUtils::DEVICE_BATCH).front() },
}; };
INSTANTIATE_TEST_SUITE_P(ie_plugin_, CoreThreadingTests, testing::ValuesIn(coreThreadingParams), CoreThreadingTests::getTestCaseName); INSTANTIATE_TEST_SUITE_P(ie_plugin_, CoreThreadingTests,
testing::ValuesIn(coreThreadingParams),
CoreThreadingTests::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ie_plugin, CoreThreadingTests, INSTANTIATE_TEST_SUITE_P(ie_plugin, CoreThreadingTests,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),

View File

@ -19,7 +19,7 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessTest,
::testing::Combine( ::testing::Combine(
::testing::ValuesIn(netPrecisionsPreprocess), ::testing::ValuesIn(netPrecisionsPreprocess),
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestPreprocessTest::getTestCaseName); InferRequestPreprocessTest::getTestCaseName);
const std::vector<InferenceEngine::Precision> ioPrecisionsPreprocess = { const std::vector<InferenceEngine::Precision> ioPrecisionsPreprocess = {
@ -47,7 +47,7 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessConversionTest,
::testing::Bool(), ::testing::Bool(),
::testing::Bool(), ::testing::Bool(),
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestPreprocessConversionTest::getTestCaseName); InferRequestPreprocessConversionTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessDynamicallyInSetBlobTest, INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessDynamicallyInSetBlobTest,
@ -61,6 +61,6 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessDynamicallyInSetBlobTe
::testing::Values(true), // only SetBlob ::testing::Values(true), // only SetBlob
::testing::Values(true), // only SetBlob ::testing::Values(true), // only SetBlob
::testing::ValuesIn(return_all_possible_device_combination()), ::testing::ValuesIn(return_all_possible_device_combination()),
::testing::ValuesIn(empty_config)), ::testing::Values(ie_config)),
InferRequestPreprocessDynamicallyInSetBlobTest::getTestCaseName); InferRequestPreprocessDynamicallyInSetBlobTest::getTestCaseName);
} // namespace } // namespace

View File

@ -11,7 +11,7 @@
#include "common_test_utils/file_utils.hpp" #include "common_test_utils/file_utils.hpp"
#include "functional_test_utils/skip_tests_config.hpp" #include "functional_test_utils/skip_tests_config.hpp"
#include "functional_test_utils/layer_test_utils/environment.hpp" #include "functional_test_utils/summary/environment.hpp"
#include "read_ir_test/read_ir.hpp" #include "read_ir_test/read_ir.hpp"
#include "gflag_config.hpp" #include "gflag_config.hpp"
@ -45,11 +45,15 @@ int main(int argc, char* argv[]) {
} }
FuncTestUtils::SkipTestsConfig::disable_tests_skipping = FLAGS_disable_test_config; FuncTestUtils::SkipTestsConfig::disable_tests_skipping = FLAGS_disable_test_config;
LayerTestsUtils::Summary::setExtendReport(FLAGS_extend_report); ov::test::utils::OpSummary::setExtendReport(FLAGS_extend_report);
LayerTestsUtils::Summary::setExtractBody(FLAGS_extract_body); ov::test::utils::OpSummary::setExtractBody(FLAGS_extract_body);
LayerTestsUtils::Summary::setSaveReportWithUniqueName(FLAGS_report_unique_name); ov::test::utils::OpSummary::setSaveReportWithUniqueName(FLAGS_report_unique_name);
LayerTestsUtils::Summary::setOutputFolder(FLAGS_output_folder); ov::test::utils::OpSummary::setOutputFolder(FLAGS_output_folder);
LayerTestsUtils::Summary::setSaveReportTimeout(FLAGS_save_report_timeout); ov::test::utils::OpSummary::setSaveReportTimeout(FLAGS_save_report_timeout);
{
auto &apiSummary = ov::test::utils::ApiSummary::getInstance();
apiSummary.setDeviceName(FLAGS_device);
}
if (FLAGS_shape_mode == std::string("static")) { if (FLAGS_shape_mode == std::string("static")) {
ov::test::subgraph::shapeMode = ov::test::subgraph::ShapeMode::STATIC; ov::test::subgraph::shapeMode = ov::test::subgraph::ShapeMode::STATIC;
} else if (FLAGS_shape_mode == std::string("dynamic")) { } else if (FLAGS_shape_mode == std::string("dynamic")) {
@ -75,25 +79,29 @@ int main(int argc, char* argv[]) {
} }
::testing::InitGoogleTest(&argc, argv); ::testing::InitGoogleTest(&argc, argv);
::testing::AddGlobalTestEnvironment(new LayerTestsUtils::TestEnvironment); ::testing::AddGlobalTestEnvironment(new ov::test::utils::TestEnvironment);
auto exernalSignalHandler = [](int errCode) { auto exernalSignalHandler = [](int errCode) {
std::cerr << "Unexpected application crash with code: " << errCode << std::endl; std::cerr << "Unexpected application crash with code: " << errCode << std::endl;
auto& op_summary = ov::test::utils::OpSummary::getInstance();
auto& api_summary = ov::test::utils::ApiSummary::getInstance();
op_summary.saveReport();
api_summary.saveReport();
// set default handler for crash // set default handler for crash
signal(SIGABRT, SIG_DFL);
signal(SIGSEGV, SIG_DFL);
signal(SIGINT, SIG_DFL); signal(SIGINT, SIG_DFL);
signal(SIGTERM, SIG_DFL); signal(SIGTERM, SIG_DFL);
if (errCode == SIGINT || errCode == SIGTERM) { exit(1);
auto& s = LayerTestsUtils::Summary::getInstance();
s.saveReport();
exit(1);
}
}; };
// killed by extarnal // killed by external
signal(SIGINT, exernalSignalHandler); signal(SIGINT, exernalSignalHandler);
signal(SIGTERM , exernalSignalHandler); signal(SIGTERM , exernalSignalHandler);
signal(SIGSEGV, exernalSignalHandler);
signal(SIGABRT, exernalSignalHandler);
return RUN_ALL_TESTS(); return RUN_ALL_TESTS();
} }

View File

@ -14,7 +14,7 @@
#include "common_test_utils/data_utils.hpp" #include "common_test_utils/data_utils.hpp"
#include "common_test_utils/common_utils.hpp" #include "common_test_utils/common_utils.hpp"
#include "common_test_utils/crash_handler.hpp" #include "common_test_utils/crash_handler.hpp"
#include "functional_test_utils/layer_test_utils/op_info.hpp" #include "functional_test_utils/summary/op_info.hpp"
#include "functional_test_utils/skip_tests_config.hpp" #include "functional_test_utils/skip_tests_config.hpp"
#include "read_ir_test/read_ir.hpp" #include "read_ir_test/read_ir.hpp"
@ -56,7 +56,7 @@ std::string ReadIRTest::getTestCaseName(const testing::TestParamInfo<ReadIRParam
void ReadIRTest::query_model() { void ReadIRTest::query_model() {
// in case of crash jump will be made and work will be continued // in case of crash jump will be made and work will be continued
auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler()); auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());
auto &s = LayerTestsUtils::Summary::getInstance(); auto &s = ov::test::utils::OpSummary::getInstance();
// place to jump in case of a crash // place to jump in case of a crash
int jmpRes = 0; int jmpRes = 0;
@ -74,21 +74,21 @@ void ReadIRTest::query_model() {
s.setDeviceName(targetDevice); s.setDeviceName(targetDevice);
if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) { if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::SKIPPED); s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::SKIPPED);
GTEST_SKIP() << "Disabled test due to configuration" << std::endl; GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
} else { } else {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::CRASHED); s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::CRASHED);
} }
try { try {
SubgraphBaseTest::query_model(); SubgraphBaseTest::query_model();
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::PASSED); s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::PASSED);
} catch (...) { } catch (...) {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::FAILED); s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED);
} }
} else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) { } else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) {
IE_THROW() << "Crash happens"; IE_THROW() << "Crash happens";
} else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) { } else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::HANGED); s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::HANGED);
IE_THROW() << "Crash happens"; IE_THROW() << "Crash happens";
} }
} }

View File

@ -18,7 +18,7 @@ TEST_P(ExecGraphUniqueNodeNames, CheckUniqueNodeNames) {
InferenceEngine::CNNNetwork cnnNet(fnPtr); InferenceEngine::CNNNetwork cnnNet(fnPtr);
auto ie = PluginCache::get().ie(); auto ie = PluginCache::get().ie();
auto execNet = ie->LoadNetwork(cnnNet, targetDevice); auto execNet = ie->LoadNetwork(cnnNet, target_device);
InferenceEngine::CNNNetwork execGraphInfo = execNet.GetExecGraphInfo(); InferenceEngine::CNNNetwork execGraphInfo = execNet.GetExecGraphInfo();

View File

@ -17,7 +17,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
smoke_OVClassImportExportTestP, OVClassImportExportTestP, smoke_OVClassImportExportTestP, OVClassExecutableNetworkImportExportTestP,
::testing::Values("HETERO:CPU")); ::testing::Values("HETERO:CPU"));
// //

View File

@ -78,7 +78,7 @@ TEST_P(IEClassExecutableNetworkSetConfigFromFp32Test, SetConfigFromFp32Throws) {
std::map<std::string, std::string> initialConfig; std::map<std::string, std::string> initialConfig;
initialConfig[GNA_CONFIG_KEY(DEVICE_MODE)] = InferenceEngine::GNAConfigParams::GNA_SW_FP32; initialConfig[GNA_CONFIG_KEY(DEVICE_MODE)] = InferenceEngine::GNAConfigParams::GNA_SW_FP32;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName, initialConfig); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device, initialConfig);
ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngine::Exception); ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngine::Exception);
} }

View File

@ -18,7 +18,7 @@
using namespace ov::test::behavior; using namespace ov::test::behavior;
namespace { namespace {
// IE Class Common tests with <pluginName, deviceName params> // IE Class Common tests with <pluginName, target_device params>
// //
INSTANTIATE_TEST_SUITE_P(nightly_OVClassCommon, INSTANTIATE_TEST_SUITE_P(nightly_OVClassCommon,
@ -110,7 +110,7 @@ TEST_P(OVClassGetMetricTest_GPU_DEVICE_TOTAL_MEM_SIZE, GetMetricAndPrintNoThrow)
ov::Core ie; ov::Core ie;
ov::Any p; ov::Any p;
ASSERT_NO_THROW(p = ie.get_property(deviceName, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE))); ASSERT_NO_THROW(p = ie.get_property(target_device, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE)));
uint64_t t = p; uint64_t t = p;
std::cout << "GPU device total memory size: " << t << std::endl; std::cout << "GPU device total memory size: " << t << std::endl;
@ -127,7 +127,7 @@ TEST_P(OVClassGetMetricTest_GPU_UARCH_VERSION, GetMetricAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
ov::Any p; ov::Any p;
ASSERT_NO_THROW(p = ie.get_property(deviceName, GPU_METRIC_KEY(UARCH_VERSION))); ASSERT_NO_THROW(p = ie.get_property(target_device, GPU_METRIC_KEY(UARCH_VERSION)));
std::string t = p; std::string t = p;
std::cout << "GPU device uarch: " << t << std::endl; std::cout << "GPU device uarch: " << t << std::endl;
@ -143,7 +143,7 @@ TEST_P(OVClassGetMetricTest_GPU_EXECUTION_UNITS_COUNT, GetMetricAndPrintNoThrow)
ov::Core ie; ov::Core ie;
ov::Any p; ov::Any p;
ASSERT_NO_THROW(p = ie.get_property(deviceName, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT))); ASSERT_NO_THROW(p = ie.get_property(target_device, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT)));
int t = p; int t = p;
std::cout << "GPU EUs count: " << t << std::endl; std::cout << "GPU EUs count: " << t << std::endl;
@ -160,7 +160,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricAvailableDevicesAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
std::vector<std::string> properties; std::vector<std::string> properties;
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::available_devices)); ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::available_devices));
std::cout << "AVAILABLE_DEVICES: "; std::cout << "AVAILABLE_DEVICES: ";
for (const auto& prop : properties) { for (const auto& prop : properties) {
@ -175,7 +175,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricRangeForAsyncInferRequestsAndPrintNo
ov::Core ie; ov::Core ie;
std::tuple<unsigned int, unsigned int, unsigned int> property; std::tuple<unsigned int, unsigned int, unsigned int> property;
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::range_for_async_infer_requests)); ASSERT_NO_THROW(property = ie.get_property(target_device, ov::range_for_async_infer_requests));
std::cout << "RANGE_FOR_ASYNC_INFER_REQUESTS: " << std::get<0>(property) << " " << std::cout << "RANGE_FOR_ASYNC_INFER_REQUESTS: " << std::get<0>(property) << " " <<
std::get<1>(property) << " " << std::get<1>(property) << " " <<
@ -188,7 +188,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricRangeForStreamsAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
std::tuple<unsigned int, unsigned int> property; std::tuple<unsigned int, unsigned int> property;
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::range_for_streams)); ASSERT_NO_THROW(property = ie.get_property(target_device, ov::range_for_streams));
std::cout << "RANGE_FOR_STREAMS: " << std::get<0>(property) << " " << std::cout << "RANGE_FOR_STREAMS: " << std::get<0>(property) << " " <<
std::get<1>(property) << std::endl; std::get<1>(property) << std::endl;
@ -200,7 +200,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricOptimalBatchSizeAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
unsigned int property; unsigned int property;
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::optimal_batch_size)); ASSERT_NO_THROW(property = ie.get_property(target_device, ov::optimal_batch_size));
std::cout << "OPTIMAL_BATCH_SIZE: " << property << std::endl; std::cout << "OPTIMAL_BATCH_SIZE: " << property << std::endl;
@ -211,7 +211,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricFullNameAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
std::string property; std::string property;
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::device::full_name)); ASSERT_NO_THROW(property = ie.get_property(target_device, ov::device::full_name));
std::cout << "FULL_DEVICE_NAME: " << property << std::endl; std::cout << "FULL_DEVICE_NAME: " << property << std::endl;
@ -222,7 +222,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricTypeAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
ov::device::Type property = ov::device::Type::INTEGRATED; ov::device::Type property = ov::device::Type::INTEGRATED;
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::device::type)); ASSERT_NO_THROW(property = ie.get_property(target_device, ov::device::type));
std::cout << "DEVICE_TYPE: " << property << std::endl; std::cout << "DEVICE_TYPE: " << property << std::endl;
@ -233,7 +233,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricGopsAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
std::map<ov::element::Type, float> properties; std::map<ov::element::Type, float> properties;
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::device::gops)); ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::device::gops));
std::cout << "DEVICE_GOPS: " << std::endl; std::cout << "DEVICE_GOPS: " << std::endl;
for (const auto& prop : properties) { for (const auto& prop : properties) {
@ -247,7 +247,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricCapabilitiesAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
std::vector<std::string> properties; std::vector<std::string> properties;
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::device::capabilities)); ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::device::capabilities));
std::cout << "OPTIMIZATION_CAPABILITIES: " << std::endl; std::cout << "OPTIMIZATION_CAPABILITIES: " << std::endl;
for (const auto& prop : properties) { for (const auto& prop : properties) {
@ -261,7 +261,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricDeviceTotalMemSizeAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
uint64_t property; uint64_t property;
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::intel_gpu::device_total_mem_size)); ASSERT_NO_THROW(property = ie.get_property(target_device, ov::intel_gpu::device_total_mem_size));
std::cout << "GPU_DEVICE_TOTAL_MEM_SIZE: " << property << std::endl; std::cout << "GPU_DEVICE_TOTAL_MEM_SIZE: " << property << std::endl;
@ -272,7 +272,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricUarchVersionAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
std::string property; std::string property;
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::intel_gpu::uarch_version)); ASSERT_NO_THROW(property = ie.get_property(target_device, ov::intel_gpu::uarch_version));
std::cout << "GPU_UARCH_VERSION: " << property << std::endl; std::cout << "GPU_UARCH_VERSION: " << property << std::endl;
@ -283,7 +283,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricExecutionUnitsCountAndPrintNoThrow)
ov::Core ie; ov::Core ie;
int32_t property = 0; int32_t property = 0;
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::intel_gpu::execution_units_count)); ASSERT_NO_THROW(property = ie.get_property(target_device, ov::intel_gpu::execution_units_count));
std::cout << "GPU_EXECUTION_UNITS_COUNT: " << property << std::endl; std::cout << "GPU_EXECUTION_UNITS_COUNT: " << property << std::endl;
@ -294,7 +294,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricMemoryStatisticsAndPrintNoThrow) {
ov::Core ie; ov::Core ie;
std::map<std::string, uint64_t> properties; std::map<std::string, uint64_t> properties;
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
std::cout << "GPU_MEMORY_STATISTICS: " << std::endl; std::cout << "GPU_MEMORY_STATISTICS: " << std::endl;
for (const auto& prop : properties) { for (const auto& prop : properties) {
@ -308,16 +308,16 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetPerformanceModeNoThrow) {
ov::Core ie; ov::Core ie;
ov::hint::PerformanceMode defaultMode{}; ov::hint::PerformanceMode defaultMode{};
ASSERT_NO_THROW(defaultMode = ie.get_property(deviceName, ov::hint::performance_mode)); ASSERT_NO_THROW(defaultMode = ie.get_property(target_device, ov::hint::performance_mode));
std::cout << "Default PERFORMANCE_HINT: \"" << defaultMode << "\"" << std::endl; std::cout << "Default PERFORMANCE_HINT: \"" << defaultMode << "\"" << std::endl;
ie.set_property(deviceName, ov::hint::performance_mode(ov::hint::PerformanceMode::UNDEFINED)); ie.set_property(target_device, ov::hint::performance_mode(ov::hint::PerformanceMode::UNDEFINED));
ASSERT_EQ(ov::hint::PerformanceMode::UNDEFINED, ie.get_property(deviceName, ov::hint::performance_mode)); ASSERT_EQ(ov::hint::PerformanceMode::UNDEFINED, ie.get_property(target_device, ov::hint::performance_mode));
ie.set_property(deviceName, ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)); ie.set_property(target_device, ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY));
ASSERT_EQ(ov::hint::PerformanceMode::LATENCY, ie.get_property(deviceName, ov::hint::performance_mode)); ASSERT_EQ(ov::hint::PerformanceMode::LATENCY, ie.get_property(target_device, ov::hint::performance_mode));
ie.set_property(deviceName, ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)); ie.set_property(target_device, ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT));
ASSERT_EQ(ov::hint::PerformanceMode::THROUGHPUT, ie.get_property(deviceName, ov::hint::performance_mode)); ASSERT_EQ(ov::hint::PerformanceMode::THROUGHPUT, ie.get_property(target_device, ov::hint::performance_mode));
OV_ASSERT_PROPERTY_SUPPORTED(ov::hint::performance_mode); OV_ASSERT_PROPERTY_SUPPORTED(ov::hint::performance_mode);
} }
@ -326,12 +326,12 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetEnableProfilingNoThrow) {
ov::Core ie; ov::Core ie;
bool defaultValue = false; bool defaultValue = false;
ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::enable_profiling)); ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::enable_profiling));
std::cout << "Default PERF_COUNT: " << defaultValue << std::endl; std::cout << "Default PERF_COUNT: " << defaultValue << std::endl;
ie.set_property(deviceName, ov::enable_profiling(true)); ie.set_property(target_device, ov::enable_profiling(true));
ASSERT_EQ(true, ie.get_property(deviceName, ov::enable_profiling)); ASSERT_EQ(true, ie.get_property(target_device, ov::enable_profiling));
OV_ASSERT_PROPERTY_SUPPORTED(ov::enable_profiling); OV_ASSERT_PROPERTY_SUPPORTED(ov::enable_profiling);
@ -356,19 +356,19 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetModelPriorityNoThrow) {
ov::Core ie; ov::Core ie;
ov::hint::Priority defaultValue; ov::hint::Priority defaultValue;
ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::hint::model_priority)); ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::hint::model_priority));
std::cout << "Default PERF_COUNT: " << defaultValue << std::endl; std::cout << "Default PERF_COUNT: " << defaultValue << std::endl;
ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::HIGH)); ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::HIGH));
ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(deviceName, ov::hint::model_priority)); ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(target_device, ov::hint::model_priority));
ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::LOW)); ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::LOW));
ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(deviceName, ov::hint::model_priority)); ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(target_device, ov::hint::model_priority));
ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::MEDIUM)); ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::MEDIUM));
ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(deviceName, ov::hint::model_priority)); ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(target_device, ov::hint::model_priority));
ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
OV_ASSERT_PROPERTY_SUPPORTED(ov::hint::model_priority); OV_ASSERT_PROPERTY_SUPPORTED(ov::hint::model_priority);
} }
@ -377,16 +377,16 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetQueuePriorityNoThrow) {
ov::Core ie; ov::Core ie;
ov::hint::Priority defaultValue; ov::hint::Priority defaultValue;
ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
std::cout << "Default GPU_QUEUE_PRIORITY: " << defaultValue << std::endl; std::cout << "Default GPU_QUEUE_PRIORITY: " << defaultValue << std::endl;
ie.set_property(deviceName, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::HIGH)); ie.set_property(target_device, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::HIGH));
ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
ie.set_property(deviceName, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::LOW)); ie.set_property(target_device, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::LOW));
ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
ie.set_property(deviceName, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::MEDIUM)); ie.set_property(target_device, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::MEDIUM));
ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority)); ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
OV_ASSERT_PROPERTY_SUPPORTED(ov::intel_gpu::hint::queue_priority); OV_ASSERT_PROPERTY_SUPPORTED(ov::intel_gpu::hint::queue_priority);
} }
@ -395,16 +395,16 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetThrottleLevelNoThrow) {
ov::Core ie; ov::Core ie;
ov::intel_gpu::hint::ThrottleLevel defaultValue; ov::intel_gpu::hint::ThrottleLevel defaultValue;
ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle)); ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle));
std::cout << "Default GPU_QUEUE_THROTTLE: " << defaultValue << std::endl; std::cout << "Default GPU_QUEUE_THROTTLE: " << defaultValue << std::endl;
ie.set_property(deviceName, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::HIGH)); ie.set_property(target_device, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::HIGH));
ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::HIGH, ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle)); ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::HIGH, ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle));
ie.set_property(deviceName, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::LOW)); ie.set_property(target_device, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::LOW));
ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::LOW, ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle)); ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::LOW, ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle));
ie.set_property(deviceName, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::MEDIUM)); ie.set_property(target_device, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::MEDIUM));
ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::MEDIUM, ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle)); ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::MEDIUM, ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle));
OV_ASSERT_PROPERTY_SUPPORTED(ov::intel_gpu::hint::queue_throttle); OV_ASSERT_PROPERTY_SUPPORTED(ov::intel_gpu::hint::queue_throttle);
} }
@ -413,20 +413,20 @@ TEST_P(OVClassGetPropertyTest_GPU, CanSetDefaultValueBackToPluginNewAPI) {
ov::Core ie; ov::Core ie;
std::vector<ov::PropertyName> properties; std::vector<ov::PropertyName> properties;
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::supported_properties)); ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::supported_properties));
std::cout << "SUPPORTED_PROPERTIES:" << std::endl; std::cout << "SUPPORTED_PROPERTIES:" << std::endl;
for (const auto& property : properties) { for (const auto& property : properties) {
ov::Any prop; ov::Any prop;
if (property.is_mutable()) { if (property.is_mutable()) {
std::cout << "RW: " << property << " "; std::cout << "RW: " << property << " ";
ASSERT_NO_THROW(prop = ie.get_property(deviceName, property)); ASSERT_NO_THROW(prop = ie.get_property(target_device, property));
prop.print(std::cout); prop.print(std::cout);
std::cout << std::endl; std::cout << std::endl;
ASSERT_NO_THROW(ie.set_property(deviceName, {{property, prop}})); ASSERT_NO_THROW(ie.set_property(target_device, {{property, prop}}));
} else { } else {
std::cout << "RO: " << property << " "; std::cout << "RO: " << property << " ";
ASSERT_NO_THROW(prop = ie.get_property(deviceName, property)); ASSERT_NO_THROW(prop = ie.get_property(target_device, property));
prop.print(std::cout); prop.print(std::cout);
std::cout << std::endl; std::cout << std::endl;
} }
@ -446,7 +446,7 @@ TEST_P(OVClassGetMetricTest_GPU_OPTIMAL_BATCH_SIZE, GetMetricAndPrintNoThrow) {
unsigned int p; unsigned int p;
ov::AnyMap _options = {ov::hint::model(simpleNetwork)}; ov::AnyMap _options = {ov::hint::model(simpleNetwork)};
ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::optimal_batch_size.name(), _options)); ASSERT_NO_THROW(p = ie.get_property(target_device, ov::optimal_batch_size.name(), _options));
std::cout << "GPU device optimal batch size: " << p << std::endl; std::cout << "GPU device optimal batch size: " << p << std::endl;
@ -465,7 +465,7 @@ TEST_P(OVClassGetMetricTest_GPU_MAX_BATCH_SIZE_DEFAULT, GetMetricAndPrintNoThrow
unsigned int p; unsigned int p;
ov::AnyMap _options = {ov::hint::model(simpleNetwork)}; ov::AnyMap _options = {ov::hint::model(simpleNetwork)};
ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::max_batch_size.name(), _options)); ASSERT_NO_THROW(p = ie.get_property(target_device, ov::max_batch_size.name(), _options));
std::cout << "GPU device max available batch size: " << p << std::endl; std::cout << "GPU device max available batch size: " << p << std::endl;
@ -482,7 +482,7 @@ TEST_P(OVClassGetMetricTest_GPU_MAX_BATCH_SIZE_STREAM_DEVICE_MEM, GetMetricAndPr
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::Core ie; ov::Core ie;
unsigned int p; unsigned int p;
auto exec_net1 = ie.compile_model(simpleNetwork, deviceName); auto exec_net1 = ie.compile_model(simpleNetwork, target_device);
uint32_t n_streams = 2; uint32_t n_streams = 2;
int64_t available_device_mem_size = 1073741824; int64_t available_device_mem_size = 1073741824;
@ -490,7 +490,7 @@ TEST_P(OVClassGetMetricTest_GPU_MAX_BATCH_SIZE_STREAM_DEVICE_MEM, GetMetricAndPr
ov::num_streams(n_streams), ov::num_streams(n_streams),
ov::intel_gpu::hint::available_device_mem(available_device_mem_size)}; ov::intel_gpu::hint::available_device_mem(available_device_mem_size)};
ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::max_batch_size.name(), _options)); ASSERT_NO_THROW(p = ie.get_property(target_device, ov::max_batch_size.name(), _options));
std::cout << "GPU device max available batch size: " << p << std::endl; std::cout << "GPU device max available batch size: " << p << std::endl;
@ -508,9 +508,9 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_DEFAULT, GetMetricAndPrintNoTh
ov::Core ie; ov::Core ie;
std::map<std::string, uint64_t> p; std::map<std::string, uint64_t> p;
auto exec_net = ie.compile_model(simpleNetwork, deviceName); auto exec_net = ie.compile_model(simpleNetwork, target_device);
ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(p = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_FALSE(p.empty()); ASSERT_FALSE(p.empty());
std::cout << "Memory Statistics: " << std::endl; std::cout << "Memory Statistics: " << std::endl;
@ -534,18 +534,18 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTIPLE_NETWORKS, GetMetricAn
std::map<std::string, uint64_t> t1; std::map<std::string, uint64_t> t1;
std::map<std::string, uint64_t> t2; std::map<std::string, uint64_t> t2;
auto exec_net1 = ie.compile_model(simpleNetwork, deviceName); auto exec_net1 = ie.compile_model(simpleNetwork, target_device);
ASSERT_NO_THROW(t1 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(t1 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_FALSE(t1.empty()); ASSERT_FALSE(t1.empty());
for (auto &&kv : t1) { for (auto &&kv : t1) {
ASSERT_NE(kv.second, 0); ASSERT_NE(kv.second, 0);
} }
auto exec_net2 = ie.compile_model(simpleNetwork, deviceName); auto exec_net2 = ie.compile_model(simpleNetwork, target_device);
ASSERT_NO_THROW(t2 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(t2 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_FALSE(t2.empty()); ASSERT_FALSE(t2.empty());
for (auto &&kv : t2) { for (auto &&kv : t2) {
@ -570,24 +570,24 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
ov::Core ie; ov::Core ie;
std::map<std::string, uint64_t> t1; std::map<std::string, uint64_t> t1;
ASSERT_NO_THROW(t1 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(t1 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_TRUE(t1.empty()); ASSERT_TRUE(t1.empty());
{ {
auto exec_net1 = ie.compile_model(simpleNetwork, deviceName); auto exec_net1 = ie.compile_model(simpleNetwork, target_device);
std::map<std::string, uint64_t> t2; std::map<std::string, uint64_t> t2;
ASSERT_NO_THROW(t2 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(t2 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_FALSE(t2.empty()); ASSERT_FALSE(t2.empty());
for (auto &&kv : t2) { for (auto &&kv : t2) {
ASSERT_NE(kv.second, 0); ASSERT_NE(kv.second, 0);
} }
{ {
auto exec_net2 = ie.compile_model(actualNetwork, deviceName); auto exec_net2 = ie.compile_model(actualNetwork, target_device);
std::map<std::string, uint64_t> t3; std::map<std::string, uint64_t> t3;
ASSERT_NO_THROW(t3 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(t3 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_FALSE(t3.empty()); ASSERT_FALSE(t3.empty());
for (auto &&kv : t3) { for (auto &&kv : t3) {
@ -595,7 +595,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
} }
} }
std::map<std::string, uint64_t> t4; std::map<std::string, uint64_t> t4;
ASSERT_NO_THROW(t4 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(t4 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_FALSE(t4.empty()); ASSERT_FALSE(t4.empty());
for (auto &&kv : t4) { for (auto &&kv : t4) {
@ -609,7 +609,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
} }
} }
std::map<std::string, uint64_t> t5; std::map<std::string, uint64_t> t5;
ASSERT_NO_THROW(t5 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(t5 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_FALSE(t5.empty()); ASSERT_FALSE(t5.empty());
for (auto &&kv : t5) { for (auto &&kv : t5) {
@ -641,9 +641,9 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
networks.emplace_back(simpleNetwork); networks.emplace_back(simpleNetwork);
networks.emplace_back(simpleNetwork); networks.emplace_back(simpleNetwork);
auto exec_net1 = ie.compile_model(simpleNetwork, deviceName); auto exec_net1 = ie.compile_model(simpleNetwork, target_device);
ASSERT_NO_THROW(t1 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(t1 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_FALSE(t1.empty()); ASSERT_FALSE(t1.empty());
for (auto &&kv : t1) { for (auto &&kv : t1) {
@ -653,7 +653,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
for (auto & thread : threads) { for (auto & thread : threads) {
thread = std::thread([&](){ thread = std::thread([&](){
auto value = counter++; auto value = counter++;
exec_net_map[value] = ie.compile_model(networks[value], deviceName); exec_net_map[value] = ie.compile_model(networks[value], target_device);
}); });
} }
@ -663,7 +663,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
} }
} }
ASSERT_NO_THROW(t2 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics)); ASSERT_NO_THROW(t2 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
ASSERT_FALSE(t2.empty()); ASSERT_FALSE(t2.empty());
for (auto &&kv : t2) { for (auto &&kv : t2) {

View File

@ -16,7 +16,7 @@
using namespace BehaviorTestsDefinitions; using namespace BehaviorTestsDefinitions;
namespace { namespace {
// IE Class Common tests with <pluginName, deviceName params> // IE Class Common tests with <pluginName, target_device params>
// //
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
@ -102,7 +102,7 @@ TEST_P(IEClassGetMetricTest_GPU_DEVICE_TOTAL_MEM_SIZE, GetMetricAndPrintNoThrow)
InferenceEngine::Core ie; InferenceEngine::Core ie;
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE)));
uint64_t t = p; uint64_t t = p;
std::cout << "GPU device total memory size: " << t << std::endl; std::cout << "GPU device total memory size: " << t << std::endl;
@ -122,7 +122,7 @@ TEST_P(IEClassGetMetricTest_GPU_OPTIMAL_BATCH_SIZE, GetMetricAndPrintNoThrow) {
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
std::map<std::string, InferenceEngine::Parameter> _options = {{"MODEL_PTR", simpleCnnNetwork.getFunction()}}; std::map<std::string, InferenceEngine::Parameter> _options = {{"MODEL_PTR", simpleCnnNetwork.getFunction()}};
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(OPTIMAL_BATCH_SIZE), _options).as<unsigned int>()); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(OPTIMAL_BATCH_SIZE), _options).as<unsigned int>());
unsigned int t = p; unsigned int t = p;
std::cout << "GPU device optimal batch size: " << t << std::endl; std::cout << "GPU device optimal batch size: " << t << std::endl;
@ -142,7 +142,7 @@ TEST_P(IEClassGetMetricTest_GPU_MAX_BATCH_SIZE_DEFAULT, GetMetricAndPrintNoThrow
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
std::map<std::string, InferenceEngine::Parameter> _options = {{"MODEL_PTR", simpleCnnNetwork.getFunction()}}; std::map<std::string, InferenceEngine::Parameter> _options = {{"MODEL_PTR", simpleCnnNetwork.getFunction()}};
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(MAX_BATCH_SIZE), _options).as<uint32_t>()); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(MAX_BATCH_SIZE), _options).as<uint32_t>());
uint32_t t = p; uint32_t t = p;
std::cout << "GPU device max available batch size: " << t << std::endl; std::cout << "GPU device max available batch size: " << t << std::endl;
@ -166,7 +166,7 @@ TEST_P(IEClassGetMetricTest_GPU_MAX_BATCH_SIZE_STREAM_DEVICE_MEM, GetMetricAndPr
_options.insert(std::make_pair("GPU_THROUGHPUT_STREAMS", n_streams)); _options.insert(std::make_pair("GPU_THROUGHPUT_STREAMS", n_streams));
_options.insert(std::make_pair("AVAILABLE_DEVICE_MEM_SIZE", available_device_mem_size)); _options.insert(std::make_pair("AVAILABLE_DEVICE_MEM_SIZE", available_device_mem_size));
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(MAX_BATCH_SIZE), _options).as<uint32_t>()); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(MAX_BATCH_SIZE), _options).as<uint32_t>());
uint32_t t = p; uint32_t t = p;
@ -186,7 +186,7 @@ TEST_P(IEClassGetMetricTest_GPU_UARCH_VERSION, GetMetricAndPrintNoThrow) {
InferenceEngine::Core ie; InferenceEngine::Core ie;
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(UARCH_VERSION))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(UARCH_VERSION)));
std::string t = p; std::string t = p;
std::cout << "GPU device uarch: " << t << std::endl; std::cout << "GPU device uarch: " << t << std::endl;
@ -205,7 +205,7 @@ TEST_P(IEClassGetMetricTest_GPU_EXECUTION_UNITS_COUNT, GetMetricAndPrintNoThrow)
InferenceEngine::Core ie; InferenceEngine::Core ie;
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT)));
int t = p; int t = p;
std::cout << "GPU EUs count: " << t << std::endl; std::cout << "GPU EUs count: " << t << std::endl;
@ -224,9 +224,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_DEFAULT, GetMetricAndPrintNoTh
InferenceEngine::Core ie; InferenceEngine::Core ie;
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exec_net = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exec_net = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t = p; std::map<std::string, uint64_t> t = p;
ASSERT_FALSE(t.empty()); ASSERT_FALSE(t.empty());
@ -250,9 +250,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTIPLE_NETWORKS, GetMetricAn
InferenceEngine::Core ie; InferenceEngine::Core ie;
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t1 = p; std::map<std::string, uint64_t> t1 = p;
ASSERT_FALSE(t1.empty()); ASSERT_FALSE(t1.empty());
@ -260,9 +260,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTIPLE_NETWORKS, GetMetricAn
ASSERT_NE(kv.second, 0); ASSERT_NE(kv.second, 0);
} }
InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t2 = p; std::map<std::string, uint64_t> t2 = p;
ASSERT_FALSE(t2.empty()); ASSERT_FALSE(t2.empty());
@ -288,14 +288,14 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
InferenceEngine::Core ie; InferenceEngine::Core ie;
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t1 = p; std::map<std::string, uint64_t> t1 = p;
ASSERT_TRUE(t1.empty()); ASSERT_TRUE(t1.empty());
{ {
InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t2 = p; std::map<std::string, uint64_t> t2 = p;
ASSERT_FALSE(t2.empty()); ASSERT_FALSE(t2.empty());
@ -303,9 +303,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
ASSERT_NE(kv.second, 0); ASSERT_NE(kv.second, 0);
} }
{ {
InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(actualCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(actualCnnNetwork, target_device);
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t3 = p; std::map<std::string, uint64_t> t3 = p;
ASSERT_FALSE(t3.empty()); ASSERT_FALSE(t3.empty());
@ -313,7 +313,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
ASSERT_NE(kv.second, 0); ASSERT_NE(kv.second, 0);
} }
} }
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t4 = p; std::map<std::string, uint64_t> t4 = p;
ASSERT_FALSE(t4.empty()); ASSERT_FALSE(t4.empty());
@ -327,7 +327,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
} }
} }
} }
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t5 = p; std::map<std::string, uint64_t> t5 = p;
ASSERT_FALSE(t5.empty()); ASSERT_FALSE(t5.empty());
@ -358,9 +358,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
networks.emplace_back(simpleCnnNetwork); networks.emplace_back(simpleCnnNetwork);
networks.emplace_back(simpleCnnNetwork); networks.emplace_back(simpleCnnNetwork);
InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t1 = p; std::map<std::string, uint64_t> t1 = p;
ASSERT_FALSE(t1.empty()); ASSERT_FALSE(t1.empty());
@ -371,7 +371,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
for (auto & thread : threads) { for (auto & thread : threads) {
thread = std::thread([&](){ thread = std::thread([&](){
auto value = counter++; auto value = counter++;
exec_net_map[value] = ie.LoadNetwork(networks[value], deviceName); exec_net_map[value] = ie.LoadNetwork(networks[value], target_device);
}); });
} }
@ -381,7 +381,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
} }
} }
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
std::map<std::string, uint64_t> t2 = p; std::map<std::string, uint64_t> t2 = p;
ASSERT_FALSE(t2.empty()); ASSERT_FALSE(t2.empty());

View File

@ -31,7 +31,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork_RemoteContext) {
networks.emplace_back(InferenceEngine::CNNNetwork(ngraph::builder::subgraph::makeSplitMultiConvConcat())); networks.emplace_back(InferenceEngine::CNNNetwork(ngraph::builder::subgraph::makeSplitMultiConvConcat()));
auto ocl_instance = std::make_shared<OpenCL>(); auto ocl_instance = std::make_shared<OpenCL>();
ie.SetConfig(config, deviceName); ie.SetConfig(config, target_device);
runParallel([&] () { runParallel([&] () {
auto value = counter++; auto value = counter++;
auto remote_context = make_shared_context(ie, CommonTestUtils::DEVICE_GPU, ocl_instance->_context.get()); auto remote_context = make_shared_context(ie, CommonTestUtils::DEVICE_GPU, ocl_instance->_context.get());

View File

@ -95,5 +95,6 @@ std::vector<std::string> disabledTestPatterns() {
R"(.*smoke_VirtualPlugin_BehaviorTests.*LoadedRemoteContext.*)", R"(.*smoke_VirtualPlugin_BehaviorTests.*LoadedRemoteContext.*)",
// Issue: CVS-88667 - Need to verify hetero interoperability // Issue: CVS-88667 - Need to verify hetero interoperability
R"(.*nightly_OVClassHeteroExecutableNetworlGetMetricTest.*SUPPORTED_(CONFIG_KEYS|METRICS).*)", R"(.*nightly_OVClassHeteroExecutableNetworlGetMetricTest.*SUPPORTED_(CONFIG_KEYS|METRICS).*)",
R"(.*VirtualPlugin.*BehaviorTests.*OVHoldersTest.*LoadedTensor.*target_device=AUTO.*)",
}; };
} }

View File

@ -20,14 +20,14 @@ std::pair<std::string, std::string> plugins[] = {
}; };
INSTANTIATE_TEST_SUITE_P(smoke_OVClassImportExportTestP, INSTANTIATE_TEST_SUITE_P(smoke_OVClassImportExportTestP,
OVClassImportExportTestP, OVClassExecutableNetworkImportExportTestP,
::testing::Values(std::string(CommonTestUtils::DEVICE_MYRIAD), ::testing::Values(std::string(CommonTestUtils::DEVICE_MYRIAD),
"HETERO:" + std::string(CommonTestUtils::DEVICE_MYRIAD))); "HETERO:" + std::string(CommonTestUtils::DEVICE_MYRIAD)));
#if defined(ENABLE_INTEL_CPU) && ENABLE_INTEL_CPU #if defined(ENABLE_INTEL_CPU) && ENABLE_INTEL_CPU
INSTANTIATE_TEST_SUITE_P(smoke_OVClassImportExportTestP_HETERO_CPU, INSTANTIATE_TEST_SUITE_P(smoke_OVClassImportExportTestP_HETERO_CPU,
OVClassImportExportTestP, OVClassExecutableNetworkImportExportTestP,
::testing::Values("HETERO:" + std::string(CommonTestUtils::DEVICE_MYRIAD) + ",CPU")); ::testing::Values("HETERO:" + std::string(CommonTestUtils::DEVICE_MYRIAD) + ",CPU"));
#endif #endif

View File

@ -22,7 +22,7 @@ std::pair<std::string, std::string> plugins[] = {
}; };
// //
// IE Class Common tests with <pluginName, deviceName params> // IE Class Common tests with <pluginName, target_device params>
// //
INSTANTIATE_TEST_SUITE_P(OVClassBasicTestP_smoke, OVClassBasicTestP, ::testing::ValuesIn(plugins)); INSTANTIATE_TEST_SUITE_P(OVClassBasicTestP_smoke, OVClassBasicTestP, ::testing::ValuesIn(plugins));
@ -39,7 +39,7 @@ TEST_P(OVClassNetworkTestP_VPU_GetMetric, smoke_OptimizationCapabilitiesReturnsF
ov::Core ie; ov::Core ie;
OV_ASSERT_PROPERTY_SUPPORTED(ov::device::capabilities) OV_ASSERT_PROPERTY_SUPPORTED(ov::device::capabilities)
std::vector<std::string> device_capabilities; std::vector<std::string> device_capabilities;
ASSERT_NO_THROW(device_capabilities = ie.get_property(deviceName, ov::device::capabilities)); ASSERT_NO_THROW(device_capabilities = ie.get_property(target_device, ov::device::capabilities));
ASSERT_EQ(device_capabilities.size(), 2); ASSERT_EQ(device_capabilities.size(), 2);
ASSERT_NE(std::find(device_capabilities.begin(), device_capabilities.end(), ov::device::capability::EXPORT_IMPORT), ASSERT_NE(std::find(device_capabilities.begin(), device_capabilities.end(), ov::device::capability::EXPORT_IMPORT),
device_capabilities.end()); device_capabilities.end());

View File

@ -21,7 +21,7 @@ std::pair<std::string, std::string> plugins[] = {
}; };
// //
// IE Class Common tests with <pluginName, deviceName params> // IE Class Common tests with <pluginName, target_device params>
// //
INSTANTIATE_TEST_SUITE_P( INSTANTIATE_TEST_SUITE_P(
@ -43,7 +43,7 @@ TEST_P(IEClassNetworkTestP_VPU_GetMetric, smoke_OptimizationCapabilitiesReturnsF
ASSERT_METRIC_SUPPORTED_IE(METRIC_KEY(OPTIMIZATION_CAPABILITIES)) ASSERT_METRIC_SUPPORTED_IE(METRIC_KEY(OPTIMIZATION_CAPABILITIES))
InferenceEngine::Parameter optimizationCapabilitiesParameter; InferenceEngine::Parameter optimizationCapabilitiesParameter;
ASSERT_NO_THROW(optimizationCapabilitiesParameter = ie.GetMetric(deviceName, METRIC_KEY(OPTIMIZATION_CAPABILITIES))); ASSERT_NO_THROW(optimizationCapabilitiesParameter = ie.GetMetric(target_device, METRIC_KEY(OPTIMIZATION_CAPABILITIES)));
const auto optimizationCapabilities = optimizationCapabilitiesParameter.as<std::vector<std::string>>(); const auto optimizationCapabilities = optimizationCapabilitiesParameter.as<std::vector<std::string>>();
ASSERT_EQ(optimizationCapabilities.size(), 2); ASSERT_EQ(optimizationCapabilities.size(), 2);

View File

@ -9,51 +9,31 @@
#include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/plugin_cache.hpp"
#include "common_test_utils/file_utils.hpp" #include "common_test_utils/file_utils.hpp"
#include "openvino/util/file_util.hpp" #include "openvino/util/file_util.hpp"
#include "functional_test_utils/summary/api_summary.hpp"
namespace BehaviorTestsUtils { namespace BehaviorTestsUtils {
using namespace CommonTestUtils; using namespace CommonTestUtils;
typedef std::tuple< class IEInferRequestTestBase : public ov::test::behavior::APIBaseTest {
InferenceEngine::Precision, // Network precision private:
std::string, // Device name void set_api_entity() override {
std::map<std::string, std::string> // Config api_entity = ov::test::utils::ov_entity::ie_infer_request;
> BehaviorBasicParams; };
};
class BehaviorTestsBasic : public testing::WithParamInterface<BehaviorBasicParams>, class IEExecutableNetworkTestBase : public ov::test::behavior::APIBaseTest {
public CommonTestUtils::TestsCommon { private:
public: void set_api_entity() override {
static std::string getTestCaseName(testing::TestParamInfo<BehaviorBasicParams> obj) { api_entity = ov::test::utils::ov_entity::ie_executable_network;
InferenceEngine::Precision netPrecision; };
std::string targetDevice; };
std::map<std::string, std::string> configuration;
std::tie(netPrecision, targetDevice, configuration) = obj.param;
std::ostringstream result;
result << "netPRC=" << netPrecision.name() << "_";
result << "targetDevice=" << targetDevice;
if (!configuration.empty()) {
result << "config=" << configuration;
}
return result.str();
}
void SetUp() override { class IEPluginTestBase : public ov::test::behavior::APIBaseTest {
SKIP_IF_CURRENT_TEST_IS_DISABLED() private:
std::tie(netPrecision, targetDevice, configuration) = this->GetParam(); void set_api_entity() override {
function = ngraph::builder::subgraph::makeConvPoolRelu(); api_entity = ov::test::utils::ov_entity::ie_plugin;
} };
void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
}
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
std::shared_ptr<ngraph::Function> function;
InferenceEngine::Precision netPrecision;
std::string targetDevice;
std::map<std::string, std::string> configuration;
}; };
typedef std::tuple< typedef std::tuple<
@ -62,13 +42,14 @@ typedef std::tuple<
> InferRequestParams; > InferRequestParams;
class InferRequestTests : public testing::WithParamInterface<InferRequestParams>, class InferRequestTests : public testing::WithParamInterface<InferRequestParams>,
public CommonTestUtils::TestsCommon { public IEInferRequestTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
std::string targetDevice; std::string targetDevice;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
std::tie(targetDevice, configuration) = obj.param; std::tie(targetDevice, configuration) = obj.param;
std::ostringstream result; std::ostringstream result;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
result << "targetDevice=" << targetDevice << "_"; result << "targetDevice=" << targetDevice << "_";
if (!configuration.empty()) { if (!configuration.empty()) {
for (auto &configItem : configuration) { for (auto &configItem : configuration) {
@ -79,19 +60,21 @@ public:
} }
void SetUp() override { void SetUp() override {
std::tie(target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(targetDevice, configuration) = this->GetParam(); APIBaseTest::SetUp();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
} }
void TearDown() override { void TearDown() override {
if (!configuration.empty()) { if (!configuration.empty()) {
PluginCache::get().reset(); PluginCache::get().reset();
} }
APIBaseTest::TearDown();
} }
protected: protected:
@ -99,8 +82,7 @@ protected:
InferenceEngine::ExecutableNetwork execNet; InferenceEngine::ExecutableNetwork execNet;
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(); std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
std::shared_ptr<ngraph::Function> function; std::shared_ptr<ngraph::Function> function;
std::string targetDevice; std::map<std::string, std::string> configuration;;
std::map<std::string, std::string> configuration;
}; };
inline InferenceEngine::Core createIECoreWithTemplate() { inline InferenceEngine::Core createIECoreWithTemplate() {
@ -118,7 +100,7 @@ class IEClassNetworkTest : public ov::test::behavior::OVClassNetworkTest {
public: public:
InferenceEngine::CNNNetwork actualCnnNetwork, simpleCnnNetwork, multinputCnnNetwork, ksoCnnNetwork; InferenceEngine::CNNNetwork actualCnnNetwork, simpleCnnNetwork, multinputCnnNetwork, ksoCnnNetwork;
void SetUp() override { void SetUp() {
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
OVClassNetworkTest::SetUp(); OVClassNetworkTest::SetUp();
// Generic network // Generic network
@ -132,13 +114,73 @@ public:
} }
}; };
class IEClassBaseTestP : public IEClassNetworkTest, public ::testing::WithParamInterface<std::string> { class IEClassBaseTestP : public IEClassNetworkTest,
public ::testing::WithParamInterface<std::string>,
public IEPluginTestBase {
public: public:
std::string deviceName;
void SetUp() override { void SetUp() override {
target_device = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
IEClassNetworkTest::SetUp(); IEClassNetworkTest::SetUp();
deviceName = GetParam(); }
};
class IEExecNetClassBaseTestP : public IEClassNetworkTest,
public ::testing::WithParamInterface<std::string>,
public IEExecutableNetworkTestBase {
public:
void SetUp() override {
target_device = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
IEClassNetworkTest::SetUp();
}
};
typedef std::tuple<
InferenceEngine::Precision, // Network precision
std::string, // Device name
std::map<std::string, std::string> // Config
> BehaviorBasicParams;
class BehaviorTestsBasicBase : public testing::WithParamInterface<BehaviorBasicParams> {
public:
static std::string getTestCaseName(testing::TestParamInfo<BehaviorBasicParams> obj) {
InferenceEngine::Precision netPrecision;
std::string targetDevice;
std::map<std::string, std::string> configuration;
std::tie(netPrecision, targetDevice, configuration) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '_');
std::ostringstream result;
result << "netPRC=" << netPrecision.name() << "_";
result << "targetDevice=" << targetDevice << "_";
if (!configuration.empty()) {
result << "config=" << configuration;
}
return result.str();
}
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
std::shared_ptr<ngraph::Function> function;
InferenceEngine::Precision netPrecision;
std::map<std::string, std::string> configuration;
};
class BehaviorTestsBasic : public BehaviorTestsBasicBase,
public IEPluginTestBase {
protected:
void SetUp() override {
std::tie(netPrecision, target_device, configuration) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu();
}
void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
APIBaseTest::TearDown();
} }
}; };
} // namespace BehaviorTestsUtils } // namespace BehaviorTestsUtils

View File

@ -4,6 +4,13 @@
#pragma once #pragma once
#include <signal.h>
#include <setjmp.h>
#ifdef _WIN32
#include <process.h>
#endif
#include <gtest/gtest.h> #include <gtest/gtest.h>
#include "ngraph_functions/subgraph_builders.hpp" #include "ngraph_functions/subgraph_builders.hpp"
@ -11,13 +18,15 @@
#include "common_test_utils/test_common.hpp" #include "common_test_utils/test_common.hpp"
#include "common_test_utils/test_constants.hpp" #include "common_test_utils/test_constants.hpp"
#include "common_test_utils/common_utils.hpp" #include "common_test_utils/common_utils.hpp"
#include "common_test_utils/crash_handler.hpp"
#include "common_test_utils/file_utils.hpp" #include "common_test_utils/file_utils.hpp"
#include "openvino/util/file_util.hpp"
#include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/plugin_cache.hpp"
#include "functional_test_utils/ov_plugin_cache.hpp" #include "functional_test_utils/ov_plugin_cache.hpp"
#include "functional_test_utils/skip_tests_config.hpp" #include "functional_test_utils/skip_tests_config.hpp"
#include "functional_test_utils/blob_utils.hpp" #include "functional_test_utils/blob_utils.hpp"
#include "functional_test_utils/summary/api_summary.hpp"
#include "openvino/util/file_util.hpp"
namespace ov { namespace ov {
namespace test { namespace test {
@ -33,18 +42,87 @@ inline std::shared_ptr<ngraph::Function> getDefaultNGraphFunctionForTheDevice(st
return ngraph::builder::subgraph::makeConvPoolRelu(inputShape, ngPrc); return ngraph::builder::subgraph::makeConvPoolRelu(inputShape, ngPrc);
} }
class APIBaseTest : public CommonTestUtils::TestsCommon {
private:
// place to jump in case of a crash
int jmpRes = 0;
// in case of crash jump will be made and work will be continued
const std::unique_ptr<CommonTestUtils::CrashHandler> crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());
protected:
std::string target_device = "";
ov::test::utils::ov_entity api_entity = ov::test::utils::ov_entity::undefined;
ov::test::utils::ApiSummary& api_summary = ov::test::utils::ApiSummary::getInstance();
public:
APIBaseTest() = default;
virtual void set_api_entity() { api_entity = ov::test::utils::ov_entity::undefined; }
void SetUp() override {
set_api_entity();
api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::CRASHED);
#ifdef _WIN32
jmpRes = setjmp(CommonTestUtils::env);
#else
jmpRes = sigsetjmp(CommonTestUtils::env, 0);
#endif
if (jmpRes == CommonTestUtils::JMP_STATUS::ok) {
crashHandler->StartTimer();
} else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::HANGED);
GTEST_FAIL();
}
}
void TearDown() override {
if (api_entity == ov::test::utils::ov_entity::undefined) {
set_api_entity();
}
if (this->HasFailure()) {
api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::FAILED);
} else if (this->IsSkipped()) {
api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::SKIPPED);
} else {
api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::PASSED);
}
}
};
class OVInferRequestTestBase : public APIBaseTest {
private:
void set_api_entity() override {
api_entity = ov::test::utils::ov_entity::ov_infer_request;
};
};
class OVCompiledNetworkTestBase : public APIBaseTest {
private:
void set_api_entity() override {
api_entity = ov::test::utils::ov_entity::ov_compiled_model;
};
};
class OVPluginTestBase : public APIBaseTest {
private:
void set_api_entity() override {
api_entity = ov::test::utils::ov_entity::ov_plugin;
};
};
typedef std::tuple< typedef std::tuple<
std::string, // Device name std::string, // Device name
ov::AnyMap // Config ov::AnyMap // Config
> InferRequestParams; > InferRequestParams;
class OVInferRequestTests : public testing::WithParamInterface<InferRequestParams>, class OVInferRequestTests : public testing::WithParamInterface<InferRequestParams>,
public CommonTestUtils::TestsCommon { public OVInferRequestTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
std::string targetDevice; std::string targetDevice;
ov::AnyMap configuration; ov::AnyMap configuration;
std::tie(targetDevice, configuration) = obj.param; std::tie(targetDevice, configuration) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << targetDevice << "_"; result << "targetDevice=" << targetDevice << "_";
if (!configuration.empty()) { if (!configuration.empty()) {
@ -58,21 +136,23 @@ public:
} }
void SetUp() override { void SetUp() override {
std::tie(target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(targetDevice, configuration) = this->GetParam(); APIBaseTest::SetUp();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
ov::AnyMap params; ov::AnyMap params;
for (auto&& v : configuration) { for (auto&& v : configuration) {
params.emplace(v.first, v.second); params.emplace(v.first, v.second);
} }
execNet = core->compile_model(function, targetDevice, params); execNet = core->compile_model(function, target_device, params);
} }
void TearDown() override { void TearDown() override {
if (!configuration.empty()) { if (!configuration.empty()) {
utils::PluginCache::get().reset(); PluginCache::get().reset();
} }
APIBaseTest::TearDown();
} }
protected: protected:
@ -95,11 +175,11 @@ inline ov::Core createCoreWithTemplate() {
return core; return core;
} }
class OVClassNetworkTest : public ::testing::Test { class OVClassNetworkTest {
public: public:
std::shared_ptr<ngraph::Function> actualNetwork, simpleNetwork, multinputNetwork, ksoNetwork; std::shared_ptr<ngraph::Function> actualNetwork, simpleNetwork, multinputNetwork, ksoNetwork;
void SetUp() override { void SetUp() {
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
// Generic network // Generic network
actualNetwork = ngraph::builder::subgraph::makeSplitConvConcat(); actualNetwork = ngraph::builder::subgraph::makeSplitConvConcat();
@ -129,18 +209,33 @@ public:
} }
}; };
class OVClassBaseTestP : public OVClassNetworkTest, public ::testing::WithParamInterface<std::string> { class OVClassBaseTestP : public OVClassNetworkTest,
public ::testing::WithParamInterface<std::string>,
public OVPluginTestBase {
public: public:
std::string deviceName;
void SetUp() override { void SetUp() override {
target_device = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
// TODO: Remove it after fixing issue 69529
// w/a for myriad (cann't store 2 caches simultaneously)
PluginCache::get().reset();
OVClassNetworkTest::SetUp();
}
};
class OVCompiledModelClassBaseTestP : public OVClassNetworkTest,
public ::testing::WithParamInterface<std::string>,
public OVCompiledNetworkTestBase {
public:
void SetUp() override {
target_device = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
// TODO: Remove it after fixing issue 69529 // TODO: Remove it after fixing issue 69529
// w/a for myriad (cann't store 2 caches simultaneously) // w/a for myriad (cann't store 2 caches simultaneously)
PluginCache::get().reset(); PluginCache::get().reset();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
OVClassNetworkTest::SetUp(); OVClassNetworkTest::SetUp();
deviceName = GetParam();
} }
}; };
@ -148,16 +243,17 @@ using PriorityParams = std::tuple<
std::string, // Device name std::string, // Device name
ov::AnyMap // device priority Configuration key ov::AnyMap // device priority Configuration key
>; >;
class OVClassExecutableNetworkGetMetricTest_Priority : public ::testing::Test, public ::testing::WithParamInterface<PriorityParams> { class OVClassExecutableNetworkGetMetricTest_Priority : public ::testing::WithParamInterface<PriorityParams>,
public OVCompiledNetworkTestBase {
protected: protected:
std::string deviceName;
ov::AnyMap configuration; ov::AnyMap configuration;
std::shared_ptr<ngraph::Function> simpleNetwork; std::shared_ptr<ngraph::Function> simpleNetwork;
public: public:
void SetUp() override { void SetUp() override {
std::tie(target_device, configuration) = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(deviceName, configuration) = GetParam(); APIBaseTest::SetUp();
simpleNetwork = ngraph::builder::subgraph::makeSingleConv(); simpleNetwork = ngraph::builder::subgraph::makeSingleConv();
} }
}; };

View File

@ -18,18 +18,17 @@
namespace ExecutionGraphTests { namespace ExecutionGraphTests {
class ExecGraphUniqueNodeNames : public testing::WithParamInterface<LayerTestsUtils::basicParams>, class ExecGraphUniqueNodeNames : public testing::WithParamInterface<LayerTestsUtils::basicParams>,
public CommonTestUtils::TestsCommon { public BehaviorTestsUtils::IEExecutableNetworkTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<LayerTestsUtils::basicParams> obj); static std::string getTestCaseName(testing::TestParamInfo<LayerTestsUtils::basicParams> obj);
void SetUp() override; void SetUp() override;
void TearDown() override;
protected: protected:
std::string targetDevice;
std::shared_ptr<ngraph::Function> fnPtr; std::shared_ptr<ngraph::Function> fnPtr;
}; };
class ExecGraphSerializationTest : public CommonTestUtils::TestsCommon, public testing::WithParamInterface<std::string> { class ExecGraphSerializationTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase,
public testing::WithParamInterface<std::string> {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj); static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);
void SetUp() override; void SetUp() override;
@ -56,6 +55,6 @@ protected:
std::pair<bool, std::string> compare_docs(const pugi::xml_document &doc1, std::pair<bool, std::string> compare_docs(const pugi::xml_document &doc1,
const pugi::xml_document &doc2); const pugi::xml_document &doc2);
std::string deviceName, m_out_xml_path, m_out_bin_path; std::string m_out_xml_path, m_out_bin_path;
}; };
} // namespace ExecutionGraphTests } // namespace ExecutionGraphTests

View File

@ -9,15 +9,16 @@
#include "openvino/core/model.hpp" #include "openvino/core/model.hpp"
namespace BehaviorTestsDefinitions { namespace BehaviorTestsDefinitions {
class ExecutableNetworkBaseTest : public testing::WithParamInterface<BehaviorTestsUtils::InferRequestParams>, class ExecutableNetworkBaseTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase,
public CommonTestUtils::TestsCommon { public testing::WithParamInterface<BehaviorTestsUtils::InferRequestParams> {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<BehaviorTestsUtils::InferRequestParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<BehaviorTestsUtils::InferRequestParams> obj) {
std::string targetDevice; std::string target_device;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
std::tie(targetDevice, configuration) = obj.param; std::tie(target_device, configuration) = obj.param;
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << targetDevice << "_"; std::replace(target_device.begin(), target_device.end(), ':', '.');
result << "target_device=" << target_device << "_";
if (!configuration.empty()) { if (!configuration.empty()) {
using namespace CommonTestUtils; using namespace CommonTestUtils;
result << "config=" << configuration; result << "config=" << configuration;
@ -26,55 +27,49 @@ public:
} }
void SetUp() override { void SetUp() override {
std::tie(target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(targetDevice, configuration) = this->GetParam(); ov::test::behavior::APIBaseTest::SetUp();
ie = PluginCache::get().ie(targetDevice); ie = PluginCache::get().ie(target_device);
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
} }
void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
}
protected: protected:
InferenceEngine::CNNNetwork cnnNet; InferenceEngine::CNNNetwork cnnNet;
std::shared_ptr<InferenceEngine::Core> ie; std::shared_ptr<InferenceEngine::Core> ie;
std::shared_ptr<ngraph::Function> function; std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
}; };
TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutable) { TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutable) {
ASSERT_NO_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration)); ASSERT_NO_THROW(auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration));
} }
TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableWithIncorrectConfig) { TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableWithIncorrectConfig) {
std::map<std::string, std::string> incorrectConfig = {{ "abc", "def" }}; std::map<std::string, std::string> incorrectConfig = {{ "abc", "def" }};
ASSERT_ANY_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, incorrectConfig)); ASSERT_ANY_THROW(auto execNet = ie->LoadNetwork(cnnNet, target_device, incorrectConfig));
} }
TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCreateInferRequest) { TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCreateInferRequest) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
ASSERT_NO_THROW(auto req = execNet.CreateInferRequest()); ASSERT_NO_THROW(auto req = execNet.CreateInferRequest());
} }
TEST_P(ExecutableNetworkBaseTest, checkGetExecGraphInfoIsNotNullptr) { TEST_P(ExecutableNetworkBaseTest, checkGetExecGraphInfoIsNotNullptr) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
InferenceEngine::CNNNetwork execGraph = execNet.GetExecGraphInfo(); InferenceEngine::CNNNetwork execGraph = execNet.GetExecGraphInfo();
ASSERT_NE(execGraph.getFunction(), nullptr); ASSERT_NE(execGraph.getFunction(), nullptr);
} }
TEST_P(ExecutableNetworkBaseTest, checkGetMetric) { TEST_P(ExecutableNetworkBaseTest, checkGetMetric) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
ASSERT_NO_THROW(execNet.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(execNet.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
} }
TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckConfig) { TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckConfig) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
for (const auto& configItem : configuration) { for (const auto& configItem : configuration) {
InferenceEngine::Parameter param; InferenceEngine::Parameter param;
ASSERT_NO_THROW(param = execNet.GetConfig(configItem.first)); ASSERT_NO_THROW(param = execNet.GetConfig(configItem.first));
@ -84,7 +79,7 @@ TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckCo
} }
TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNet) { TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNet) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice); auto execNet = ie->LoadNetwork(cnnNet, target_device);
std::map<std::string, InferenceEngine::Parameter> config; std::map<std::string, InferenceEngine::Parameter> config;
for (const auto& confItem : configuration) { for (const auto& confItem : configuration) {
config.insert({confItem.first, InferenceEngine::Parameter(confItem.second)}); config.insert({confItem.first, InferenceEngine::Parameter(confItem.second)});
@ -93,7 +88,7 @@ TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNet) {
} }
TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetWithIncorrectConfig) { TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetWithIncorrectConfig) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice); auto execNet = ie->LoadNetwork(cnnNet, target_device);
std::map<std::string, std::string> incorrectConfig = {{ "abc", "def" }}; std::map<std::string, std::string> incorrectConfig = {{ "abc", "def" }};
std::map<std::string, InferenceEngine::Parameter> config; std::map<std::string, InferenceEngine::Parameter> config;
for (const auto& confItem : incorrectConfig) { for (const auto& confItem : incorrectConfig) {
@ -103,7 +98,7 @@ TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetWithIncorrectConfig) {
} }
TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetAndCheckConfigAndCheck) { TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetAndCheckConfigAndCheck) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice); auto execNet = ie->LoadNetwork(cnnNet, target_device);
std::map<std::string, InferenceEngine::Parameter> config; std::map<std::string, InferenceEngine::Parameter> config;
for (const auto& confItem : configuration) { for (const auto& confItem : configuration) {
config.insert({confItem.first, InferenceEngine::Parameter(confItem.second)}); config.insert({confItem.first, InferenceEngine::Parameter(confItem.second)});
@ -120,7 +115,7 @@ TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetAndCheckConfigAndCheck) {
TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworks) { TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworks) {
std::vector<InferenceEngine::ExecutableNetwork> vec; std::vector<InferenceEngine::ExecutableNetwork> vec;
for (auto i = 0; i < 2; i++) { for (auto i = 0; i < 2; i++) {
ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, targetDevice, configuration))); ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, target_device, configuration)));
ASSERT_NE(nullptr, cnnNet.getFunction()); ASSERT_NE(nullptr, cnnNet.getFunction());
} }
} }
@ -128,24 +123,24 @@ TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworks) {
TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworksAndCheckFunction) { TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworksAndCheckFunction) {
std::vector<InferenceEngine::ExecutableNetwork> vec; std::vector<InferenceEngine::ExecutableNetwork> vec;
for (auto i = 0; i < 2; i++) { for (auto i = 0; i < 2; i++) {
ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, targetDevice, configuration))); ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, target_device, configuration)));
ASSERT_NE(nullptr, vec[i].GetExecGraphInfo().getFunction()); ASSERT_NE(nullptr, vec[i].GetExecGraphInfo().getFunction());
ASSERT_NE(vec.begin()->GetExecGraphInfo().getFunction(), vec[i].GetExecGraphInfo().getFunction()); ASSERT_NE(vec.begin()->GetExecGraphInfo().getFunction(), vec[i].GetExecGraphInfo().getFunction());
} }
} }
TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfo) { TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfo) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
ASSERT_NO_THROW(auto inInfo = execNet.GetInputsInfo()); ASSERT_NO_THROW(auto inInfo = execNet.GetInputsInfo());
} }
TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfo) { TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfo) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
ASSERT_NO_THROW(auto outInfo = execNet.GetOutputsInfo()); ASSERT_NO_THROW(auto outInfo = execNet.GetOutputsInfo());
} }
TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) { TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
auto inInfo = execNet.GetInputsInfo(); auto inInfo = execNet.GetInputsInfo();
auto inCnnInfo = cnnNet.getInputsInfo(); auto inCnnInfo = cnnNet.getInputsInfo();
for (const auto& itemInInfo : inCnnInfo) { for (const auto& itemInInfo : inCnnInfo) {
@ -154,7 +149,7 @@ TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) {
} }
TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) { TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) {
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
auto outInfo = execNet.GetOutputsInfo(); auto outInfo = execNet.GetOutputsInfo();
auto outCnnInfo = cnnNet.getOutputsInfo(); auto outCnnInfo = cnnNet.getOutputsInfo();
for (const auto& itemOutInfo : outCnnInfo) { for (const auto& itemOutInfo : outCnnInfo) {
@ -165,7 +160,7 @@ TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) {
TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) { TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) {
InferenceEngine::CNNNetwork execGraph; InferenceEngine::CNNNetwork execGraph;
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo());
std::map<std::string, int> originalLayersMap; std::map<std::string, int> originalLayersMap;
for (const auto &layer : function->get_ops()) { for (const auto &layer : function->get_ops()) {
@ -215,7 +210,7 @@ TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) {
TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) { TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) {
InferenceEngine::CNNNetwork execGraph; InferenceEngine::CNNNetwork execGraph;
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo());
std::map<std::string, int> originalLayersMap; std::map<std::string, int> originalLayersMap;
for (const auto &layer : function->get_ops()) { for (const auto &layer : function->get_ops()) {
@ -278,7 +273,7 @@ TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoSerialization) {
InferenceEngine::CNNNetwork execGraph; InferenceEngine::CNNNetwork execGraph;
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo());
ASSERT_NO_THROW(execGraph.serialize(out_xml_path, out_bin_path)); ASSERT_NO_THROW(execGraph.serialize(out_xml_path, out_bin_path));
CommonTestUtils::removeIRFiles(out_xml_path, out_bin_path); CommonTestUtils::removeIRFiles(out_xml_path, out_bin_path);
@ -287,7 +282,7 @@ TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoSerialization) {
TEST_P(ExecutableNetworkBaseTest, canExport) { TEST_P(ExecutableNetworkBaseTest, canExport) {
auto ts = CommonTestUtils::GetTimestamp(); auto ts = CommonTestUtils::GetTimestamp();
std::string modelName = GetTestName().substr(0, CommonTestUtils::maxFileNameLength) + "_" + ts; std::string modelName = GetTestName().substr(0, CommonTestUtils::maxFileNameLength) + "_" + ts;
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
ASSERT_NO_THROW(execNet.Export(modelName)); ASSERT_NO_THROW(execNet.Export(modelName));
ASSERT_TRUE(CommonTestUtils::fileExists(modelName + ".xml")); ASSERT_TRUE(CommonTestUtils::fileExists(modelName + ".xml"));
ASSERT_TRUE(CommonTestUtils::fileExists(modelName + ".bin")); ASSERT_TRUE(CommonTestUtils::fileExists(modelName + ".bin"));
@ -300,14 +295,29 @@ TEST_P(ExecutableNetworkBaseTest, pluginDoesNotChangeOriginalNetwork) {
compare_functions(cnnNet.getFunction(), referenceNetwork); compare_functions(cnnNet.getFunction(), referenceNetwork);
} }
using ExecNetSetPrecision = BehaviorTestsUtils::BehaviorTestsBasic; class ExecNetSetPrecision : public BehaviorTestsUtils::BehaviorTestsBasicBase,
public BehaviorTestsUtils::IEExecutableNetworkTestBase {
protected:
void SetUp() override {
std::tie(netPrecision, target_device, configuration) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu();
}
void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
APIBaseTest::TearDown();
}
};
TEST_P(ExecNetSetPrecision, canSetInputPrecisionForNetwork) { TEST_P(ExecNetSetPrecision, canSetInputPrecisionForNetwork) {
InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::CNNNetwork cnnNet(function);
InferenceEngine::InputsDataMap inputs_info = cnnNet.getInputsInfo(); InferenceEngine::InputsDataMap inputs_info = cnnNet.getInputsInfo();
ASSERT_EQ(1u, inputs_info.size()); ASSERT_EQ(1u, inputs_info.size());
inputs_info.begin()->second->setPrecision(netPrecision); inputs_info.begin()->second->setPrecision(netPrecision);
ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration));
} }
TEST_P(ExecNetSetPrecision, canSetOutputPrecisionForNetwork) { TEST_P(ExecNetSetPrecision, canSetOutputPrecisionForNetwork) {
@ -315,7 +325,7 @@ TEST_P(ExecNetSetPrecision, canSetOutputPrecisionForNetwork) {
InferenceEngine::OutputsDataMap outputs_info = cnnNet.getOutputsInfo(); InferenceEngine::OutputsDataMap outputs_info = cnnNet.getOutputsInfo();
ASSERT_EQ(outputs_info.size(), 1u); ASSERT_EQ(outputs_info.size(), 1u);
outputs_info.begin()->second->setPrecision(netPrecision); outputs_info.begin()->second->setPrecision(netPrecision);
ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration));
} }
TEST_P(ExecutableNetworkBaseTest, loadIncorrectV10Model) { TEST_P(ExecutableNetworkBaseTest, loadIncorrectV10Model) {
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
@ -337,7 +347,7 @@ TEST_P(ExecutableNetworkBaseTest, loadIncorrectV10Model) {
function->set_friendly_name("SimpleReLU"); function->set_friendly_name("SimpleReLU");
} }
InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::CNNNetwork cnnNet(function);
EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration));
} }
TEST_P(ExecutableNetworkBaseTest, loadIncorrectV11Model) { TEST_P(ExecutableNetworkBaseTest, loadIncorrectV11Model) {
@ -360,7 +370,7 @@ TEST_P(ExecutableNetworkBaseTest, loadIncorrectV11Model) {
function->set_friendly_name("SimpleReLU"); function->set_friendly_name("SimpleReLU");
} }
InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::CNNNetwork cnnNet(function);
EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration));
} }
} // namespace BehaviorTestsDefinitions } // namespace BehaviorTestsDefinitions

View File

@ -8,6 +8,7 @@
#include "base/behavior_test_utils.hpp" #include "base/behavior_test_utils.hpp"
#include "common_test_utils/common_utils.hpp" #include "common_test_utils/common_utils.hpp"
#include "common_test_utils/file_utils.hpp"
#include "common_test_utils/test_assertions.hpp" #include "common_test_utils/test_assertions.hpp"
#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT #ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
@ -32,18 +33,19 @@ namespace BehaviorTestsDefinitions {
} }
class IEClassExecutableNetworkGetMetricTestForSpecificConfig : class IEClassExecutableNetworkGetMetricTestForSpecificConfig :
public BehaviorTestsUtils::IEExecutableNetworkTestBase,
public BehaviorTestsUtils::IEClassNetworkTest, public BehaviorTestsUtils::IEClassNetworkTest,
public ::testing::WithParamInterface<std::tuple<std::string, std::pair<std::string, std::string>>> { public ::testing::WithParamInterface<std::tuple<std::string, std::pair<std::string, std::string>>> {
protected: protected:
std::string deviceName;
std::string configKey; std::string configKey;
std::string configValue; std::string configValue;
public: public:
void SetUp() override { void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED(); target_device = std::get<0>(GetParam());
IEClassNetworkTest::SetUp();
deviceName = std::get<0>(GetParam());
std::tie(configKey, configValue) = std::get<1>(GetParam()); std::tie(configKey, configValue) = std::get<1>(GetParam());
SKIP_IF_CURRENT_TEST_IS_DISABLED();
ov::test::behavior::APIBaseTest::SetUp();
IEClassNetworkTest::SetUp();
} }
}; };
@ -51,17 +53,18 @@ public:
// Hetero Executable network case // Hetero Executable network case
// //
class IEClassHeteroExecutableNetworkGetMetricTest : class IEClassHeteroExecutableNetworkGetMetricTest :
public BehaviorTestsUtils::IEExecutableNetworkTestBase,
public BehaviorTestsUtils::IEClassNetworkTest, public BehaviorTestsUtils::IEClassNetworkTest,
public ::testing::WithParamInterface<std::string> { public ::testing::WithParamInterface<std::string> {
protected: protected:
std::string deviceName;
std::string heteroDeviceName; std::string heteroDeviceName;
public: public:
void SetUp() override { void SetUp() override {
target_device = GetParam();
heteroDeviceName = CommonTestUtils::DEVICE_HETERO + std::string(":") + GetParam() + std::string(",") + CommonTestUtils::DEVICE_CPU;
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
ov::test::behavior::APIBaseTest::SetUp();
IEClassNetworkTest::SetUp(); IEClassNetworkTest::SetUp();
deviceName = GetParam();
heteroDeviceName = CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + std::string(",") + CommonTestUtils::DEVICE_CPU;
} }
}; };
@ -70,13 +73,14 @@ public:
// ImportExportNetwork // ImportExportNetwork
// //
using IEClassImportExportTestP = BehaviorTestsUtils::IEClassBaseTestP; using IEClassGetMetricP = BehaviorTestsUtils::IEExecNetClassBaseTestP;
using IEClassImportExportTestP = IEClassGetMetricP;
TEST_P(IEClassImportExportTestP, smoke_ImportNetworkThrowsIfNoDeviceName) { TEST_P(IEClassImportExportTestP, smoke_ImportNetworkThrowsIfNoDeviceName) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
std::stringstream strm; std::stringstream strm;
InferenceEngine::ExecutableNetwork executableNetwork; InferenceEngine::ExecutableNetwork executableNetwork;
ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName)); ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, target_device));
ASSERT_NO_THROW(executableNetwork.Export(strm)); ASSERT_NO_THROW(executableNetwork.Export(strm));
IE_SUPPRESS_DEPRECATED_START IE_SUPPRESS_DEPRECATED_START
@ -88,9 +92,9 @@ TEST_P(IEClassImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
std::stringstream strm; std::stringstream strm;
InferenceEngine::ExecutableNetwork executableNetwork; InferenceEngine::ExecutableNetwork executableNetwork;
ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName)); ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, target_device));
ASSERT_NO_THROW(executableNetwork.Export(strm)); ASSERT_NO_THROW(executableNetwork.Export(strm));
ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, deviceName)); ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, target_device));
ASSERT_NO_THROW(executableNetwork.CreateInferRequest()); ASSERT_NO_THROW(executableNetwork.CreateInferRequest());
} }
@ -99,27 +103,28 @@ TEST_P(IEClassImportExportTestP, smoke_ExportUsingFileNameImportFromStreamNoThro
InferenceEngine::ExecutableNetwork executableNetwork; InferenceEngine::ExecutableNetwork executableNetwork;
std::string fileName{"ExportedNetwork"}; std::string fileName{"ExportedNetwork"};
{ {
ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName)); ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device));
ASSERT_NO_THROW(executableNetwork.Export(fileName)); ASSERT_NO_THROW(executableNetwork.Export(fileName));
} }
{ {
{ {
std::ifstream strm(fileName, std::ifstream::binary | std::ifstream::in); std::ifstream strm(fileName, std::ifstream::binary | std::ifstream::in);
ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, deviceName)); ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, target_device));
} }
ASSERT_EQ(0, remove(fileName.c_str())); ASSERT_EQ(0, remove(fileName.c_str()));
} }
ASSERT_NO_THROW(executableNetwork.CreateInferRequest()); ASSERT_NO_THROW(executableNetwork.CreateInferRequest());
CommonTestUtils::removeFile(fileName);
} }
using IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = BehaviorTestsUtils::IEClassBaseTestP; using IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = IEClassGetMetricP;
using IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = BehaviorTestsUtils::IEClassBaseTestP; using IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = IEClassGetMetricP;
using IEClassExecutableNetworkGetMetricTest_NETWORK_NAME = BehaviorTestsUtils::IEClassBaseTestP; using IEClassExecutableNetworkGetMetricTest_NETWORK_NAME = IEClassGetMetricP;
using IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = BehaviorTestsUtils::IEClassBaseTestP; using IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = IEClassGetMetricP;
using IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported = BehaviorTestsUtils::IEClassBaseTestP; using IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported = IEClassGetMetricP;
using IEClassExecutableNetworkGetConfigTest = BehaviorTestsUtils::IEClassBaseTestP; using IEClassExecutableNetworkGetConfigTest = IEClassGetMetricP;
using IEClassExecutableNetworkSetConfigTest = BehaviorTestsUtils::IEClassBaseTestP; using IEClassExecutableNetworkSetConfigTest = IEClassGetMetricP;
using IEClassExecutableNetworkGetConfigTest = BehaviorTestsUtils::IEClassBaseTestP; using IEClassExecutableNetworkGetConfigTest = IEClassGetMetricP;
// //
// ExecutableNetwork GetMetric / GetConfig // ExecutableNetwork GetMetric / GetConfig
@ -131,7 +136,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoT
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> configValues = p; std::vector<std::string> configValues = p;
@ -149,7 +154,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS))); ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS)));
std::vector<std::string> metricValues = p; std::vector<std::string> metricValues = p;
@ -167,7 +172,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = exeNetwork.GetMetric(EXEC_NETWORK_METRIC_KEY(NETWORK_NAME))); ASSERT_NO_THROW(p = exeNetwork.GetMetric(EXEC_NETWORK_METRIC_KEY(NETWORK_NAME)));
std::string networkname = p; std::string networkname = p;
@ -181,7 +186,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, G
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = exeNetwork.GetMetric(EXEC_NETWORK_METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS))); ASSERT_NO_THROW(p = exeNetwork.GetMetric(EXEC_NETWORK_METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)));
unsigned int value = p; unsigned int value = p;
@ -195,7 +200,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow)
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_THROW(p = exeNetwork.GetMetric("unsupported_metric"), InferenceEngine::Exception); ASSERT_THROW(p = exeNetwork.GetMetric("unsupported_metric"), InferenceEngine::Exception);
} }
@ -204,14 +209,14 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> configValues = p; std::vector<std::string> configValues = p;
for (auto &&confKey : configValues) { for (auto &&confKey : configValues) {
InferenceEngine::Parameter defaultValue; InferenceEngine::Parameter defaultValue;
ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey)); ASSERT_NO_THROW(defaultValue = ie.GetConfig(target_device, confKey));
ASSERT_FALSE(defaultValue.empty()); ASSERT_FALSE(defaultValue.empty());
} }
} }
@ -220,7 +225,7 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_THROW(p = exeNetwork.GetConfig("unsupported_config"), InferenceEngine::Exception); ASSERT_THROW(p = exeNetwork.GetConfig("unsupported_config"), InferenceEngine::Exception);
} }
@ -229,7 +234,7 @@ TEST_P(IEClassExecutableNetworkSetConfigTest, SetConfigThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_THROW(exeNetwork.SetConfig({{"unsupported_config", "some_value"}}), InferenceEngine::Exception); ASSERT_THROW(exeNetwork.SetConfig({{"unsupported_config", "some_value"}}), InferenceEngine::Exception);
} }
@ -238,7 +243,7 @@ TEST_P(IEClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(exeNetwork.SetConfig({{configKey, configValue}})); ASSERT_NO_THROW(exeNetwork.SetConfig({{configKey, configValue}}));
ASSERT_NO_THROW(p = exeNetwork.GetConfig(configKey)); ASSERT_NO_THROW(p = exeNetwork.GetConfig(configKey));
@ -249,7 +254,7 @@ TEST_P(IEClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) {
TEST_P(IEClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) { TEST_P(IEClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngine::Exception); ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngine::Exception);
} }
@ -258,10 +263,10 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigNoEmptyNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> devConfigValues = p; std::vector<std::string> devConfigValues = p;
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> execConfigValues = p; std::vector<std::string> execConfigValues = p;
@ -280,14 +285,14 @@ using IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = IEClas
using IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS = IEClassHeteroExecutableNetworkGetMetricTest; using IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS = IEClassHeteroExecutableNetworkGetMetricTest;
using IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME = IEClassHeteroExecutableNetworkGetMetricTest; using IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME = IEClassHeteroExecutableNetworkGetMetricTest;
using IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK = IEClassHeteroExecutableNetworkGetMetricTest; using IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK = IEClassHeteroExecutableNetworkGetMetricTest;
using IEClassExecutableNetworkGetMetricTest = BehaviorTestsUtils::IEClassBaseTestP; using IEClassExecutableNetworkGetMetricTest = IEClassGetMetricP;
TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoThrow) { TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter pHetero, pDevice; InferenceEngine::Parameter pHetero, pDevice;
InferenceEngine::ExecutableNetwork heteroExeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName); InferenceEngine::ExecutableNetwork heteroExeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName);
InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, target_device);
ASSERT_NO_THROW(pHetero = heteroExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(pHetero = heteroExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
ASSERT_NO_THROW(pDevice = deviceExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(pDevice = deviceExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
@ -320,7 +325,7 @@ TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricN
InferenceEngine::Parameter pHetero, pDevice; InferenceEngine::Parameter pHetero, pDevice;
InferenceEngine::ExecutableNetwork heteroExeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName); InferenceEngine::ExecutableNetwork heteroExeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName);
InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, target_device);
ASSERT_NO_THROW(pHetero = heteroExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS))); ASSERT_NO_THROW(pHetero = heteroExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS)));
ASSERT_NO_THROW(pDevice = deviceExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS))); ASSERT_NO_THROW(pDevice = deviceExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS)));
@ -369,13 +374,13 @@ TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK, GetMetricNoT
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
setHeteroNetworkAffinity(deviceName); setHeteroNetworkAffinity(target_device);
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName);
ASSERT_NO_THROW(p = exeNetwork.GetConfig("TARGET_FALLBACK")); ASSERT_NO_THROW(p = exeNetwork.GetConfig("TARGET_FALLBACK"));
std::string targets = p; std::string targets = p;
auto expectedTargets = deviceName + "," + CommonTestUtils::DEVICE_CPU; auto expectedTargets = target_device + "," + CommonTestUtils::DEVICE_CPU;
std::cout << "Exe network fallback targets: " << targets << std::endl; std::cout << "Exe network fallback targets: " << targets << std::endl;
ASSERT_EQ(expectedTargets, targets); ASSERT_EQ(expectedTargets, targets);

View File

@ -5,9 +5,11 @@
#pragma once #pragma once
#include "common_test_utils/test_common.hpp" #include "common_test_utils/test_common.hpp"
#include "common_test_utils/file_utils.hpp"
#include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/plugin_cache.hpp"
#include "ngraph_functions/subgraph_builders.hpp" #include "ngraph_functions/subgraph_builders.hpp"
#include "common_test_utils/file_utils.hpp"
#include "base/behavior_test_utils.hpp"
namespace BehaviorTestsDefinitions { namespace BehaviorTestsDefinitions {
@ -16,13 +18,12 @@ typedef std::tuple<
std::string> // Target device name std::string> // Target device name
LocaleParams; LocaleParams;
class CustomLocaleTest : public CommonTestUtils::TestsCommon, class CustomLocaleTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase,
public ::testing::WithParamInterface<LocaleParams> { public ::testing::WithParamInterface<LocaleParams> {
protected: protected:
std::shared_ptr<ngraph::Function> function; std::shared_ptr<ngraph::Function> function;
std::string localeName; std::string localeName;
std::string testName; std::string testName;
std::string deviceName;
void SetUp() override; void SetUp() override;
public: public:

View File

@ -118,7 +118,7 @@ TEST_P(InferRequestCallbackTests, ReturnResultNotReadyFromWaitInAsyncModeForTooS
function = SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38); function = SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38);
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
InferenceEngine::InferRequest req; InferenceEngine::InferRequest req;
ASSERT_NO_THROW(req = execNet.CreateInferRequest()); ASSERT_NO_THROW(req = execNet.CreateInferRequest());
@ -145,7 +145,7 @@ TEST_P(InferRequestCallbackTests, ImplDoseNotCopyCallback) {
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::CNNNetwork cnnNet(function);
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
{ {
auto somePtr = std::make_shared<int>(42); auto somePtr = std::make_shared<int>(42);

View File

@ -10,18 +10,7 @@
namespace BehaviorTestsDefinitions { namespace BehaviorTestsDefinitions {
class InferRequestCancellationTests : public BehaviorTestsUtils::InferRequestTests { using InferRequestCancellationTests = BehaviorTestsUtils::InferRequestTests;
public:
void SetUp() override {
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(targetDevice, configuration) = this->GetParam();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice, {1, 3, 640, 640});
cnnNet = InferenceEngine::CNNNetwork(function);
// Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
}
};
TEST_P(InferRequestCancellationTests, canCancelAsyncRequest) { TEST_P(InferRequestCancellationTests, canCancelAsyncRequest) {
// Create InferRequest // Create InferRequest

View File

@ -18,35 +18,37 @@ typedef std::tuple<
> InferRequestParams; > InferRequestParams;
class InferRequestConfigTest : public testing::WithParamInterface<InferRequestParams>, class InferRequestConfigTest : public testing::WithParamInterface<InferRequestParams>,
public CommonTestUtils::TestsCommon { public BehaviorTestsUtils::IEInferRequestTestBase {
public: public:
void SetUp() override { void SetUp() override {
std::tie(streamExecutorNumber, target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(streamExecutorNumber, targetDevice, configuration) = this->GetParam(); APIBaseTest::SetUp();
// Create CNNNetwork from ngrpah::Function // Create CNNNetwork from ngrpah::Function
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
} }
static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
std::string targetDevice;
size_t streamExecutorNumber;
std::map<std::string, std::string> configuration;
std::tie(streamExecutorNumber, targetDevice, configuration) = obj.param;
std::ostringstream result;
result << "targetDevice=" << targetDevice << "_";
result << "streamExecutorNumber=" << targetDevice << "_";
if (!configuration.empty()) {
result << "config=" << configuration;
}
return result.str();
}
void TearDown() override { void TearDown() override {
if (!configuration.empty()) { if (!configuration.empty()) {
PluginCache::get().reset(); PluginCache::get().reset();
} }
APIBaseTest::TearDown();
}
static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
std::string target_device;
size_t streamExecutorNumber;
std::map<std::string, std::string> configuration;
std::tie(streamExecutorNumber, target_device, configuration) = obj.param;
std::ostringstream result;
result << "target_device=" << target_device << "_";
result << "streamExecutorNumber=" << target_device << "_";
if (!configuration.empty()) {
result << "config=" << configuration;
}
return result.str();
} }
protected: protected:
@ -54,20 +56,22 @@ protected:
InferenceEngine::ExecutableNetwork execNet; InferenceEngine::ExecutableNetwork execNet;
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(); std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
std::shared_ptr<ngraph::Function> function; std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
size_t streamExecutorNumber; size_t streamExecutorNumber;
void set_api_entity() override { api_entity = ov::test::utils::ov_entity::ie_infer_request; }
inline InferenceEngine::InferRequest createInferRequestWithConfig() { inline InferenceEngine::InferRequest createInferRequestWithConfig() {
// Load config // Load config
configuration.insert({CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS), CONFIG_VALUE(YES)}); configuration.insert({CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS), CONFIG_VALUE(YES)});
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
ie->SetConfig(configuration, targetDevice); target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) {
ie->SetConfig(configuration, target_device);
} }
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
return req; return req;
} }
@ -76,9 +80,10 @@ protected:
TEST_P(InferRequestConfigTest, canSetExclusiveAsyncRequests) { TEST_P(InferRequestConfigTest, canSetExclusiveAsyncRequests) {
ASSERT_EQ(0ul, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_EQ(0ul, InferenceEngine::executorManager()->getExecutorsNumber());
ASSERT_NO_THROW(createInferRequestWithConfig()); ASSERT_NO_THROW(createInferRequestWithConfig());
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) {
ASSERT_EQ(streamExecutorNumber, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_EQ(streamExecutorNumber, InferenceEngine::executorManager()->getExecutorsNumber());
} }
} }
@ -86,9 +91,10 @@ TEST_P(InferRequestConfigTest, canSetExclusiveAsyncRequests) {
TEST_P(InferRequestConfigTest, withoutExclusiveAsyncRequests) { TEST_P(InferRequestConfigTest, withoutExclusiveAsyncRequests) {
ASSERT_EQ(0u, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_EQ(0u, InferenceEngine::executorManager()->getExecutorsNumber());
ASSERT_NO_THROW(createInferRequestWithConfig()); ASSERT_NO_THROW(createInferRequestWithConfig());
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) {
ASSERT_EQ(streamExecutorNumber, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_EQ(streamExecutorNumber, InferenceEngine::executorManager()->getExecutorsNumber());
} }
} }
@ -101,20 +107,21 @@ TEST_P(InferRequestConfigTest, ReusableCPUStreamsExecutor) {
// Load config // Load config
std::map<std::string, std::string> config = {{CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS), CONFIG_VALUE(NO)}}; std::map<std::string, std::string> config = {{CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS), CONFIG_VALUE(NO)}};
config.insert(configuration.begin(), configuration.end()); config.insert(configuration.begin(), configuration.end());
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
ASSERT_NO_THROW(ie->SetConfig(config, targetDevice)); target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) {
ASSERT_NO_THROW(ie->SetConfig(config, target_device));
} }
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, config); execNet = ie->LoadNetwork(cnnNet, target_device, config);
execNet.CreateInferRequest(); execNet.CreateInferRequest();
if ((targetDevice == CommonTestUtils::DEVICE_MYRIAD) || if ((target_device == CommonTestUtils::DEVICE_MYRIAD) ||
(targetDevice == CommonTestUtils::DEVICE_KEEMBAY)) { (target_device == CommonTestUtils::DEVICE_KEEMBAY)) {
ASSERT_EQ(1u, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_EQ(1u, InferenceEngine::executorManager()->getExecutorsNumber());
ASSERT_EQ(0u, InferenceEngine::executorManager()->getIdleCPUStreamsExecutorsNumber()); ASSERT_EQ(0u, InferenceEngine::executorManager()->getIdleCPUStreamsExecutorsNumber());
} else if ((targetDevice == CommonTestUtils::DEVICE_AUTO) || } else if ((target_device == CommonTestUtils::DEVICE_AUTO) ||
(targetDevice == CommonTestUtils::DEVICE_MULTI)) { (target_device == CommonTestUtils::DEVICE_MULTI)) {
} else { } else {
ASSERT_EQ(0u, InferenceEngine::executorManager()->getExecutorsNumber()); ASSERT_EQ(0u, InferenceEngine::executorManager()->getExecutorsNumber());
ASSERT_GE(2u, InferenceEngine::executorManager()->getIdleCPUStreamsExecutorsNumber()); ASSERT_GE(2u, InferenceEngine::executorManager()->getIdleCPUStreamsExecutorsNumber());

View File

@ -21,7 +21,7 @@ typedef std::tuple<
> dynamicBatchTestParams; > dynamicBatchTestParams;
class DynamicBatchTest : virtual public LayerTestsUtils::LayerTestsCommon, class DynamicBatchTest : virtual public LayerTestsUtils::LayerTestsCommon,
public testing::WithParamInterface<dynamicBatchTestParams> { public testing::WithParamInterface<dynamicBatchTestParams> {
private: private:
bool run_async = false; bool run_async = false;
size_t max_batch_size = 0; size_t max_batch_size = 0;

View File

@ -11,8 +11,8 @@
#include "shared_test_classes/subgraph/basic_lstm.hpp" #include "shared_test_classes/subgraph/basic_lstm.hpp"
namespace BehaviorTestsDefinitions { namespace BehaviorTestsDefinitions {
using InferRequestIOBBlobTest = BehaviorTestsUtils::InferRequestTests;
using namespace CommonTestUtils; using namespace CommonTestUtils;
using InferRequestIOBBlobTest = BehaviorTestsUtils::InferRequestTests;
TEST_P(InferRequestIOBBlobTest, CanCreateInferRequest) { TEST_P(InferRequestIOBBlobTest, CanCreateInferRequest) {
// Create InferRequest // Create InferRequest
@ -331,16 +331,25 @@ TEST_P(InferRequestIOBBlobTest, canInferWithGetOut) {
ASSERT_NO_THROW(InferenceEngine::Blob::Ptr outputBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first)); ASSERT_NO_THROW(InferenceEngine::Blob::Ptr outputBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first));
} }
class InferRequestIOBBlobSetPrecisionTest : public BehaviorTestsUtils::BehaviorTestsBasic { class InferRequestIOBBlobSetPrecisionTest : public BehaviorTestsUtils::BehaviorTestsBasicBase,
public: public BehaviorTestsUtils::IEInferRequestTestBase {
void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(netPrecision, targetDevice, configuration) = this->GetParam();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice);
cnnNet = InferenceEngine::CNNNetwork(function);
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
}
protected: protected:
void SetUp() override {
std::tie(netPrecision, target_device, configuration) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
cnnNet = InferenceEngine::CNNNetwork(function);
execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
}
void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
APIBaseTest::TearDown();
}
InferenceEngine::ExecutableNetwork execNet; InferenceEngine::ExecutableNetwork execNet;
InferenceEngine::CNNNetwork cnnNet; InferenceEngine::CNNNetwork cnnNet;
}; };
@ -386,16 +395,16 @@ typedef std::tuple<
> InferRequestIOBBlobSetLayoutParams; > InferRequestIOBBlobSetLayoutParams;
class InferRequestIOBBlobSetLayoutTest : public testing::WithParamInterface<InferRequestIOBBlobSetLayoutParams>, class InferRequestIOBBlobSetLayoutTest : public testing::WithParamInterface<InferRequestIOBBlobSetLayoutParams>,
public CommonTestUtils::TestsCommon { public ov::test::behavior::APIBaseTest {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<InferRequestIOBBlobSetLayoutParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<InferRequestIOBBlobSetLayoutParams> obj) {
InferenceEngine::Layout layout; InferenceEngine::Layout layout;
std::string targetDevice; std::string target_device;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
std::tie(layout, targetDevice, configuration) = obj.param; std::tie(layout, target_device, configuration) = obj.param;
std::ostringstream result; std::ostringstream result;
result << "layout=" << layout << "_"; result << "layout=" << layout << "_";
result << "targetDevice=" << targetDevice << "_"; result << "target_device=" << target_device << "_";
if (!configuration.empty()) { if (!configuration.empty()) {
result << "config=" << configuration; result << "config=" << configuration;
} }
@ -403,17 +412,18 @@ public:
} }
void SetUp() override { void SetUp() override {
std::tie(layout, target_device, configuration) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(layout, targetDevice, configuration) = this->GetParam();
function = ngraph::builder::subgraph::makeConvPoolRelu(); function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
} }
void TearDown() override { void TearDown() override {
if (!configuration.empty()) { if (!configuration.empty()) {
PluginCache::get().reset(); PluginCache::get().reset();
} }
APIBaseTest::SetUp();
} }
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(); std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
@ -421,7 +431,6 @@ public:
InferenceEngine::Layout layout; InferenceEngine::Layout layout;
InferenceEngine::CNNNetwork cnnNet; InferenceEngine::CNNNetwork cnnNet;
InferenceEngine::ExecutableNetwork execNet; InferenceEngine::ExecutableNetwork execNet;
std::string targetDevice;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
}; };

View File

@ -6,7 +6,7 @@
#include "common_test_utils/test_common.hpp" #include "common_test_utils/test_common.hpp"
#include <ie_core.hpp> #include "base/behavior_test_utils.hpp"
namespace BehaviorTestsDefinitions { namespace BehaviorTestsDefinitions {
typedef std::tuple< typedef std::tuple<
@ -16,7 +16,7 @@ typedef std::tuple<
std::map<std::string, std::string>> // device configuration std::map<std::string, std::string>> // device configuration
memoryStateParams; memoryStateParams;
class InferRequestVariableStateTest : public CommonTestUtils::TestsCommon, class InferRequestVariableStateTest : public BehaviorTestsUtils::IEInferRequestTestBase,
public testing::WithParamInterface<memoryStateParams> { public testing::WithParamInterface<memoryStateParams> {
protected: protected:
InferenceEngine::CNNNetwork net; InferenceEngine::CNNNetwork net;

View File

@ -10,15 +10,15 @@ namespace BehaviorTestsDefinitions {
class InferRequestPerfCountersTest : public BehaviorTestsUtils::InferRequestTests { class InferRequestPerfCountersTest : public BehaviorTestsUtils::InferRequestTests {
public: public:
void SetUp() override { void SetUp() override {
std::tie(target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(targetDevice, configuration) = this->GetParam(); APIBaseTest::SetUp();
ie = PluginCache::get().ie(targetDevice); function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice);
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
configuration.insert({ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }); configuration.insert({ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES });
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
} }
}; };

View File

@ -19,13 +19,14 @@ using InferRequestSetBlobByTypeParams = std::tuple<
>; >;
class InferRequestSetBlobByType : public testing::WithParamInterface<InferRequestSetBlobByTypeParams>, class InferRequestSetBlobByType : public testing::WithParamInterface<InferRequestSetBlobByTypeParams>,
public CommonTestUtils::TestsCommon { public BehaviorTestsUtils::IEInferRequestTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<InferRequestSetBlobByTypeParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<InferRequestSetBlobByTypeParams> obj) {
FuncTestUtils::BlobType BlobType; FuncTestUtils::BlobType BlobType;
std::string targetDevice; std::string targetDevice;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
std::tie(BlobType, targetDevice, configuration) = obj.param; std::tie(BlobType, targetDevice, configuration) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "BlobType=" << BlobType << "_"; result << "BlobType=" << BlobType << "_";
@ -35,14 +36,15 @@ public:
} }
void SetUp() override { void SetUp() override {
std::map<std::string, std::string> config;
std::tie(blobType, target_device, config) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::map<std::string, std::string> config; APIBaseTest::SetUp();
std::tie(blobType, targetDevice, config) = this->GetParam();
std::shared_ptr<ngraph::Function> function = ngraph::builder::subgraph::makeConvPoolRelu( std::shared_ptr<ngraph::Function> function = ngraph::builder::subgraph::makeConvPoolRelu(
{4, 3, 6, 8}, ngraph::element::Type_t::u8); {4, 3, 6, 8}, ngraph::element::Type_t::u8);
InferenceEngine::CNNNetwork cnnNetwork(function); InferenceEngine::CNNNetwork cnnNetwork(function);
executableNetwork = ie->LoadNetwork(cnnNetwork, targetDevice, config); executableNetwork = ie->LoadNetwork(cnnNetwork, target_device, config);
} }
protected: protected:
@ -52,18 +54,18 @@ protected:
return true; return true;
case FuncTestUtils::BlobType::Compound: case FuncTestUtils::BlobType::Compound:
case FuncTestUtils::BlobType::I420: case FuncTestUtils::BlobType::I420:
// case FuncTestUtils::BlobType::Remote: case FuncTestUtils::BlobType::Remote:
case FuncTestUtils::BlobType::NV12: case FuncTestUtils::BlobType::NV12:
return false; return false;
case FuncTestUtils::BlobType::Batched: { case FuncTestUtils::BlobType::Batched: {
std::vector<std::string> supported_metrics = ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_METRICS)); std::vector<std::string> supported_metrics = ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS));
if (std::find(supported_metrics.begin(), supported_metrics.end(), if (std::find(supported_metrics.begin(), supported_metrics.end(),
METRIC_KEY(OPTIMIZATION_CAPABILITIES)) == supported_metrics.end()) { METRIC_KEY(OPTIMIZATION_CAPABILITIES)) == supported_metrics.end()) {
return false; return false;
} }
std::vector<std::string> optimization_caps = std::vector<std::string> optimization_caps =
ie->GetMetric(targetDevice, METRIC_KEY(OPTIMIZATION_CAPABILITIES)); ie->GetMetric(target_device, METRIC_KEY(OPTIMIZATION_CAPABILITIES));
return std::find(optimization_caps.begin(), optimization_caps.end(), return std::find(optimization_caps.begin(), optimization_caps.end(),
METRIC_VALUE(BATCHED_BLOB)) != optimization_caps.end(); METRIC_VALUE(BATCHED_BLOB)) != optimization_caps.end();
} }
@ -72,7 +74,6 @@ protected:
} }
} }
std::string targetDevice;
FuncTestUtils::BlobType blobType; FuncTestUtils::BlobType blobType;
InferenceEngine::ExecutableNetwork executableNetwork; InferenceEngine::ExecutableNetwork executableNetwork;
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(); std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();

View File

@ -23,7 +23,8 @@ using SetBlobParams = std::tuple<InferenceEngine::Precision, // precision in C
setType, // type for which blob is set setType, // type for which blob is set
std::string>; // Device name std::string>; // Device name
class SetBlobTest : public testing::WithParamInterface<SetBlobParams>, virtual public LayerTestsUtils::LayerTestsCommon { class SetBlobTest : public testing::WithParamInterface<SetBlobParams>,
virtual public LayerTestsUtils::LayerTestsCommon {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<SetBlobParams> obj); static std::string getTestCaseName(testing::TestParamInfo<SetBlobParams> obj);
void Infer() override; void Infer() override;

View File

@ -25,13 +25,14 @@ typedef std::tuple<
> OVExecGraphImportExportTestParams; > OVExecGraphImportExportTestParams;
class OVExecGraphImportExportTest : public testing::WithParamInterface<OVExecGraphImportExportTestParams>, class OVExecGraphImportExportTest : public testing::WithParamInterface<OVExecGraphImportExportTestParams>,
public CommonTestUtils::TestsCommon { public OVCompiledNetworkTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<OVExecGraphImportExportTestParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<OVExecGraphImportExportTestParams> obj) {
ov::element::Type_t elementType; ov::element::Type_t elementType;
std::string targetDevice; std::string targetDevice;
ov::AnyMap configuration; ov::AnyMap configuration;
std::tie(elementType, targetDevice, configuration) = obj.param; std::tie(elementType, targetDevice, configuration) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << targetDevice << "_"; result << "targetDevice=" << targetDevice << "_";
result << "elementType=" << elementType << "_"; result << "elementType=" << elementType << "_";
@ -48,27 +49,28 @@ class OVExecGraphImportExportTest : public testing::WithParamInterface<OVExecGra
} }
void SetUp() override { void SetUp() override {
std::tie(elementType, target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(elementType, targetDevice, configuration) = this->GetParam(); APIBaseTest::SetUp();
} }
void TearDown() override { void TearDown() override {
if (!configuration.empty()) { if (!configuration.empty()) {
utils::PluginCache::get().reset(); utils::PluginCache::get().reset();
} }
APIBaseTest::TearDown();
} }
protected: protected:
std::shared_ptr<ov::Core> core = utils::PluginCache::get().core(); std::shared_ptr<ov::Core> core = utils::PluginCache::get().core();
std::string targetDevice;
ov::AnyMap configuration; ov::AnyMap configuration;
ov::element::Type_t elementType; ov::element::Type_t elementType;
std::shared_ptr<ov::Model> function; std::shared_ptr<ov::Model> function;
}; };
TEST_P(OVExecGraphImportExportTest, importExportedFunction) { TEST_P(OVExecGraphImportExportTest, importExportedFunction) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") { if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
} }
@ -96,12 +98,12 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunction) {
ngraph::ParameterVector{param1, param2}); ngraph::ParameterVector{param1, param2});
function->set_friendly_name("SingleRuLU"); function->set_friendly_name("SingleRuLU");
} }
execNet = core->compile_model(function, targetDevice, configuration); execNet = core->compile_model(function, target_device, configuration);
std::stringstream strm; std::stringstream strm;
execNet.export_model(strm); execNet.export_model(strm);
ov::CompiledModel importedExecNet = core->import_model(strm, targetDevice, configuration); ov::CompiledModel importedExecNet = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 2); EXPECT_EQ(function->inputs().size(), 2);
EXPECT_EQ(function->inputs().size(), importedExecNet.inputs().size()); EXPECT_EQ(function->inputs().size(), importedExecNet.inputs().size());
EXPECT_THROW(importedExecNet.input(), ov::Exception); EXPECT_THROW(importedExecNet.input(), ov::Exception);
@ -151,7 +153,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunction) {
} }
TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) { TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") { if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
} }
@ -167,11 +169,11 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) {
function->set_friendly_name("ParamResult"); function->set_friendly_name("ParamResult");
} }
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
std::stringstream strm; std::stringstream strm;
execNet.export_model(strm); execNet.export_model(strm);
ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration); ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 1); EXPECT_EQ(function->inputs().size(), 1);
EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size()); EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size());
EXPECT_NO_THROW(importedCompiledModel.input()); EXPECT_NO_THROW(importedCompiledModel.input());
@ -191,7 +193,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) {
} }
TEST_P(OVExecGraphImportExportTest, importExportedFunctionConstantResultOnly) { TEST_P(OVExecGraphImportExportTest, importExportedFunctionConstantResultOnly) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") { if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
} }
@ -207,11 +209,11 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunctionConstantResultOnly) {
function->set_friendly_name("ConstResult"); function->set_friendly_name("ConstResult");
} }
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
std::stringstream strm; std::stringstream strm;
execNet.export_model(strm); execNet.export_model(strm);
ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration); ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 0); EXPECT_EQ(function->inputs().size(), 0);
EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size()); EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size());
EXPECT_THROW(importedCompiledModel.input(), ov::Exception); EXPECT_THROW(importedCompiledModel.input(), ov::Exception);
@ -286,20 +288,20 @@ TEST_P(OVExecGraphImportExportTest, readFromV10IR) {
EXPECT_NO_THROW(function->input("in1")); // remove if read_model does not change function names EXPECT_NO_THROW(function->input("in1")); // remove if read_model does not change function names
EXPECT_NO_THROW(function->output("round")); // remove if read_model does not change function names EXPECT_NO_THROW(function->output("round")); // remove if read_model does not change function names
ov::CompiledModel execNet = core->compile_model(function, targetDevice, configuration); ov::CompiledModel execNet = core->compile_model(function, target_device, configuration);
EXPECT_EQ(execNet.inputs().size(), 1); EXPECT_EQ(execNet.inputs().size(), 1);
EXPECT_EQ(execNet.outputs().size(), 1); EXPECT_EQ(execNet.outputs().size(), 1);
EXPECT_NO_THROW(execNet.input("in1")); EXPECT_NO_THROW(execNet.input("in1"));
EXPECT_NO_THROW(execNet.output("round")); EXPECT_NO_THROW(execNet.output("round"));
if (targetDevice == "MULTI" || targetDevice == "AUTO") { if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
} }
std::stringstream strm; std::stringstream strm;
execNet.export_model(strm); execNet.export_model(strm);
ov::CompiledModel importedExecNet = core->import_model(strm, targetDevice, configuration); ov::CompiledModel importedExecNet = core->import_model(strm, target_device, configuration);
EXPECT_EQ(importedExecNet.inputs().size(), 1); EXPECT_EQ(importedExecNet.inputs().size(), 1);
EXPECT_EQ(importedExecNet.outputs().size(), 1); EXPECT_EQ(importedExecNet.outputs().size(), 1);
EXPECT_NO_THROW(importedExecNet.input("in1")); EXPECT_NO_THROW(importedExecNet.input("in1"));
@ -327,7 +329,7 @@ static std::map<std::string, std::string> any_copy(const ov::AnyMap& params) {
} }
TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) { TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") { if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
} }
@ -356,12 +358,12 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) {
ngraph::ParameterVector{param1, param2}); ngraph::ParameterVector{param1, param2});
function->set_friendly_name("SingleReLU"); function->set_friendly_name("SingleReLU");
} }
execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), targetDevice, any_copy(configuration)); execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), target_device, any_copy(configuration));
std::stringstream strm; std::stringstream strm;
execNet.Export(strm); execNet.Export(strm);
ov::CompiledModel importedExecNet = core->import_model(strm, targetDevice, configuration); ov::CompiledModel importedExecNet = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 2); EXPECT_EQ(function->inputs().size(), 2);
EXPECT_EQ(function->inputs().size(), importedExecNet.inputs().size()); EXPECT_EQ(function->inputs().size(), importedExecNet.inputs().size());
EXPECT_THROW(importedExecNet.input(), ov::Exception); EXPECT_THROW(importedExecNet.input(), ov::Exception);
@ -392,7 +394,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) {
} }
TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly) { TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") { if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
} }
@ -410,7 +412,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly)
ngraph::ParameterVector{param}); ngraph::ParameterVector{param});
function->set_friendly_name("ParamResult"); function->set_friendly_name("ParamResult");
} }
execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), targetDevice, any_copy(configuration)); execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), target_device, any_copy(configuration));
auto inputPrecision = InferenceEngine::details::convertPrecision(execNet.GetInputsInfo().at("param")->getPrecision()); auto inputPrecision = InferenceEngine::details::convertPrecision(execNet.GetInputsInfo().at("param")->getPrecision());
auto outputPrecision = InferenceEngine::details::convertPrecision(execNet.GetOutputsInfo().at("param")->getPrecision()); auto outputPrecision = InferenceEngine::details::convertPrecision(execNet.GetOutputsInfo().at("param")->getPrecision());
@ -418,7 +420,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly)
std::stringstream strm; std::stringstream strm;
execNet.Export(strm); execNet.Export(strm);
ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration); ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 1); EXPECT_EQ(function->inputs().size(), 1);
EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size()); EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size());
EXPECT_NO_THROW(importedCompiledModel.input()); EXPECT_NO_THROW(importedCompiledModel.input());
@ -438,7 +440,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly)
} }
TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) { TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") { if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
} }
@ -456,14 +458,14 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) {
ngraph::ParameterVector{}); ngraph::ParameterVector{});
function->set_friendly_name("ConstResult"); function->set_friendly_name("ConstResult");
} }
execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), targetDevice, any_copy(configuration)); execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), target_device, any_copy(configuration));
auto outputPrecision = InferenceEngine::details::convertPrecision(execNet.GetOutputsInfo().at("constant")->getPrecision()); auto outputPrecision = InferenceEngine::details::convertPrecision(execNet.GetOutputsInfo().at("constant")->getPrecision());
std::stringstream strm; std::stringstream strm;
execNet.Export(strm); execNet.Export(strm);
ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration); ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 0); EXPECT_EQ(function->inputs().size(), 0);
EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size()); EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size());
EXPECT_THROW(importedCompiledModel.input(), ov::Exception); EXPECT_THROW(importedCompiledModel.input(), ov::Exception);
@ -483,7 +485,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) {
} }
TEST_P(OVExecGraphImportExportTest, ieImportExportedFunction) { TEST_P(OVExecGraphImportExportTest, ieImportExportedFunction) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") { if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl; GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
} }
@ -512,12 +514,12 @@ TEST_P(OVExecGraphImportExportTest, ieImportExportedFunction) {
ngraph::ParameterVector{param1, param2}); ngraph::ParameterVector{param1, param2});
function->set_friendly_name("SingleReLU"); function->set_friendly_name("SingleReLU");
} }
execNet = core->compile_model(function, targetDevice, configuration); execNet = core->compile_model(function, target_device, configuration);
std::stringstream strm; std::stringstream strm;
execNet.export_model(strm); execNet.export_model(strm);
InferenceEngine::ExecutableNetwork importedExecNet = ie->ImportNetwork(strm, targetDevice, any_copy(configuration)); InferenceEngine::ExecutableNetwork importedExecNet = ie->ImportNetwork(strm, target_device, any_copy(configuration));
EXPECT_EQ(function->inputs().size(), 2); EXPECT_EQ(function->inputs().size(), 2);
EXPECT_EQ(function->inputs().size(), importedExecNet.GetInputsInfo().size()); EXPECT_EQ(function->inputs().size(), importedExecNet.GetInputsInfo().size());
EXPECT_NO_THROW(importedExecNet.GetInputsInfo()["param1"]); EXPECT_NO_THROW(importedExecNet.GetInputsInfo()["param1"]);

View File

@ -17,12 +17,14 @@ namespace test {
namespace behavior { namespace behavior {
class OVExecutableNetworkBaseTest : public testing::WithParamInterface<InferRequestParams>, class OVExecutableNetworkBaseTest : public testing::WithParamInterface<InferRequestParams>,
public CommonTestUtils::TestsCommon { public OVCompiledNetworkTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
std::string targetDevice; std::string targetDevice;
ov::AnyMap configuration; ov::AnyMap configuration;
std::tie(targetDevice, configuration) = obj.param; std::tie(targetDevice, configuration) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << targetDevice << "_"; result << "targetDevice=" << targetDevice << "_";
if (!configuration.empty()) { if (!configuration.empty()) {
@ -36,16 +38,18 @@ public:
} }
void SetUp() override { void SetUp() override {
std::tie(target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(targetDevice, configuration) = this->GetParam(); APIBaseTest::SetUp();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice); function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
} }
void TearDown() override { void TearDown() override {
if (!configuration.empty()) { if (!configuration.empty()) {
utils::PluginCache::get().reset(); utils::PluginCache::get().reset();
} }
APIBaseTest::TearDown();
} }
bool compareTensors(const ov::Tensor& t1, const ov::Tensor& t2) { bool compareTensors(const ov::Tensor& t1, const ov::Tensor& t2) {
@ -70,13 +74,14 @@ public:
protected: protected:
std::shared_ptr<ov::Core> core = utils::PluginCache::get().core(); std::shared_ptr<ov::Core> core = utils::PluginCache::get().core();
std::string targetDevice;
ov::AnyMap configuration; ov::AnyMap configuration;
std::shared_ptr<ov::Model> function; std::shared_ptr<ov::Model> function;
void set_api_entity() override { api_entity = ov::test::utils::ov_entity::ov_compiled_model; }
}; };
TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutable) { TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutable) {
EXPECT_NO_THROW(auto execNet = core->compile_model(function, targetDevice, configuration)); EXPECT_NO_THROW(auto execNet = core->compile_model(function, target_device, configuration));
} }
TEST(OVExecutableNetworkBaseTest, smoke_LoadNetworkToDefaultDeviceNoThrow) { TEST(OVExecutableNetworkBaseTest, smoke_LoadNetworkToDefaultDeviceNoThrow) {
@ -88,27 +93,27 @@ TEST(OVExecutableNetworkBaseTest, smoke_LoadNetworkToDefaultDeviceNoThrow) {
TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableWithIncorrectConfig) { TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableWithIncorrectConfig) {
ov::AnyMap incorrectConfig = {{"abc", "def"}}; ov::AnyMap incorrectConfig = {{"abc", "def"}};
EXPECT_ANY_THROW(auto execNet = core->compile_model(function, targetDevice, incorrectConfig)); EXPECT_ANY_THROW(auto execNet = core->compile_model(function, target_device, incorrectConfig));
} }
TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCreateInferRequest) { TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCreateInferRequest) {
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
EXPECT_NO_THROW(auto req = execNet.create_infer_request()); EXPECT_NO_THROW(auto req = execNet.create_infer_request());
} }
TEST_P(OVExecutableNetworkBaseTest, checkGetExecGraphInfoIsNotNullptr) { TEST_P(OVExecutableNetworkBaseTest, checkGetExecGraphInfoIsNotNullptr) {
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
auto execGraph = execNet.get_runtime_model(); auto execGraph = execNet.get_runtime_model();
EXPECT_NE(execGraph, nullptr); EXPECT_NE(execGraph, nullptr);
} }
TEST_P(OVExecutableNetworkBaseTest, checkGetMetric) { TEST_P(OVExecutableNetworkBaseTest, checkGetMetric) {
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
EXPECT_NO_THROW(execNet.get_property(ov::supported_properties)); EXPECT_NO_THROW(execNet.get_property(ov::supported_properties));
} }
TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckConfig) { TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckConfig) {
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
for (const auto& configItem : configuration) { for (const auto& configItem : configuration) {
ov::Any param; ov::Any param;
EXPECT_NO_THROW(param = execNet.get_property(configItem.first)); EXPECT_NO_THROW(param = execNet.get_property(configItem.first));
@ -118,7 +123,7 @@ TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheck
} }
TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNet) { TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNet) {
auto execNet = core->compile_model(function, targetDevice); auto execNet = core->compile_model(function, target_device);
std::map<std::string, ov::Any> config; std::map<std::string, ov::Any> config;
for (const auto& confItem : configuration) { for (const auto& confItem : configuration) {
config.emplace(confItem.first, confItem.second); config.emplace(confItem.first, confItem.second);
@ -127,7 +132,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNet) {
} }
TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetWithIncorrectConfig) { TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetWithIncorrectConfig) {
auto execNet = core->compile_model(function, targetDevice); auto execNet = core->compile_model(function, target_device);
std::map<std::string, std::string> incorrectConfig = {{"abc", "def"}}; std::map<std::string, std::string> incorrectConfig = {{"abc", "def"}};
std::map<std::string, ov::Any> config; std::map<std::string, ov::Any> config;
for (const auto& confItem : incorrectConfig) { for (const auto& confItem : incorrectConfig) {
@ -137,7 +142,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetWithIncorrectConfig) {
} }
TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetAndCheckConfigAndCheck) { TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetAndCheckConfigAndCheck) {
auto execNet = core->compile_model(function, targetDevice); auto execNet = core->compile_model(function, target_device);
std::map<std::string, ov::Any> config; std::map<std::string, ov::Any> config;
for (const auto& confItem : configuration) { for (const auto& confItem : configuration) {
config.emplace(confItem.first, confItem.second); config.emplace(confItem.first, confItem.second);
@ -154,7 +159,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetAndCheckConfigAndCheck)
TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworks) { TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworks) {
std::vector<ov::CompiledModel> vec; std::vector<ov::CompiledModel> vec;
for (auto i = 0; i < 2; i++) { for (auto i = 0; i < 2; i++) {
EXPECT_NO_THROW(vec.push_back(core->compile_model(function, targetDevice, configuration))); EXPECT_NO_THROW(vec.push_back(core->compile_model(function, target_device, configuration)));
EXPECT_NE(nullptr, function); EXPECT_NE(nullptr, function);
} }
} }
@ -162,24 +167,24 @@ TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworks) {
TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworksAndCheckFunction) { TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworksAndCheckFunction) {
std::vector<ov::CompiledModel> vec; std::vector<ov::CompiledModel> vec;
for (auto i = 0; i < 2; i++) { for (auto i = 0; i < 2; i++) {
EXPECT_NO_THROW(vec.push_back(core->compile_model(function, targetDevice, configuration))); EXPECT_NO_THROW(vec.push_back(core->compile_model(function, target_device, configuration)));
EXPECT_NE(nullptr, vec[i].get_runtime_model()); EXPECT_NE(nullptr, vec[i].get_runtime_model());
EXPECT_NE(vec.begin()->get_runtime_model(), vec[i].get_runtime_model()); EXPECT_NE(vec.begin()->get_runtime_model(), vec[i].get_runtime_model());
} }
} }
TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfo) { TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfo) {
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
EXPECT_NO_THROW(auto inInfo = execNet.inputs()); EXPECT_NO_THROW(auto inInfo = execNet.inputs());
} }
TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfo) { TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfo) {
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
EXPECT_NO_THROW(auto outInfo = execNet.outputs()); EXPECT_NO_THROW(auto outInfo = execNet.outputs());
} }
TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) { TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) {
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
auto inputs = execNet.inputs(); auto inputs = execNet.inputs();
std::vector<std::string> paramVec; std::vector<std::string> paramVec;
for (const auto& input : inputs) { for (const auto& input : inputs) {
@ -193,7 +198,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) {
} }
TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) { TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) {
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
auto outputs = execNet.outputs(); auto outputs = execNet.outputs();
std::vector<std::string> resVec; std::vector<std::string> resVec;
for (const auto& out : outputs) { for (const auto& out : outputs) {
@ -209,7 +214,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) {
TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) { TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) {
std::shared_ptr<const ov::Model> execGraph; std::shared_ptr<const ov::Model> execGraph;
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
EXPECT_NO_THROW(execGraph = execNet.get_runtime_model()); EXPECT_NO_THROW(execGraph = execNet.get_runtime_model());
std::map<std::string, int> originalLayersMap; std::map<std::string, int> originalLayersMap;
for (const auto& layer : function->get_ops()) { for (const auto& layer : function->get_ops()) {
@ -259,7 +264,7 @@ TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) {
TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) { TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) {
std::shared_ptr<const ov::Model> execGraph; std::shared_ptr<const ov::Model> execGraph;
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
EXPECT_NO_THROW(execGraph = execNet.get_runtime_model()); EXPECT_NO_THROW(execGraph = execNet.get_runtime_model());
std::map<std::string, int> originalLayersMap; std::map<std::string, int> originalLayersMap;
for (const auto& layer : function->get_ops()) { for (const auto& layer : function->get_ops()) {
@ -319,7 +324,7 @@ TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) {
TEST_P(OVExecutableNetworkBaseTest, canExport) { TEST_P(OVExecutableNetworkBaseTest, canExport) {
auto ts = CommonTestUtils::GetTimestamp(); auto ts = CommonTestUtils::GetTimestamp();
std::string modelName = GetTestName().substr(0, CommonTestUtils::maxFileNameLength) + "_" + ts; std::string modelName = GetTestName().substr(0, CommonTestUtils::maxFileNameLength) + "_" + ts;
auto execNet = core->compile_model(function, targetDevice, configuration); auto execNet = core->compile_model(function, target_device, configuration);
std::ofstream out(modelName, std::ios::out); std::ofstream out(modelName, std::ios::out);
EXPECT_NO_THROW(execNet.export_model(out)); EXPECT_NO_THROW(execNet.export_model(out));
out.close(); out.close();
@ -339,7 +344,7 @@ TEST_P(OVExecutableNetworkBaseTest, getInputFromFunctionWithSingleInput) {
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::CompiledModel execNet; ov::CompiledModel execNet;
execNet = core->compile_model(function, targetDevice, configuration); execNet = core->compile_model(function, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 1); EXPECT_EQ(function->inputs().size(), 1);
EXPECT_EQ(function->inputs().size(), execNet.inputs().size()); EXPECT_EQ(function->inputs().size(), execNet.inputs().size());
EXPECT_NO_THROW(execNet.input()); EXPECT_NO_THROW(execNet.input());
@ -366,7 +371,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputFromFunctionWithSingleInput) {
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::CompiledModel execNet; ov::CompiledModel execNet;
execNet = core->compile_model(function, targetDevice, configuration); execNet = core->compile_model(function, target_device, configuration);
EXPECT_EQ(function->outputs().size(), 1); EXPECT_EQ(function->outputs().size(), 1);
EXPECT_EQ(function->outputs().size(), execNet.outputs().size()); EXPECT_EQ(function->outputs().size(), execNet.outputs().size());
EXPECT_NO_THROW(execNet.output()); EXPECT_NO_THROW(execNet.output());
@ -414,7 +419,7 @@ TEST_P(OVExecutableNetworkBaseTest, getInputsFromFunctionWithSeveralInputs) {
ngraph::ParameterVector{param1, param2}); ngraph::ParameterVector{param1, param2});
function->set_friendly_name("SimpleReLU"); function->set_friendly_name("SimpleReLU");
} }
execNet = core->compile_model(function, targetDevice, configuration); execNet = core->compile_model(function, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 2); EXPECT_EQ(function->inputs().size(), 2);
EXPECT_EQ(function->inputs().size(), execNet.inputs().size()); EXPECT_EQ(function->inputs().size(), execNet.inputs().size());
EXPECT_THROW(execNet.input(), ov::Exception); EXPECT_THROW(execNet.input(), ov::Exception);
@ -485,7 +490,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputsFromFunctionWithSeveralOutputs) {
ngraph::ParameterVector{param1, param2}); ngraph::ParameterVector{param1, param2});
function->set_friendly_name("SimpleReLU"); function->set_friendly_name("SimpleReLU");
} }
execNet = core->compile_model(function, targetDevice, configuration); execNet = core->compile_model(function, target_device, configuration);
EXPECT_EQ(function->outputs().size(), 2); EXPECT_EQ(function->outputs().size(), 2);
EXPECT_EQ(function->outputs().size(), execNet.outputs().size()); EXPECT_EQ(function->outputs().size(), execNet.outputs().size());
EXPECT_THROW(execNet.output(), ov::Exception); EXPECT_THROW(execNet.output(), ov::Exception);
@ -552,7 +557,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputsFromSplitFunctionWithSeveralOutput
std::make_shared<ngraph::Function>(ngraph::ResultVector{result1, result2}, ngraph::ParameterVector{param1}); std::make_shared<ngraph::Function>(ngraph::ResultVector{result1, result2}, ngraph::ParameterVector{param1});
function->set_friendly_name("SingleSplit"); function->set_friendly_name("SingleSplit");
} }
execNet = core->compile_model(function, targetDevice, configuration); execNet = core->compile_model(function, target_device, configuration);
EXPECT_EQ(function->outputs().size(), 2); EXPECT_EQ(function->outputs().size(), 2);
EXPECT_EQ(function->outputs().size(), execNet.outputs().size()); EXPECT_EQ(function->outputs().size(), execNet.outputs().size());
EXPECT_THROW(execNet.output(), ov::Exception); EXPECT_THROW(execNet.output(), ov::Exception);
@ -599,7 +604,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputsFromSplitFunctionWithSeveralOutput
// Load correct network to Plugin to get executable network // Load correct network to Plugin to get executable network
TEST_P(OVExecutableNetworkBaseTest, precisionsAsInOriginalFunction) { TEST_P(OVExecutableNetworkBaseTest, precisionsAsInOriginalFunction) {
ov::CompiledModel execNet; ov::CompiledModel execNet;
EXPECT_NO_THROW(execNet = core->compile_model(function, targetDevice, configuration)); EXPECT_NO_THROW(execNet = core->compile_model(function, target_device, configuration));
EXPECT_EQ(function->get_parameters().size(), execNet.inputs().size()); EXPECT_EQ(function->get_parameters().size(), execNet.inputs().size());
auto ref_parameter = function->get_parameters().back(); auto ref_parameter = function->get_parameters().back();
@ -623,7 +628,7 @@ TEST_P(OVExecutableNetworkBaseTest, precisionsAsInOriginalIR) {
ov::pass::Serialize(m_out_xml_path_1, m_out_bin_path_1).run_on_function(function); ov::pass::Serialize(m_out_xml_path_1, m_out_bin_path_1).run_on_function(function);
ov::CompiledModel execNet; ov::CompiledModel execNet;
EXPECT_NO_THROW(execNet = core->compile_model(m_out_xml_path_1, targetDevice, configuration)); EXPECT_NO_THROW(execNet = core->compile_model(m_out_xml_path_1, target_device, configuration));
CommonTestUtils::removeIRFiles(m_out_xml_path_1, m_out_bin_path_1); CommonTestUtils::removeIRFiles(m_out_xml_path_1, m_out_bin_path_1);
EXPECT_EQ(function->get_parameters().size(), execNet.inputs().size()); EXPECT_EQ(function->get_parameters().size(), execNet.inputs().size());
@ -645,7 +650,7 @@ TEST_P(OVExecutableNetworkBaseTest, getCompiledModelFromInferRequest) {
ov::InferRequest req; ov::InferRequest req;
{ {
ov::CompiledModel compiled_model; ov::CompiledModel compiled_model;
ASSERT_NO_THROW(compiled_model = core->compile_model(function, targetDevice, configuration)); ASSERT_NO_THROW(compiled_model = core->compile_model(function, target_device, configuration));
ASSERT_NO_THROW(req = compiled_model.create_infer_request()); ASSERT_NO_THROW(req = compiled_model.create_infer_request());
ASSERT_NO_THROW(req.infer()); ASSERT_NO_THROW(req.infer());
} }
@ -677,7 +682,7 @@ TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV10Model) {
function->get_rt_info()["version"] = int64_t(10); function->get_rt_info()["version"] = int64_t(10);
function->set_friendly_name("SimpleReLU"); function->set_friendly_name("SimpleReLU");
} }
EXPECT_THROW(core->compile_model(function, targetDevice, configuration), ov::Exception); EXPECT_THROW(core->compile_model(function, target_device, configuration), ov::Exception);
} }
TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV11Model) { TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV11Model) {
@ -699,7 +704,7 @@ TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV11Model) {
function->get_rt_info()["version"] = int64_t(11); function->get_rt_info()["version"] = int64_t(11);
function->set_friendly_name("SimpleReLU"); function->set_friendly_name("SimpleReLU");
} }
EXPECT_NO_THROW(core->compile_model(function, targetDevice, configuration)); EXPECT_NO_THROW(core->compile_model(function, target_device, configuration));
} }
} // namespace behavior } // namespace behavior

View File

@ -27,30 +27,32 @@ namespace behavior {
ASSERT_NE(properties.end(), it); \ ASSERT_NE(properties.end(), it); \
} }
using OVClassImportExportTestP = OVClassBaseTestP; using OVCompiledModelClassBaseTest = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVClassBaseTestP; using OVClassExecutableNetworkImportExportTestP = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = OVClassBaseTestP; using OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_NETWORK_NAME = OVClassBaseTestP; using OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = OVClassBaseTestP; using OVClassExecutableNetworkGetMetricTest_NETWORK_NAME = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported = OVClassBaseTestP; using OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetConfigTest = OVClassBaseTestP; using OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkSetConfigTest = OVClassBaseTestP; using OVClassExecutableNetworkGetConfigTest = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetConfigTest = OVClassBaseTestP; using OVClassExecutableNetworkSetConfigTest = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetConfigTest = OVCompiledModelClassBaseTestP;
class OVClassExecutableNetworkGetMetricTestForSpecificConfig : class OVClassExecutableNetworkGetMetricTestForSpecificConfig :
public OVClassNetworkTest, public OVClassNetworkTest,
public ::testing::WithParamInterface<std::tuple<std::string, std::pair<std::string, std::string>>> { public ::testing::WithParamInterface<std::tuple<std::string, std::pair<std::string, std::string>>>,
public OVCompiledNetworkTestBase {
protected: protected:
std::string deviceName;
std::string configKey; std::string configKey;
ov::Any configValue; ov::Any configValue;
public: public:
void SetUp() override { void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED(); target_device = std::get<0>(GetParam());
OVClassNetworkTest::SetUp();
deviceName = std::get<0>(GetParam());
std::tie(configKey, configValue) = std::get<1>(GetParam()); std::tie(configKey, configValue) = std::get<1>(GetParam());
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
OVClassNetworkTest::SetUp();
} }
}; };
@ -62,18 +64,17 @@ using OVClassExecutableNetworkUnsupportedConfigTest = OVClassExecutableNetworkGe
// //
class OVClassHeteroExecutableNetworkGetMetricTest : class OVClassHeteroExecutableNetworkGetMetricTest :
public OVClassNetworkTest, public OVClassNetworkTest,
public ::testing::WithParamInterface<std::string> { public ::testing::WithParamInterface<std::string>,
public OVCompiledNetworkTestBase {
protected: protected:
std::string deviceName;
std::string heteroDeviceName; std::string heteroDeviceName;
public: public:
void SetUp() override { void SetUp() override {
target_device = CommonTestUtils::DEVICE_HETERO + std::string(":") + GetParam() + std::string(",") + CommonTestUtils::DEVICE_CPU;;
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
OVClassNetworkTest::SetUp(); OVClassNetworkTest::SetUp();
deviceName = GetParam();
heteroDeviceName = CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + std::string(",") +
CommonTestUtils::DEVICE_CPU;
} }
}; };
using OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVClassHeteroExecutableNetworkGetMetricTest; using OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVClassHeteroExecutableNetworkGetMetricTest;
@ -85,13 +86,13 @@ using OVClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK = OVClassHeter
// ImportExportNetwork // ImportExportNetwork
// //
TEST_P(OVClassImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) { TEST_P(OVClassExecutableNetworkImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
std::stringstream strm; std::stringstream strm;
ov::CompiledModel executableNetwork; ov::CompiledModel executableNetwork;
OV_ASSERT_NO_THROW(executableNetwork = ie.compile_model(actualNetwork, deviceName)); OV_ASSERT_NO_THROW(executableNetwork = ie.compile_model(actualNetwork, target_device));
OV_ASSERT_NO_THROW(executableNetwork.export_model(strm)); OV_ASSERT_NO_THROW(executableNetwork.export_model(strm));
OV_ASSERT_NO_THROW(executableNetwork = ie.import_model(strm, deviceName)); OV_ASSERT_NO_THROW(executableNetwork = ie.import_model(strm, target_device));
OV_ASSERT_NO_THROW(executableNetwork.create_infer_request()); OV_ASSERT_NO_THROW(executableNetwork.create_infer_request());
} }
@ -101,7 +102,7 @@ TEST_P(OVClassImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) {
TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoThrow) { TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
std::vector<ov::PropertyName> supported_properties; std::vector<ov::PropertyName> supported_properties;
OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties)); OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties));
@ -118,7 +119,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoT
TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow) { TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
std::vector<ov::PropertyName> supported_properties; std::vector<ov::PropertyName> supported_properties;
OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties)); OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties));
@ -135,7 +136,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow
TEST_P(OVClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) { TEST_P(OVClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
std::string model_name; std::string model_name;
OV_ASSERT_NO_THROW(model_name = compiled_model.get_property(ov::model_name)); OV_ASSERT_NO_THROW(model_name = compiled_model.get_property(ov::model_name));
@ -148,7 +149,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) {
TEST_P(OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, GetMetricNoThrow) { TEST_P(OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
unsigned int value = 0; unsigned int value = 0;
OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::optimal_number_of_infer_requests)); OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::optimal_number_of_infer_requests));
@ -159,7 +160,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, G
} }
TEST_P(OVClassExecutableNetworkGetMetricTest_MODEL_PRIORITY, GetMetricNoThrow) { TEST_P(OVClassExecutableNetworkGetMetricTest_MODEL_PRIORITY, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName, configuration); auto compiled_model = ie.compile_model(simpleNetwork, target_device, configuration);
ov::hint::Priority value; ov::hint::Priority value;
OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::hint::model_priority)); OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::hint::model_priority));
@ -168,7 +169,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_MODEL_PRIORITY, GetMetricNoThrow) {
TEST_P(OVClassExecutableNetworkGetMetricTest_DEVICE_PRIORITY, GetMetricNoThrow) { TEST_P(OVClassExecutableNetworkGetMetricTest_DEVICE_PRIORITY, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName, configuration); auto compiled_model = ie.compile_model(simpleNetwork, target_device, configuration);
std::string value; std::string value;
OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::device::priorities)); OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::device::priorities));
@ -178,7 +179,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_DEVICE_PRIORITY, GetMetricNoThrow)
TEST_P(OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow) { TEST_P(OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
ASSERT_THROW(compiled_model.get_property("unsupported_property"), ov::Exception); ASSERT_THROW(compiled_model.get_property("unsupported_property"), ov::Exception);
} }
@ -186,7 +187,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow)
TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigNoThrow) { TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigNoThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
std::vector<ov::PropertyName> property_names; std::vector<ov::PropertyName> property_names;
OV_ASSERT_NO_THROW(property_names = compiled_model.get_property(ov::supported_properties)); OV_ASSERT_NO_THROW(property_names = compiled_model.get_property(ov::supported_properties));
@ -202,7 +203,7 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigThrows) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
ov::Any p; ov::Any p;
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
ASSERT_THROW(compiled_model.get_property("unsupported_property"), ov::Exception); ASSERT_THROW(compiled_model.get_property("unsupported_property"), ov::Exception);
} }
@ -210,7 +211,7 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigThrows) {
TEST_P(OVClassExecutableNetworkSetConfigTest, SetConfigThrows) { TEST_P(OVClassExecutableNetworkSetConfigTest, SetConfigThrows) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
ASSERT_THROW(compiled_model.set_property({{"unsupported_config", "some_value"}}), ov::Exception); ASSERT_THROW(compiled_model.set_property({{"unsupported_config", "some_value"}}), ov::Exception);
} }
@ -219,7 +220,7 @@ TEST_P(OVClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
ov::Any p; ov::Any p;
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
OV_ASSERT_NO_THROW(compiled_model.set_property({{configKey, configValue}})); OV_ASSERT_NO_THROW(compiled_model.set_property({{configKey, configValue}}));
OV_ASSERT_NO_THROW(p = compiled_model.get_property(configKey)); OV_ASSERT_NO_THROW(p = compiled_model.get_property(configKey));
ASSERT_EQ(p, configValue); ASSERT_EQ(p, configValue);
@ -228,7 +229,7 @@ TEST_P(OVClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) {
TEST_P(OVClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) { TEST_P(OVClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
ASSERT_THROW(compiled_model.set_property({{configKey, configValue}}), ov::Exception); ASSERT_THROW(compiled_model.set_property({{configKey, configValue}}), ov::Exception);
} }
@ -237,9 +238,9 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigNoEmptyNoThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
std::vector<ov::PropertyName> dev_property_names; std::vector<ov::PropertyName> dev_property_names;
OV_ASSERT_NO_THROW(dev_property_names = ie.get_property(deviceName, ov::supported_properties)); OV_ASSERT_NO_THROW(dev_property_names = ie.get_property(target_device, ov::supported_properties));
auto compiled_model = ie.compile_model(simpleNetwork, deviceName); auto compiled_model = ie.compile_model(simpleNetwork, target_device);
std::vector<ov::PropertyName> model_property_names; std::vector<ov::PropertyName> model_property_names;
OV_ASSERT_NO_THROW(model_property_names = compiled_model.get_property(ov::supported_properties)); OV_ASSERT_NO_THROW(model_property_names = compiled_model.get_property(ov::supported_properties));
@ -249,7 +250,7 @@ TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMet
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto heteroExeNetwork = ie.compile_model(actualNetwork, heteroDeviceName); auto heteroExeNetwork = ie.compile_model(actualNetwork, heteroDeviceName);
auto deviceExeNetwork = ie.compile_model(actualNetwork, deviceName); auto deviceExeNetwork = ie.compile_model(actualNetwork, target_device);
std::vector<ov::PropertyName> heteroConfigValues, deviceConfigValues; std::vector<ov::PropertyName> heteroConfigValues, deviceConfigValues;
OV_ASSERT_NO_THROW(heteroConfigValues = heteroExeNetwork.get_property(ov::supported_properties)); OV_ASSERT_NO_THROW(heteroConfigValues = heteroExeNetwork.get_property(ov::supported_properties));
@ -285,7 +286,7 @@ TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricN
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
auto heteroExeNetwork = ie.compile_model(actualNetwork, heteroDeviceName); auto heteroExeNetwork = ie.compile_model(actualNetwork, heteroDeviceName);
auto deviceExeNetwork = ie.compile_model(actualNetwork, deviceName); auto deviceExeNetwork = ie.compile_model(actualNetwork, target_device);
std::vector<ov::PropertyName> heteroConfigValues, deviceConfigValues; std::vector<ov::PropertyName> heteroConfigValues, deviceConfigValues;
OV_ASSERT_NO_THROW(heteroConfigValues = heteroExeNetwork.get_property(ov::supported_properties)); OV_ASSERT_NO_THROW(heteroConfigValues = heteroExeNetwork.get_property(ov::supported_properties));
@ -331,13 +332,13 @@ TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThro
TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK, GetMetricNoThrow) { TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate(); ov::Core ie = createCoreWithTemplate();
setHeteroNetworkAffinity(deviceName); setHeteroNetworkAffinity(target_device);
auto compiled_model = ie.compile_model(actualNetwork, heteroDeviceName); auto compiled_model = ie.compile_model(actualNetwork, heteroDeviceName);
std::string targets; std::string targets;
OV_ASSERT_NO_THROW(targets = compiled_model.get_property(ov::device::priorities)); OV_ASSERT_NO_THROW(targets = compiled_model.get_property(ov::device::priorities));
auto expectedTargets = deviceName + "," + CommonTestUtils::DEVICE_CPU; auto expectedTargets = target_device + "," + CommonTestUtils::DEVICE_CPU;
std::cout << "Compiled model fallback targets: " << targets << std::endl; std::cout << "Compiled model fallback targets: " << targets << std::endl;
ASSERT_EQ(expectedTargets, targets); ASSERT_EQ(expectedTargets, targets);

View File

@ -16,31 +16,27 @@ namespace ov {
namespace test { namespace test {
namespace behavior { namespace behavior {
class OVCompiledModelPropertiesBase : public CommonTestUtils::TestsCommon { class OVCompiledModelPropertiesBase : public OVCompiledNetworkTestBase {
public: public:
std::shared_ptr<Core> core = utils::PluginCache::get().core(); std::shared_ptr<Core> core = utils::PluginCache::get().core();
std::shared_ptr<Model> model; std::shared_ptr<Model> model;
std::string device_name;
AnyMap properties; AnyMap properties;
}; };
class OVCompiledModelEmptyPropertiesTests : public testing::WithParamInterface<std::string>, class OVCompiledModelEmptyPropertiesTests : public testing::WithParamInterface<std::string>,
public OVCompiledModelPropertiesBase { public OVCompiledModelPropertiesBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj); static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);
void SetUp() override; void SetUp() override;
}; };
using PropertiesParams = std::tuple<std::string, AnyMap>; using PropertiesParams = std::tuple<std::string, AnyMap>;
class OVCompiledModelPropertiesTests : public testing::WithParamInterface<PropertiesParams>, class OVCompiledModelPropertiesTests : public testing::WithParamInterface<PropertiesParams>,
public OVCompiledModelPropertiesBase { public OVCompiledModelPropertiesBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<PropertiesParams> obj); static std::string getTestCaseName(testing::TestParamInfo<PropertiesParams> obj);
void SetUp() override; void SetUp() override;
void TearDown() override; void TearDown() override;
}; };

View File

@ -14,13 +14,12 @@ namespace test {
namespace behavior { namespace behavior {
class OVInferRequestBatchedTests : public testing::WithParamInterface<std::string>, class OVInferRequestBatchedTests : public testing::WithParamInterface<std::string>,
public CommonTestUtils::TestsCommon { public OVInferRequestTestBase {
public: public:
static std::string getTestCaseName(const testing::TestParamInfo<std::string>& device_name); static std::string getTestCaseName(const testing::TestParamInfo<std::string>& device_name);
protected: protected:
void SetUp() override; void SetUp() override;
void TearDown() override; void TearDown() override;
static std::string generateCacheDirName(const std::string& test_name); static std::string generateCacheDirName(const std::string& test_name);
@ -28,7 +27,6 @@ protected:
const PartialShape& shape, const ov::Layout& layout); const PartialShape& shape, const ov::Layout& layout);
std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core(); std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core();
std::string targetDevice;
std::string m_cache_dir; // internal member std::string m_cache_dir; // internal member
bool m_need_reset_core = false; bool m_need_reset_core = false;
}; };

View File

@ -4,14 +4,127 @@
#pragma once #pragma once
#include <future>
#include "base/ov_behavior_test_utils.hpp" #include "base/ov_behavior_test_utils.hpp"
#include "shared_test_classes/subgraph/basic_lstm.hpp"
namespace ov { namespace ov {
namespace test { namespace test {
namespace behavior { namespace behavior {
struct OVInferRequestCallbackTests : public OVInferRequestTests { using OVInferRequestCallbackTests = OVInferRequestTests;
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
}; TEST_P(OVInferRequestCallbackTests, canCallAsyncWithCompletionCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
bool is_called = false;
OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) {
// HSD_1805940120: Wait on starting callback return HDDL_ERROR_INVAL_TASK_HANDLE
ASSERT_EQ(exception_ptr, nullptr);
is_called = true;
}));
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
ASSERT_TRUE(is_called);
}
TEST_P(OVInferRequestCallbackTests, syncInferDoesNotCallCompletionCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
bool is_called = false;
req.set_callback([&] (std::exception_ptr exception_ptr) {
ASSERT_EQ(nullptr, exception_ptr);
is_called = true;
});
req.infer();
ASSERT_FALSE(is_called);
}
// test that can wait all callbacks on dtor
TEST_P(OVInferRequestCallbackTests, canStartSeveralAsyncInsideCompletionCallbackWithSafeDtor) {
const int NUM_ITER = 10;
struct TestUserData {
std::atomic<int> numIter = {0};
std::promise<bool> promise;
};
TestUserData data;
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) {
if (exception_ptr) {
data.promise.set_exception(exception_ptr);
} else {
if (data.numIter.fetch_add(1) != NUM_ITER) {
req.start_async();
} else {
data.promise.set_value(true);
}
}
}));
auto future = data.promise.get_future();
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
future.wait();
auto callbackStatus = future.get();
ASSERT_TRUE(callbackStatus);
auto dataNumIter = data.numIter - 1;
ASSERT_EQ(NUM_ITER, dataNumIter);
}
TEST_P(OVInferRequestCallbackTests, returnGeneralErrorIfCallbackThrowException) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.set_callback([] (std::exception_ptr) {
OPENVINO_UNREACHABLE("Throw");
}));
OV_ASSERT_NO_THROW(req.start_async());
ASSERT_THROW(req.wait(), ov::Exception);
}
TEST_P(OVInferRequestCallbackTests, ReturnResultNotReadyFromWaitInAsyncModeForTooSmallTimeout) {
// GetNetwork(3000, 380) make inference around 20ms on GNA SW
// so increases chances for getting RESULT_NOT_READY
OV_ASSERT_NO_THROW(execNet = core->compile_model(
SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38), target_device, configuration));
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
std::promise<std::chrono::system_clock::time_point> callbackTimeStamp;
auto callbackTimeStampFuture = callbackTimeStamp.get_future();
// add a callback to the request and capture the timestamp
OV_ASSERT_NO_THROW(req.set_callback([&](std::exception_ptr exception_ptr) {
if (exception_ptr) {
callbackTimeStamp.set_exception(exception_ptr);
} else {
callbackTimeStamp.set_value(std::chrono::system_clock::now());
}
}));
OV_ASSERT_NO_THROW(req.start_async());
bool ready = false;
OV_ASSERT_NO_THROW(ready = req.wait_for({}));
// get timestamp taken AFTER return from the wait(STATUS_ONLY)
const auto afterWaitTimeStamp = std::chrono::system_clock::now();
// IF the callback timestamp is larger than the afterWaitTimeStamp
// then we should observe false ready result
if (afterWaitTimeStamp < callbackTimeStampFuture.get()) {
ASSERT_FALSE(ready);
}
OV_ASSERT_NO_THROW(req.wait());
}
TEST_P(OVInferRequestCallbackTests, ImplDoesNotCopyCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
{
auto somePtr = std::make_shared<int>(42);
OV_ASSERT_NO_THROW(req.set_callback([somePtr] (std::exception_ptr exception_ptr) {
ASSERT_EQ(nullptr, exception_ptr);
ASSERT_EQ(1, somePtr.use_count());
}));
}
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
}
} // namespace behavior } // namespace behavior
} // namespace test } // namespace test
} // namespace ov } // namespace ov

View File

@ -6,14 +6,60 @@
#include <future> #include <future>
#include "openvino/runtime/exception.hpp"
#include "base/ov_behavior_test_utils.hpp" #include "base/ov_behavior_test_utils.hpp"
namespace ov { namespace ov {
namespace test { namespace test {
namespace behavior { namespace behavior {
struct OVInferRequestCancellationTests : public OVInferRequestTests { using OVInferRequestCancellationTests = OVInferRequestTests;
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
}; TEST_P(OVInferRequestCancellationTests, canCancelAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.cancel());
try {
req.wait();
} catch (const ov::Cancelled&) {
SUCCEED();
}
}
TEST_P(OVInferRequestCancellationTests, CanResetAfterCancelAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.cancel());
try {
req.wait();
} catch (const ov::Cancelled&) {
SUCCEED();
}
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
}
TEST_P(OVInferRequestCancellationTests, canCancelBeforeAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.cancel());
}
TEST_P(OVInferRequestCancellationTests, canCancelInferRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
auto infer = std::async(std::launch::async, [&req]{req.infer();});
while (!req.wait_for({})) {
}
OV_ASSERT_NO_THROW(req.cancel());
try {
infer.get();
} catch (const ov::Cancelled&) {
SUCCEED();
}
}
} // namespace behavior } // namespace behavior
} // namespace test } // namespace test
} // namespace ov } // namespace ov

View File

@ -36,26 +36,23 @@ namespace test {
namespace behavior { namespace behavior {
using OVInferRequestDynamicParams = std::tuple< using OVInferRequestDynamicParams = std::tuple<
std::shared_ptr<Model>, // ov Model std::shared_ptr<Model>, // ov Model
std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>>, // input/expected output shapes per inference std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>>, // input/expected output shapes per inference
std::string, // Device name std::string, // Device name
ov::AnyMap // Config ov::AnyMap // Config
>; >;
class OVInferRequestDynamicTests : public testing::WithParamInterface<OVInferRequestDynamicParams>, class OVInferRequestDynamicTests : public testing::WithParamInterface<OVInferRequestDynamicParams>,
virtual public ov::test::SubgraphBaseTest { public OVInferRequestTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<OVInferRequestDynamicParams> obj); static std::string getTestCaseName(testing::TestParamInfo<OVInferRequestDynamicParams> obj);
protected: protected:
void SetUp() override; void SetUp() override;
void TearDown() override;
bool checkOutput(const ov::runtime::Tensor& in, const ov::runtime::Tensor& actual); bool checkOutput(const ov::runtime::Tensor& in, const ov::runtime::Tensor& actual);
std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core(); std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core();
std::shared_ptr<Model> function; std::shared_ptr<Model> function;
std::string targetDevice;
ov::AnyMap configuration; ov::AnyMap configuration;
std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>> inOutShapes; std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>> inOutShapes;
}; };

View File

@ -71,21 +71,18 @@ inline OVInferReqInferParam roi_1d() {
} // namespace tensor_roi } // namespace tensor_roi
class OVInferRequestInferenceTests : public testing::WithParamInterface<OVInferRequestInferenceTestsParams>, class OVInferRequestInferenceTests : public testing::WithParamInterface<OVInferRequestInferenceTestsParams>,
public CommonTestUtils::TestsCommon { public OVInferRequestTestBase {
public: public:
static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestInferenceTestsParams>& device_name); static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestInferenceTestsParams>& device_name);
protected: protected:
void SetUp() override; void SetUp() override;
void TearDown() override;
static std::shared_ptr<Model> create_n_inputs(size_t num, element::Type type, static std::shared_ptr<Model> create_n_inputs(size_t num, element::Type type,
const PartialShape& shape); const PartialShape& shape);
std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core(); std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core();
OVInferReqInferParam m_param; OVInferReqInferParam m_param;
std::string m_device_name;
}; };
} // namespace behavior } // namespace behavior

View File

@ -15,7 +15,6 @@ namespace test {
namespace behavior { namespace behavior {
struct OVInferRequestIOTensorTest : public OVInferRequestTests { struct OVInferRequestIOTensorTest : public OVInferRequestTests {
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
void SetUp() override; void SetUp() override;
void TearDown() override; void TearDown() override;
ov::InferRequest req; ov::InferRequest req;
@ -29,7 +28,7 @@ using OVInferRequestSetPrecisionParams = std::tuple<
ov::AnyMap // Config ov::AnyMap // Config
>; >;
struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterface<OVInferRequestSetPrecisionParams>, struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterface<OVInferRequestSetPrecisionParams>,
public CommonTestUtils::TestsCommon { public OVInferRequestTestBase {
static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestSetPrecisionParams>& obj); static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestSetPrecisionParams>& obj);
void SetUp() override; void SetUp() override;
void TearDown() override; void TearDown() override;
@ -37,7 +36,6 @@ struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterfa
std::shared_ptr<ov::Model> function; std::shared_ptr<ov::Model> function;
ov::CompiledModel execNet; ov::CompiledModel execNet;
ov::InferRequest req; ov::InferRequest req;
std::string target_device;
ov::AnyMap config; ov::AnyMap config;
element::Type element_type; element::Type element_type;
}; };
@ -45,7 +43,7 @@ struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterfa
using OVInferRequestCheckTensorPrecisionParams = OVInferRequestSetPrecisionParams; using OVInferRequestCheckTensorPrecisionParams = OVInferRequestSetPrecisionParams;
struct OVInferRequestCheckTensorPrecision : public testing::WithParamInterface<OVInferRequestCheckTensorPrecisionParams>, struct OVInferRequestCheckTensorPrecision : public testing::WithParamInterface<OVInferRequestCheckTensorPrecisionParams>,
public CommonTestUtils::TestsCommon { public OVInferRequestTestBase {
static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestCheckTensorPrecisionParams>& obj); static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestCheckTensorPrecisionParams>& obj);
void SetUp() override; void SetUp() override;
void TearDown() override; void TearDown() override;
@ -56,8 +54,7 @@ struct OVInferRequestCheckTensorPrecision : public testing::WithParamInterface<O
CompiledModel compModel; CompiledModel compModel;
InferRequest req; InferRequest req;
AnyMap config; AnyMap config;
std::string target_device; element::Type element_type;
element::Type element_type;
}; };
} // namespace behavior } // namespace behavior

View File

@ -4,14 +4,87 @@
#pragma once #pragma once
#include <future>
#include "base/ov_behavior_test_utils.hpp" #include "base/ov_behavior_test_utils.hpp"
namespace ov { namespace ov {
namespace test { namespace test {
namespace behavior { namespace behavior {
struct OVInferRequestMultithreadingTests : public OVInferRequestTests { using OVInferRequestMultithreadingTests = OVInferRequestTests;
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
}; TEST_P(OVInferRequestMultithreadingTests, canRun3SyncRequestsConsistentlyFromThreads) {
ov::InferRequest req1, req2, req3;
OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request());
auto f1 = std::async(std::launch::async, [&] { req1.infer(); });
auto f2 = std::async(std::launch::async, [&] { req2.infer(); });
auto f3 = std::async(std::launch::async, [&] { req3.infer(); });
f1.wait();
f2.wait();
f3.wait();
OV_ASSERT_NO_THROW(f1.get());
OV_ASSERT_NO_THROW(f2.get());
OV_ASSERT_NO_THROW(f3.get());
}
TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsConsistentlyFromThreadsWithoutWait) {
ov::InferRequest req1, req2, req3;
OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req1.infer());
OV_ASSERT_NO_THROW(req2.infer());
OV_ASSERT_NO_THROW(req3.infer());
auto f1 = std::async(std::launch::async, [&] { req1.start_async(); });
auto f2 = std::async(std::launch::async, [&] { req2.start_async(); });
auto f3 = std::async(std::launch::async, [&] { req3.start_async(); });
f1.wait();
f2.wait();
f3.wait();
OV_ASSERT_NO_THROW(f1.get());
OV_ASSERT_NO_THROW(f2.get());
OV_ASSERT_NO_THROW(f3.get());
}
TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsConsistentlyWithWait) {
ov::InferRequest req1, req2, req3;
OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request());
req1.start_async();
OV_ASSERT_NO_THROW(req1.wait());
req2.start_async();
OV_ASSERT_NO_THROW(req2.wait());
req3.start_async();
OV_ASSERT_NO_THROW(req3.wait());
}
TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsParallelWithWait) {
ov::InferRequest req1, req2, req3;
OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request());
req1.start_async();
req2.start_async();
req3.start_async();
OV_ASSERT_NO_THROW(req2.wait());
OV_ASSERT_NO_THROW(req1.wait());
OV_ASSERT_NO_THROW(req3.wait());
}
} // namespace behavior } // namespace behavior
} // namespace test } // namespace test
} // namespace ov } // namespace ov

View File

@ -9,8 +9,7 @@
namespace ov { namespace ov {
namespace test { namespace test {
namespace behavior { namespace behavior {
struct OVInferRequestPerfCountersTest : public OVInferRequestTests { struct OVInferRequestPerfCountersTest : public virtual OVInferRequestTests {
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
void SetUp() override; void SetUp() override;
ov::InferRequest req; ov::InferRequest req;
}; };

View File

@ -10,7 +10,6 @@ namespace ov {
namespace test { namespace test {
namespace behavior { namespace behavior {
struct OVInferRequestWaitTests : public OVInferRequestTests { struct OVInferRequestWaitTests : public OVInferRequestTests {
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
void SetUp() override; void SetUp() override;
void TearDown() override; void TearDown() override;
ov::InferRequest req; ov::InferRequest req;

View File

@ -13,6 +13,7 @@
#include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/plugin_cache.hpp"
#include "common_test_utils/unicode_utils.hpp" #include "common_test_utils/unicode_utils.hpp"
#include "openvino/util/common_util.hpp" #include "openvino/util/common_util.hpp"
#include "base/ov_behavior_test_utils.hpp"
#include <ie_core.hpp> #include <ie_core.hpp>
#include <ie_common.h> #include <ie_common.h>
@ -33,7 +34,8 @@ using compileModelCacheParams = std::tuple<
>; >;
class CompileModelCacheTestBase : public testing::WithParamInterface<compileModelCacheParams>, class CompileModelCacheTestBase : public testing::WithParamInterface<compileModelCacheParams>,
virtual public SubgraphBaseTest { virtual public SubgraphBaseTest,
virtual public OVPluginTestBase {
std::string m_cacheFolderName; std::string m_cacheFolderName;
std::string m_functionName; std::string m_functionName;
ov::element::Type m_precision; ov::element::Type m_precision;
@ -52,35 +54,21 @@ public:
}; };
using compileKernelsCacheParams = std::tuple< using compileKernelsCacheParams = std::tuple<
std::string, // device name std::string, // device name
std::pair<ov::AnyMap, std::string> // device and cache configuration std::pair<ov::AnyMap, std::string> // device and cache configuration
>; >;
class CompiledKernelsCacheTest : virtual public SubgraphBaseTest, class CompiledKernelsCacheTest : virtual public SubgraphBaseTest,
virtual public OVPluginTestBase,
public testing::WithParamInterface<compileKernelsCacheParams> { public testing::WithParamInterface<compileKernelsCacheParams> {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<compileKernelsCacheParams> obj); static std::string getTestCaseName(testing::TestParamInfo<compileKernelsCacheParams> obj);
protected: protected:
std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name();
std::shared_ptr<ngraph::Function> function;
std::string cache_path; std::string cache_path;
std::vector<std::string> m_extList; std::vector<std::string> m_extList;
void SetUp() override {
function = ngraph::builder::subgraph::makeConvPoolRelu(); void SetUp() override;
std::pair<ov::AnyMap, std::string> userConfig; void TearDown() override;
std::tie(targetDevice, userConfig) = GetParam();
configuration = userConfig.first;
std::string ext = userConfig.second;
std::string::size_type pos = 0;
if ((pos = ext.find(",", pos)) != std::string::npos) {
m_extList.push_back(ext.substr(0, pos));
m_extList.push_back(ext.substr(pos + 1));
} else {
m_extList.push_back(ext);
}
std::replace(test_name.begin(), test_name.end(), '/', '_');
std::replace(test_name.begin(), test_name.end(), '\\', '_');
cache_path = "compiledModel" + test_name + "_cache";
}
}; };
} // namespace behavior } // namespace behavior
} // namespace test } // namespace test

View File

@ -10,30 +10,27 @@ namespace ov {
namespace test { namespace test {
namespace behavior { namespace behavior {
class OVHoldersTest : public CommonTestUtils::TestsCommon, class OVHoldersTest : public OVPluginTestBase,
public ::testing::WithParamInterface<std::string> { public ::testing::WithParamInterface<std::string> {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj); static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);
void SetUp() override; void SetUp() override;
void TearDown() override; void TearDown() override;
protected:
std::string deathTestStyle; std::string deathTestStyle;
std::shared_ptr<ngraph::Function> function; std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
}; };
class OVHoldersTestOnImportedNetwork : public CommonTestUtils::TestsCommon, class OVHoldersTestOnImportedNetwork : public OVPluginTestBase,
public ::testing::WithParamInterface<std::string> { public ::testing::WithParamInterface<std::string> {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj); static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);
void SetUp() override; void SetUp() override;
void TearDown() override; void TearDown() override;
protected:
std::shared_ptr<ngraph::Function> function; std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
std::string deathTestStyle; std::string deathTestStyle;
}; };
} // namespace behavior } // namespace behavior

View File

@ -16,11 +16,10 @@ namespace ov {
namespace test { namespace test {
namespace behavior { namespace behavior {
class OVPropertiesBase : public CommonTestUtils::TestsCommon { class OVPropertiesBase : public OVPluginTestBase {
public: public:
std::shared_ptr<Core> core = utils::PluginCache::get().core(); std::shared_ptr<Core> core = utils::PluginCache::get().core();
std::shared_ptr<Model> model; std::shared_ptr<Model> model;
std::string device_name;
AnyMap properties; AnyMap properties;
}; };

View File

@ -10,6 +10,7 @@
#include "openvino/runtime/compiled_model.hpp" #include "openvino/runtime/compiled_model.hpp"
#include "openvino/op/parameter.hpp" #include "openvino/op/parameter.hpp"
#include "functional_test_utils/ov_plugin_cache.hpp" #include "functional_test_utils/ov_plugin_cache.hpp"
#include "base/ov_behavior_test_utils.hpp"
namespace ov { namespace ov {
namespace test { namespace test {
@ -20,7 +21,7 @@ using RemoteTensorParams = std::tuple<element::Type, // element type
std::pair<ov::AnyMap, ov::AnyMap>>; // remote context and tensor parameters std::pair<ov::AnyMap, ov::AnyMap>>; // remote context and tensor parameters
class OVRemoteTest : public testing::WithParamInterface<RemoteTensorParams>, class OVRemoteTest : public testing::WithParamInterface<RemoteTensorParams>,
public CommonTestUtils::TestsCommon { public ov::test::behavior::OVPluginTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<RemoteTensorParams> obj); static std::string getTestCaseName(testing::TestParamInfo<RemoteTensorParams> obj);
protected: protected:
@ -28,7 +29,6 @@ protected:
void TearDown() override; void TearDown() override;
element::Type element_type; element::Type element_type;
std::string target_device;
ov::AnyMap config; ov::AnyMap config;
ov::AnyMap context_parameters; ov::AnyMap context_parameters;
ov::AnyMap tensor_parameters; ov::AnyMap tensor_parameters;

View File

@ -13,6 +13,7 @@
#include "ngraph_functions/subgraph_builders.hpp" #include "ngraph_functions/subgraph_builders.hpp"
#include "functional_test_utils/blob_utils.hpp" #include "functional_test_utils/blob_utils.hpp"
#include "base/behavior_test_utils.hpp"
using namespace ::testing; using namespace ::testing;
using namespace InferenceEngine; using namespace InferenceEngine;
@ -25,10 +26,10 @@ using AutoBatchTwoNetsParams = std::tuple<
size_t, // number of requests size_t, // number of requests
size_t>; // batch size> size_t>; // batch size>
class AutoBatching_Test : public CommonTestUtils::TestsCommon, class AutoBatching_Test : public BehaviorTestsUtils::IEPluginTestBase,
public testing::WithParamInterface<AutoBatchTwoNetsParams> { public testing::WithParamInterface<AutoBatchTwoNetsParams> {
void SetUp() override { void SetUp() override {
std::tie(device_name, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam(); std::tie(target_device, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
fn_ptrs = {ngraph::builder::subgraph::makeSingleConv(), fn_ptrs = {ngraph::builder::subgraph::makeSingleConv(),
ngraph::builder::subgraph::makeMultiSingleConv()}; ngraph::builder::subgraph::makeMultiSingleConv()};
}; };
@ -36,15 +37,14 @@ public:
static std::string getTestCaseName(const testing::TestParamInfo<AutoBatchTwoNetsParams> &obj) { static std::string getTestCaseName(const testing::TestParamInfo<AutoBatchTwoNetsParams> &obj) {
size_t streams, requests, batch; size_t streams, requests, batch;
bool use_get_blob; bool use_get_blob;
std::string device_name; std::string target_device;
std::tie(device_name, use_get_blob, streams, requests, batch) = obj.param; std::tie(target_device, use_get_blob, streams, requests, batch) = obj.param;
return device_name + std::string(use_get_blob ? "_get_blob" : "_set_blob") + "_batch_size_" + return target_device + std::string(use_get_blob ? "_get_blob" : "_set_blob") + "_batch_size_" +
std::to_string(batch) + std::to_string(batch) +
"_num_streams_" + std::to_string(streams) + "_num_req_" + std::to_string(requests); "_num_streams_" + std::to_string(streams) + "_num_req_" + std::to_string(requests);
} }
protected: protected:
std::string device_name;
bool use_get_blob; bool use_get_blob;
size_t num_streams; size_t num_streams;
size_t num_requests; size_t num_requests;
@ -70,16 +70,16 @@ protected:
n.second->setPrecision(Precision::FP32); n.second->setPrecision(Precision::FP32);
} }
std::map<std::string, std::string> config; std::map<std::string, std::string> config;
if (device_name.find("GPU") != std::string::npos) if (target_device.find("GPU") != std::string::npos)
config[CONFIG_KEY(GPU_THROUGHPUT_STREAMS)] = std::to_string(num_streams); config[CONFIG_KEY(GPU_THROUGHPUT_STREAMS)] = std::to_string(num_streams);
if (device_name.find("CPU") != std::string::npos) { if (target_device.find("CPU") != std::string::npos) {
config[CONFIG_KEY(CPU_THROUGHPUT_STREAMS)] = std::to_string(num_streams); config[CONFIG_KEY(CPU_THROUGHPUT_STREAMS)] = std::to_string(num_streams);
config[CONFIG_KEY(ENFORCE_BF16)] = CONFIG_VALUE(NO); config[CONFIG_KEY(ENFORCE_BF16)] = CONFIG_VALUE(NO);
} }
// minimize timeout to reduce test time // minimize timeout to reduce test time
config[CONFIG_KEY(AUTO_BATCH_TIMEOUT)] = std::to_string(1); config[CONFIG_KEY(AUTO_BATCH_TIMEOUT)] = std::to_string(1);
auto exec_net_ref = ie.LoadNetwork(net, std::string(CommonTestUtils::DEVICE_BATCH) + ":" + auto exec_net_ref = ie.LoadNetwork(net, std::string(CommonTestUtils::DEVICE_BATCH) + ":" +
device_name + "(" + std::to_string(num_batch) + ")", target_device + "(" + std::to_string(num_batch) + ")",
config); config);
auto network_outputs = net.getOutputsInfo(); auto network_outputs = net.getOutputsInfo();
@ -144,7 +144,7 @@ protected:
class AutoBatching_Test_DetectionOutput : public AutoBatching_Test { class AutoBatching_Test_DetectionOutput : public AutoBatching_Test {
public: public:
void SetUp() override { void SetUp() override {
std::tie(device_name, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam(); std::tie(target_device, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
fn_ptrs = {ngraph::builder::subgraph::makeDetectionOutput(), fn_ptrs = {ngraph::builder::subgraph::makeDetectionOutput(),
ngraph::builder::subgraph::makeDetectionOutput()}; ngraph::builder::subgraph::makeDetectionOutput()};
}; };
@ -152,9 +152,9 @@ public:
static std::string getTestCaseName(const testing::TestParamInfo<AutoBatchTwoNetsParams> &obj) { static std::string getTestCaseName(const testing::TestParamInfo<AutoBatchTwoNetsParams> &obj) {
size_t streams, requests, batch; size_t streams, requests, batch;
bool use_get_blob; bool use_get_blob;
std::string device_name; std::string target_device;
std::tie(device_name, use_get_blob, streams, requests, batch) = obj.param; std::tie(target_device, use_get_blob, streams, requests, batch) = obj.param;
return "DetectionOutput_HETERO_" + device_name + std::string(use_get_blob ? "_get_blob" : "_set_blob") + return "DetectionOutput_HETERO_" + target_device + std::string(use_get_blob ? "_get_blob" : "_set_blob") +
"_batch_size_" + std::to_string(batch) + "_batch_size_" + std::to_string(batch) +
"_num_streams_" + std::to_string(streams) + "_num_req_" + std::to_string(requests); "_num_streams_" + std::to_string(streams) + "_num_req_" + std::to_string(requests);
} }

View File

@ -13,6 +13,7 @@
#include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/plugin_cache.hpp"
#include "common_test_utils/unicode_utils.hpp" #include "common_test_utils/unicode_utils.hpp"
#include "openvino/util/common_util.hpp" #include "openvino/util/common_util.hpp"
#include "base/behavior_test_utils.hpp"
#include <ie_core.hpp> #include <ie_core.hpp>
#include <ie_common.h> #include <ie_common.h>
@ -30,6 +31,7 @@ using loadNetworkCacheParams = std::tuple<
namespace LayerTestsDefinitions { namespace LayerTestsDefinitions {
class LoadNetworkCacheTestBase : public testing::WithParamInterface<loadNetworkCacheParams>, class LoadNetworkCacheTestBase : public testing::WithParamInterface<loadNetworkCacheParams>,
virtual public BehaviorTestsUtils::IEPluginTestBase,
virtual public LayerTestsUtils::LayerTestsCommon { virtual public LayerTestsUtils::LayerTestsCommon {
std::string m_cacheFolderName; std::string m_cacheFolderName;
std::string m_functionName; std::string m_functionName;
@ -52,18 +54,21 @@ using compileKernelsCacheParams = std::tuple<
std::pair<std::map<std::string, std::string>, std::string> // device and cache configuration std::pair<std::map<std::string, std::string>, std::string> // device and cache configuration
>; >;
class LoadNetworkCompiledKernelsCacheTest : virtual public LayerTestsUtils::LayerTestsCommon, class LoadNetworkCompiledKernelsCacheTest : virtual public LayerTestsUtils::LayerTestsCommon,
public testing::WithParamInterface<compileKernelsCacheParams> { virtual public BehaviorTestsUtils::IEPluginTestBase,
public testing::WithParamInterface<compileKernelsCacheParams> {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<compileKernelsCacheParams> obj); static std::string getTestCaseName(testing::TestParamInfo<compileKernelsCacheParams> obj);
protected: protected:
std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name();
std::shared_ptr<ngraph::Function> function;
std::string cache_path; std::string cache_path;
std::vector<std::string> m_extList; std::vector<std::string> m_extList;
void SetUp() override { void SetUp() override {
function = ngraph::builder::subgraph::makeConvPoolRelu();
std::pair<std::map<std::string, std::string>, std::string> userConfig; std::pair<std::map<std::string, std::string>, std::string> userConfig;
std::tie(targetDevice, userConfig) = GetParam(); std::tie(targetDevice, userConfig) = GetParam();
target_device = targetDevice;
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu();
configuration = userConfig.first; configuration = userConfig.first;
std::string ext = userConfig.second; std::string ext = userConfig.second;
std::string::size_type pos = 0; std::string::size_type pos = 0;

View File

@ -7,6 +7,7 @@
#include <tuple> #include <tuple>
#include <string> #include <string>
#include <vector> #include <vector>
#include <algorithm>
#include <ie_core.hpp> #include <ie_core.hpp>
#include <ie_parameter.hpp> #include <ie_parameter.hpp>
@ -17,6 +18,7 @@
#include "common_test_utils/test_common.hpp" #include "common_test_utils/test_common.hpp"
#include "common_test_utils/file_utils.hpp" #include "common_test_utils/file_utils.hpp"
#include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/plugin_cache.hpp"
#include "base/behavior_test_utils.hpp"
namespace BehaviorTestsDefinitions { namespace BehaviorTestsDefinitions {
@ -34,7 +36,8 @@ using DefaultConfigurationParameters = std::tuple<
DefaultParameter // default parameter key value comparator DefaultParameter // default parameter key value comparator
>; >;
struct DefaultConfigurationTest : public CommonTestUtils::TestsCommon, public ::testing::WithParamInterface<DefaultConfigurationParameters> { struct DefaultConfigurationTest : public BehaviorTestsUtils::IEPluginTestBase,
public ::testing::WithParamInterface<DefaultConfigurationParameters> {
enum { enum {
DeviceName, DefaultParamterId DeviceName, DefaultParamterId
}; };
@ -43,16 +46,14 @@ struct DefaultConfigurationTest : public CommonTestUtils::TestsCommon, public ::
protected: protected:
std::shared_ptr<InferenceEngine::Core> _core = PluginCache::get().ie(); std::shared_ptr<InferenceEngine::Core> _core = PluginCache::get().ie();
std::string targetDevice;
DefaultParameter defaultParameter; DefaultParameter defaultParameter;
}; };
class ConfigBase : public CommonTestUtils::TestsCommon { class ConfigBase : public BehaviorTestsUtils::IEPluginTestBase {
public: public:
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(); std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
std::shared_ptr<ngraph::Function> function; std::shared_ptr<ngraph::Function> function;
InferenceEngine::CNNNetwork cnnNet; InferenceEngine::CNNNetwork cnnNet;
std::string targetDevice;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
}; };
@ -60,17 +61,19 @@ class BehaviorTestsEmptyConfig : public testing::WithParamInterface<std::string>
public ConfigBase { public ConfigBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj) { static std::string getTestCaseName(testing::TestParamInfo<std::string> obj) {
std::string targetDevice; std::string target_device;
targetDevice = obj.param; target_device = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << targetDevice; result << "target_device=" << target_device;
return result.str(); return result.str();
} }
void SetUp() override { // Skip test according to plugin specific disabledTestPatterns() (if any) void SetUp() override { // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
// Create CNNNetwork from ngrpah::Function // Create CNNNetwork from ngrpah::Function
targetDevice = this->GetParam(); target_device = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu(); function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
} }
@ -85,20 +88,24 @@ class BehaviorTestsSingleOptionDefault : public testing::WithParamInterface<Beha
public ConfigBase { public ConfigBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<BehaviorParamsSingleOptionDefault> obj) { static std::string getTestCaseName(testing::TestParamInfo<BehaviorParamsSingleOptionDefault> obj) {
std::string targetDevice; std::string target_device;
std::pair<std::string, InferenceEngine::Parameter> configuration; std::pair<std::string, InferenceEngine::Parameter> configuration;
std::tie(targetDevice, configuration) = obj.param; std::tie(target_device, configuration) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << targetDevice << "_"; result << "target_device=" << target_device << "_";
result << "config=" << "(" << configuration.first << "_" << configuration.second.as<std::string>() << ")"; std::string config_value = configuration.second.as<std::string>();
std::replace(config_value.begin(), config_value.end(), '-', '_');
result << "config=" << "(" << configuration.first << "_" << config_value << ")";
return result.str(); return result.str();
} }
void SetUp() override { void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::pair<std::string, InferenceEngine::Parameter> entry; std::pair<std::string, InferenceEngine::Parameter> entry;
std::tie(targetDevice, entry) = this->GetParam(); std::tie(target_device, entry) = this->GetParam();
std::tie(key, value) = entry; std::tie(key, value) = entry;
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
} }
std::string key; std::string key;
@ -114,11 +121,12 @@ class CorrectConfigTests : public testing::WithParamInterface<CorrectConfigParam
public ConfigBase { public ConfigBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<CorrectConfigParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<CorrectConfigParams> obj) {
std::string targetDevice; std::string target_device;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
std::tie(targetDevice, configuration) = obj.param; std::tie(target_device, configuration) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << targetDevice << "_"; result << "target_device=" << target_device << "_";
if (!configuration.empty()) { if (!configuration.empty()) {
using namespace CommonTestUtils; using namespace CommonTestUtils;
result << "config=" << (configuration); result << "config=" << (configuration);
@ -127,9 +135,10 @@ public:
} }
void SetUp() override { void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::map<std::string, std::string> entry; std::map<std::string, std::string> entry;
std::tie(targetDevice, configuration) = this->GetParam(); std::tie(target_device, configuration) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu(); function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
} }
@ -138,6 +147,7 @@ public:
if (!configuration.empty()) { if (!configuration.empty()) {
PluginCache::get().reset(); PluginCache::get().reset();
} }
APIBaseTest::TearDown();
} }
}; };
@ -152,7 +162,7 @@ public:
void SetUp() override { void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tuple<std::string, std::string, InferenceEngine::Parameter> entry; std::tuple<std::string, std::string, InferenceEngine::Parameter> entry;
std::tie(targetDevice, entry) = this->GetParam(); std::tie(target_device, entry) = this->GetParam();
std::tie(key, value, reference) = entry; std::tie(key, value, reference) = entry;
function = ngraph::builder::subgraph::makeConvPoolRelu(); function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
@ -172,8 +182,9 @@ class BehaviorTestsSingleOption : public testing::WithParamInterface<BehaviorPar
public ConfigBase { public ConfigBase {
public: public:
void SetUp() override { void SetUp() override {
std::tie(target_device, key) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(targetDevice, key) = this->GetParam(); APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu(); function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
} }
@ -191,12 +202,13 @@ class SetPropLoadNetWorkGetPropTests : public testing::WithParamInterface<LoadNe
public ConfigBase { public ConfigBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<LoadNetWorkPropertiesParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<LoadNetWorkPropertiesParams> obj) {
std::string targetDevice; std::string target_device;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
std::map<std::string, std::string> loadNetWorkConfig; std::map<std::string, std::string> loadNetWorkConfig;
std::tie(targetDevice, configuration, loadNetWorkConfig) = obj.param; std::tie(target_device, configuration, loadNetWorkConfig) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << targetDevice << "_"; result << "target_device=" << target_device << "_";
if (!configuration.empty()) { if (!configuration.empty()) {
result << "configItem="; result << "configItem=";
for (auto& configItem : configuration) { for (auto& configItem : configuration) {
@ -215,9 +227,10 @@ public:
} }
void SetUp() override { void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::map<std::string, std::string> entry; std::map<std::string, std::string> entry;
std::tie(targetDevice, configuration, loadNetWorkConfig) = this->GetParam(); std::tie(target_device, configuration, loadNetWorkConfig) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu(); function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function); cnnNet = InferenceEngine::CNNNetwork(function);
} }

View File

@ -25,19 +25,22 @@ namespace BehaviorTestsDefinitions {
#define ASSERT_METRIC_SUPPORTED_IE(metricName) \ #define ASSERT_METRIC_SUPPORTED_IE(metricName) \
{ \ { \
std::vector<std::string> metrics = \ std::vector<std::string> metrics = \
ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)); \ ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)); \
auto it = std::find(metrics.begin(), metrics.end(), metricName); \ auto it = std::find(metrics.begin(), metrics.end(), metricName); \
ASSERT_NE(metrics.end(), it); \ ASSERT_NE(metrics.end(), it); \
} }
class IEClassBasicTestP : public ::testing::Test, public ::testing::WithParamInterface<std::pair<std::string, std::string> > { class IEClassBasicTestP : public BehaviorTestsUtils::IEPluginTestBase,
public ::testing::WithParamInterface<std::pair<std::string, std::string> > {
protected: protected:
std::string deviceName; std::string deviceName;
std::string pluginName; std::string pluginName;
public: public:
void SetUp() override { void SetUp() override {
std::tie(pluginName, target_device) = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(pluginName, deviceName) = GetParam(); ov::test::behavior::APIBaseTest::SetUp();
pluginName += IE_BUILD_POSTFIX; pluginName += IE_BUILD_POSTFIX;
if (pluginName == (std::string("openvino_template_plugin") + IE_BUILD_POSTFIX)) { if (pluginName == (std::string("openvino_template_plugin") + IE_BUILD_POSTFIX)) {
pluginName = ov::util::make_plugin_library_name(CommonTestUtils::getExecutableDirectory(), pluginName); pluginName = ov::util::make_plugin_library_name(CommonTestUtils::getExecutableDirectory(), pluginName);
@ -45,14 +48,14 @@ public:
} }
}; };
class IEClassSetDefaultDeviceIDTest : public ::testing::Test, class IEClassSetDefaultDeviceIDTest : public BehaviorTestsUtils::IEPluginTestBase,
public ::testing::WithParamInterface<std::pair<std::string, std::string>> { public ::testing::WithParamInterface<std::pair<std::string, std::string>> {
protected: protected:
std::string deviceName;
std::string deviceID; std::string deviceID;
public: public:
void SetUp() override { void SetUp() override {
std::tie(deviceName, deviceID) = GetParam(); std::tie(target_device, deviceID) = GetParam();
} }
}; };
@ -78,31 +81,35 @@ using IEClassGetMetricTest_RANGE_FOR_STREAMS = BehaviorTestsUtils::IEClassBaseTe
using IEClassSetGlobalConfigTest = BehaviorTestsUtils::IEClassBaseTestP; using IEClassSetGlobalConfigTest = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassSpecificDeviceTestSetConfig = BehaviorTestsUtils::IEClassBaseTestP; using IEClassSpecificDeviceTestSetConfig = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassSpecificDeviceTestGetConfig = BehaviorTestsUtils::IEClassBaseTestP; using IEClassSpecificDeviceTestGetConfig = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassLoadNetworkAfterCoreRecreateTest = BehaviorTestsUtils::IEClassBaseTestP; using IEClassLoadNetworkAfterCoreRecreateTest = BehaviorTestsUtils::IEClassBaseTestP;
class IEClassSeveralDevicesTest : public BehaviorTestsUtils::IEClassNetworkTest, class IEClassSeveralDevicesTest : public BehaviorTestsUtils::IEPluginTestBase,
public BehaviorTestsUtils::IEClassNetworkTest,
public ::testing::WithParamInterface<std::vector<std::string>> { public ::testing::WithParamInterface<std::vector<std::string>> {
public: public:
std::vector<std::string> deviceNames; std::vector<std::string> target_devices;
void SetUp() override { void SetUp() override {
target_device = CommonTestUtils::DEVICE_MULTI;
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::test::behavior::APIBaseTest::SetUp();
IEClassNetworkTest::SetUp(); IEClassNetworkTest::SetUp();
deviceNames = GetParam(); target_devices = GetParam();
} }
}; };
using IEClassSeveralDevicesTestLoadNetwork = IEClassSeveralDevicesTest; using IEClassSeveralDevicesTestLoadNetwork = IEClassSeveralDevicesTest;
using IEClassSeveralDevicesTestQueryNetwork = IEClassSeveralDevicesTest; using IEClassSeveralDevicesTestQueryNetwork = IEClassSeveralDevicesTest;
using IEClassSeveralDevicesTestDefaultCore = IEClassSeveralDevicesTest; using IEClassSeveralDevicesTestDefaultCore = IEClassSeveralDevicesTest;
bool supportsAvaliableDevices(InferenceEngine::Core &ie, const std::string &deviceName) { bool supportsAvaliableDevices(InferenceEngine::Core &ie, const std::string &target_device) {
auto supportedMetricKeys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>(); auto supportedMetricKeys = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
return supportedMetricKeys.end() != std::find(std::begin(supportedMetricKeys), return supportedMetricKeys.end() != std::find(std::begin(supportedMetricKeys),
std::end(supportedMetricKeys), std::end(supportedMetricKeys),
METRIC_KEY(AVAILABLE_DEVICES)); METRIC_KEY(AVAILABLE_DEVICES));
} }
bool supportsDeviceID(InferenceEngine::Core &ie, const std::string &deviceName) { bool supportsDeviceID(InferenceEngine::Core &ie, const std::string &target_device) {
auto supportedConfigKeys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>(); auto supportedConfigKeys = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
return supportedConfigKeys.end() != std::find(std::begin(supportedConfigKeys), return supportedConfigKeys.end() != std::find(std::begin(supportedConfigKeys),
std::end(supportedConfigKeys), std::end(supportedConfigKeys),
CONFIG_KEY(DEVICE_ID)); CONFIG_KEY(DEVICE_ID));
@ -117,7 +124,7 @@ TEST(IEClassBasicTest, smoke_createDefault) {
TEST_P(IEClassBasicTestP, registerExistingPluginThrows) { TEST_P(IEClassBasicTestP, registerExistingPluginThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_THROW(ie.RegisterPlugin(pluginName, deviceName), InferenceEngine::Exception); ASSERT_THROW(ie.RegisterPlugin(pluginName, target_device), InferenceEngine::Exception);
} }
TEST_P(IEClassBasicTestP, registerNewPluginNoThrows) { TEST_P(IEClassBasicTestP, registerNewPluginNoThrows) {
@ -159,7 +166,6 @@ TEST(IEClassBasicTest, smoke_createMockEngineConfigThrows) {
ASSERT_THROW(InferenceEngine::Core ie(filename), InferenceEngine::Exception); ASSERT_THROW(InferenceEngine::Core ie(filename), InferenceEngine::Exception);
CommonTestUtils::removeFile(filename.c_str()); CommonTestUtils::removeFile(filename.c_str());
} }
#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT #ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) { TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {
@ -184,7 +190,7 @@ TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {
ASSERT_NO_THROW(ie.RegisterPlugins(ov::util::wstring_to_string(pluginsXmlW))); ASSERT_NO_THROW(ie.RegisterPlugins(ov::util::wstring_to_string(pluginsXmlW)));
CommonTestUtils::removeFile(pluginsXmlW); CommonTestUtils::removeFile(pluginsXmlW);
ASSERT_NO_THROW(ie.GetVersions("mock")); // from pluginXM ASSERT_NO_THROW(ie.GetVersions("mock")); // from pluginXM
ASSERT_NO_THROW(ie.GetVersions(deviceName)); ASSERT_NO_THROW(ie.GetVersions(target_device));
GTEST_COUT << "Plugin created " << testIndex << std::endl; GTEST_COUT << "Plugin created " << testIndex << std::endl;
ASSERT_NO_THROW(ie.RegisterPlugin(pluginName, "TEST_DEVICE")); ASSERT_NO_THROW(ie.RegisterPlugin(pluginName, "TEST_DEVICE"));
@ -211,17 +217,17 @@ TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {
TEST_P(IEClassBasicTestP, getVersionsByExactDeviceNoThrow) { TEST_P(IEClassBasicTestP, getVersionsByExactDeviceNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.GetVersions(deviceName + ".0")); ASSERT_NO_THROW(ie.GetVersions(target_device + ".0"));
} }
TEST_P(IEClassBasicTestP, getVersionsByDeviceClassNoThrow) { TEST_P(IEClassBasicTestP, getVersionsByDeviceClassNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.GetVersions(deviceName)); ASSERT_NO_THROW(ie.GetVersions(target_device));
} }
TEST_P(IEClassBasicTestP, getVersionsNonEmpty) { TEST_P(IEClassBasicTestP, getVersionsNonEmpty) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_EQ(2, ie.GetVersions(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName).size()); ASSERT_EQ(2, ie.GetVersions(CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device).size());
} }
// //
@ -231,22 +237,22 @@ TEST_P(IEClassBasicTestP, getVersionsNonEmpty) {
TEST_P(IEClassBasicTestP, unregisterExistingPluginNoThrow) { TEST_P(IEClassBasicTestP, unregisterExistingPluginNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
// device instance is not created yet // device instance is not created yet
ASSERT_THROW(ie.UnregisterPlugin(deviceName), InferenceEngine::Exception); ASSERT_THROW(ie.UnregisterPlugin(target_device), InferenceEngine::Exception);
// make the first call to IE which created device instance // make the first call to IE which created device instance
ie.GetVersions(deviceName); ie.GetVersions(target_device);
// now, we can unregister device // now, we can unregister device
ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName)); ASSERT_NO_THROW(ie.UnregisterPlugin(target_device));
} }
TEST_P(IEClassBasicTestP, accessToUnregisteredPluginThrows) { TEST_P(IEClassBasicTestP, accessToUnregisteredPluginThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_THROW(ie.UnregisterPlugin(deviceName), InferenceEngine::Exception); ASSERT_THROW(ie.UnregisterPlugin(target_device), InferenceEngine::Exception);
ASSERT_NO_THROW(ie.GetVersions(deviceName)); ASSERT_NO_THROW(ie.GetVersions(target_device));
ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName)); ASSERT_NO_THROW(ie.UnregisterPlugin(target_device));
ASSERT_NO_THROW(ie.SetConfig({}, deviceName)); ASSERT_NO_THROW(ie.SetConfig({}, target_device));
ASSERT_NO_THROW(ie.GetVersions(deviceName)); ASSERT_NO_THROW(ie.GetVersions(target_device));
ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName)); ASSERT_NO_THROW(ie.UnregisterPlugin(target_device));
} }
TEST(IEClassBasicTest, smoke_unregisterNonExistingPluginThrows) { TEST(IEClassBasicTest, smoke_unregisterNonExistingPluginThrows) {
@ -261,7 +267,7 @@ TEST(IEClassBasicTest, smoke_unregisterNonExistingPluginThrows) {
TEST_P(IEClassBasicTestP, SetConfigAllThrows) { TEST_P(IEClassBasicTestP, SetConfigAllThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.SetConfig({{"unsupported_key", "4"}})); ASSERT_NO_THROW(ie.SetConfig({{"unsupported_key", "4"}}));
ASSERT_ANY_THROW(ie.GetVersions(deviceName)); ASSERT_ANY_THROW(ie.GetVersions(target_device));
} }
TEST_P(IEClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) { TEST_P(IEClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) {
@ -272,13 +278,13 @@ TEST_P(IEClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) {
TEST_P(IEClassBasicTestP, SetConfigNoThrow) { TEST_P(IEClassBasicTestP, SetConfigNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}, ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}},
deviceName)); target_device));
} }
TEST_P(IEClassBasicTestP, SetConfigAllNoThrow) { TEST_P(IEClassBasicTestP, SetConfigAllNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}})); ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}));
ASSERT_NO_THROW(ie.GetVersions(deviceName)); ASSERT_NO_THROW(ie.GetVersions(target_device));
} }
TEST(IEClassBasicTest, smoke_SetConfigHeteroThrows) { TEST(IEClassBasicTest, smoke_SetConfigHeteroThrows) {
@ -291,17 +297,17 @@ TEST_P(IEClassBasicTestP, SetGetConfigForTbbTerminateThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
bool value = false; bool value = false;
ASSERT_NO_THROW(ie.SetConfig({{CONFIG_KEY(FORCE_TBB_TERMINATE), CONFIG_VALUE(YES)}})); ASSERT_NO_THROW(ie.SetConfig({{CONFIG_KEY(FORCE_TBB_TERMINATE), CONFIG_VALUE(YES)}}));
ASSERT_NO_THROW(value = ie.GetConfig(deviceName, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>()); ASSERT_NO_THROW(value = ie.GetConfig(target_device, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
ASSERT_TRUE(value); ASSERT_TRUE(value);
ASSERT_NO_THROW(ie.SetConfig({{CONFIG_KEY(FORCE_TBB_TERMINATE), CONFIG_VALUE(NO)}})); ASSERT_NO_THROW(ie.SetConfig({{CONFIG_KEY(FORCE_TBB_TERMINATE), CONFIG_VALUE(NO)}}));
ASSERT_NO_THROW(value = ie.GetConfig(deviceName, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>()); ASSERT_NO_THROW(value = ie.GetConfig(target_device, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
ASSERT_FALSE(value); ASSERT_FALSE(value);
} }
TEST_P(IEClassBasicTestP, SetConfigHeteroTargetFallbackThrows) { TEST_P(IEClassBasicTestP, SetConfigHeteroTargetFallbackThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.SetConfig({{"TARGET_FALLBACK", deviceName}}, CommonTestUtils::DEVICE_HETERO)); ASSERT_NO_THROW(ie.SetConfig({{"TARGET_FALLBACK", target_device}}, CommonTestUtils::DEVICE_HETERO));
} }
TEST(IEClassBasicTest, smoke_SetConfigHeteroNoThrow) { TEST(IEClassBasicTest, smoke_SetConfigHeteroNoThrow) {
@ -322,23 +328,23 @@ TEST(IEClassBasicTest, smoke_SetConfigHeteroNoThrow) {
TEST_P(IEClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) { TEST_P(IEClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
std::string deviceID, clearDeviceName; std::string deviceID, cleartarget_device;
auto pos = deviceName.find('.'); auto pos = target_device.find('.');
if (pos != std::string::npos) { if (pos != std::string::npos) {
clearDeviceName = deviceName.substr(0, pos); cleartarget_device = target_device.substr(0, pos);
deviceID = deviceName.substr(pos + 1, deviceName.size()); deviceID = target_device.substr(pos + 1, target_device.size());
} }
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
GTEST_SKIP(); GTEST_SKIP();
} }
std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES)); std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) { if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
GTEST_SKIP(); GTEST_SKIP();
} }
ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}, deviceName)); ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}, target_device));
std::string value; std::string value;
ASSERT_NO_THROW(value = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>()); ASSERT_NO_THROW(value = ie.GetConfig(target_device, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES); ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES);
} }
@ -349,8 +355,8 @@ TEST_P(IEClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) {
TEST_P(IEClassBasicTestP, ImportNetworkThrows) { TEST_P(IEClassBasicTestP, ImportNetworkThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (deviceName == CommonTestUtils::DEVICE_GPU) { if (target_device == CommonTestUtils::DEVICE_GPU) {
ASSERT_THROW(ie.ImportNetwork("model", deviceName), InferenceEngine::NetworkNotRead); ASSERT_THROW(ie.ImportNetwork("model", target_device), InferenceEngine::NetworkNotRead);
const std::string modelName = "compiled_blob.blob"; const std::string modelName = "compiled_blob.blob";
{ {
@ -358,7 +364,7 @@ TEST_P(IEClassBasicTestP, ImportNetworkThrows) {
file << "content"; file << "content";
} }
EXPECT_THROW(ie.ImportNetwork(modelName, deviceName), InferenceEngine::NotImplemented); EXPECT_THROW(ie.ImportNetwork(modelName, target_device), InferenceEngine::NotImplemented);
ASSERT_EQ(0, std::remove(modelName.c_str())); ASSERT_EQ(0, std::remove(modelName.c_str()));
} }
} }
@ -387,14 +393,14 @@ TEST_P(IEClassBasicTestP, ImportNetworkWithNullContextThrows) {
TEST_P(IEClassNetworkTestP, QueryNetworkActualThrows) { TEST_P(IEClassNetworkTestP, QueryNetworkActualThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName)); ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device));
} }
TEST_P(IEClassNetworkTestP, QueryNetworkActualNoThrow) { TEST_P(IEClassNetworkTestP, QueryNetworkActualNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
try { try {
ie.QueryNetwork(actualCnnNetwork, deviceName); ie.QueryNetwork(actualCnnNetwork, target_device);
} catch (const InferenceEngine::Exception& ex) { } catch (const InferenceEngine::Exception& ex) {
std::string message = ex.what(); std::string message = ex.what();
ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@ -405,12 +411,12 @@ TEST_P(IEClassNetworkTestP, QueryNetworkWithKSO) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
try { try {
auto rres = ie.QueryNetwork(ksoCnnNetwork, deviceName); auto rres = ie.QueryNetwork(ksoCnnNetwork, target_device);
auto rl_map = rres.supportedLayersMap; auto rl_map = rres.supportedLayersMap;
auto func = ksoCnnNetwork.getFunction(); auto func = ksoCnnNetwork.getFunction();
for (const auto & op : func->get_ops()) { for (const auto & op : func->get_ops()) {
if (!rl_map.count(op->get_friendly_name())) { if (!rl_map.count(op->get_friendly_name())) {
FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName; FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device;
} }
} }
} catch (const InferenceEngine::Exception& ex) { } catch (const InferenceEngine::Exception& ex) {
@ -422,26 +428,26 @@ TEST_P(IEClassNetworkTestP, QueryNetworkWithKSO) {
TEST_P(IEClassSeveralDevicesTestQueryNetwork, QueryNetworkActualSeveralDevicesNoThrow) { TEST_P(IEClassSeveralDevicesTestQueryNetwork, QueryNetworkActualSeveralDevicesNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
std::string clearDeviceName; std::string cleartarget_device;
auto pos = deviceNames.begin()->find('.'); auto pos = target_devices.begin()->find('.');
if (pos != std::string::npos) { if (pos != std::string::npos) {
clearDeviceName = deviceNames.begin()->substr(0, pos); cleartarget_device = target_devices.begin()->substr(0, pos);
} }
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
GTEST_SKIP(); GTEST_SKIP();
} }
std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES)); std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
if (deviceIDs.size() < deviceNames.size()) if (deviceIDs.size() < target_devices.size())
GTEST_SKIP(); GTEST_SKIP();
std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":"); std::string multitarget_device = CommonTestUtils::DEVICE_MULTI + std::string(":");
for (auto& dev_name : deviceNames) { for (auto& dev_name : target_devices) {
multiDeviceName += dev_name; multitarget_device += dev_name;
if (&dev_name != &(deviceNames.back())) { if (&dev_name != &(target_devices.back())) {
multiDeviceName += ","; multitarget_device += ",";
} }
} }
ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, multiDeviceName)); ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, multitarget_device));
} }
TEST_P(IEClassNetworkTestP, SetAffinityWithConstantBranches) { TEST_P(IEClassNetworkTestP, SetAffinityWithConstantBranches) {
@ -477,18 +483,18 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithConstantBranches) {
} }
InferenceEngine::CNNNetwork net(func); InferenceEngine::CNNNetwork net(func);
auto rres = ie.QueryNetwork(net, deviceName); auto rres = ie.QueryNetwork(net, target_device);
auto rl_map = rres.supportedLayersMap; auto rl_map = rres.supportedLayersMap;
for (const auto & op : func->get_ops()) { for (const auto & op : func->get_ops()) {
if (!rl_map.count(op->get_friendly_name())) { if (!rl_map.count(op->get_friendly_name())) {
FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName; FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device;
} }
} }
for (const auto & op : net.getFunction()->get_ops()) { for (const auto & op : net.getFunction()->get_ops()) {
std::string affinity = rl_map[op->get_friendly_name()]; std::string affinity = rl_map[op->get_friendly_name()];
op->get_rt_info()["affinity"] = affinity; op->get_rt_info()["affinity"] = affinity;
} }
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, target_device);
} catch (const InferenceEngine::NotImplemented & ex) { } catch (const InferenceEngine::NotImplemented & ex) {
std::string message = ex.what(); std::string message = ex.what();
ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@ -499,19 +505,19 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithKSO) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
try { try {
auto rres = ie.QueryNetwork(ksoCnnNetwork, deviceName); auto rres = ie.QueryNetwork(ksoCnnNetwork, target_device);
auto rl_map = rres.supportedLayersMap; auto rl_map = rres.supportedLayersMap;
auto func = ksoCnnNetwork.getFunction(); auto func = ksoCnnNetwork.getFunction();
for (const auto & op : func->get_ops()) { for (const auto & op : func->get_ops()) {
if (!rl_map.count(op->get_friendly_name())) { if (!rl_map.count(op->get_friendly_name())) {
FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName; FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device;
} }
} }
for (const auto & op : ksoCnnNetwork.getFunction()->get_ops()) { for (const auto & op : ksoCnnNetwork.getFunction()->get_ops()) {
std::string affinity = rl_map[op->get_friendly_name()]; std::string affinity = rl_map[op->get_friendly_name()];
op->get_rt_info()["affinity"] = affinity; op->get_rt_info()["affinity"] = affinity;
} }
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, deviceName); InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, target_device);
} catch (const InferenceEngine::Exception& ex) { } catch (const InferenceEngine::Exception& ex) {
std::string message = ex.what(); std::string message = ex.what();
ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@ -521,7 +527,7 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithKSO) {
TEST_P(IEClassNetworkTestP, QueryNetworkHeteroActualNoThrow) { TEST_P(IEClassNetworkTestP, QueryNetworkHeteroActualNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::QueryNetworkResult res; InferenceEngine::QueryNetworkResult res;
ASSERT_NO_THROW(res = ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", deviceName}})); ASSERT_NO_THROW(res = ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", target_device}}));
ASSERT_LT(0, res.supportedLayersMap.size()); ASSERT_LT(0, res.supportedLayersMap.size());
} }
@ -533,9 +539,9 @@ TEST_P(IEClassNetworkTestP, QueryNetworkMultiThrows) {
TEST(IEClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) { TEST(IEClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
std::string deviceName = CommonTestUtils::DEVICE_HETERO; std::string target_device = CommonTestUtils::DEVICE_HETERO;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)));
std::vector<std::string> t = p; std::vector<std::string> t = p;
std::cout << "Supported HETERO metrics: " << std::endl; std::cout << "Supported HETERO metrics: " << std::endl;
@ -549,9 +555,9 @@ TEST(IEClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) {
TEST(IEClassBasicTest, smoke_GetMetricSupportedConfigKeysHeteroNoThrow) { TEST(IEClassBasicTest, smoke_GetMetricSupportedConfigKeysHeteroNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
std::string deviceName = CommonTestUtils::DEVICE_HETERO; std::string target_device = CommonTestUtils::DEVICE_HETERO;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> t = p; std::vector<std::string> t = p;
std::cout << "Supported HETERO config keys: " << std::endl; std::cout << "Supported HETERO config keys: " << std::endl;
@ -573,7 +579,7 @@ TEST_P(IEClassGetMetricTest_SUPPORTED_METRICS, GetMetricAndPrintNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)));
std::vector<std::string> t = p; std::vector<std::string> t = p;
std::cout << "Supported metrics: " << std::endl; std::cout << "Supported metrics: " << std::endl;
@ -588,7 +594,7 @@ TEST_P(IEClassGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricAndPrintNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> t = p; std::vector<std::string> t = p;
std::cout << "Supported config values: " << std::endl; std::cout << "Supported config values: " << std::endl;
@ -603,7 +609,7 @@ TEST_P(IEClassGetMetricTest_AVAILABLE_DEVICES, GetMetricAndPrintNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)));
std::vector<std::string> t = p; std::vector<std::string> t = p;
std::cout << "Available devices: " << std::endl; std::cout << "Available devices: " << std::endl;
@ -618,7 +624,7 @@ TEST_P(IEClassGetMetricTest_FULL_DEVICE_NAME, GetMetricAndPrintNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(FULL_DEVICE_NAME))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(FULL_DEVICE_NAME)));
std::string t = p; std::string t = p;
std::cout << "Full device name: " << std::endl << t << std::endl; std::cout << "Full device name: " << std::endl << t << std::endl;
@ -629,7 +635,7 @@ TEST_P(IEClassGetMetricTest_OPTIMIZATION_CAPABILITIES, GetMetricAndPrintNoThrow)
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(OPTIMIZATION_CAPABILITIES))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(OPTIMIZATION_CAPABILITIES)));
std::vector<std::string> t = p; std::vector<std::string> t = p;
std::cout << "Optimization capabilities: " << std::endl; std::cout << "Optimization capabilities: " << std::endl;
@ -644,7 +650,7 @@ TEST_P(IEClassGetMetricTest_DEVICE_GOPS, GetMetricAndPrintNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(DEVICE_GOPS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(DEVICE_GOPS)));
std::map<InferenceEngine::Precision, float> t = p; std::map<InferenceEngine::Precision, float> t = p;
std::cout << "Device GOPS: " << std::endl; std::cout << "Device GOPS: " << std::endl;
@ -659,7 +665,7 @@ TEST_P(IEClassGetMetricTest_DEVICE_TYPE, GetMetricAndPrintNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(DEVICE_TYPE))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(DEVICE_TYPE)));
InferenceEngine::Metrics::DeviceType t = p; InferenceEngine::Metrics::DeviceType t = p;
std::cout << "Device Type: " << t << std::endl; std::cout << "Device Type: " << t << std::endl;
@ -671,7 +677,7 @@ TEST_P(IEClassGetMetricTest_NUMBER_OF_WAITING_INFER_REQUESTS, GetMetricAndPrintN
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(NUMBER_OF_WAITING_INFER_REQUESTS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(NUMBER_OF_WAITING_INFER_REQUESTS)));
unsigned int t = p; unsigned int t = p;
std::cout << "Number of waiting infer requests: " << std::endl << t << std::endl; std::cout << "Number of waiting infer requests: " << std::endl << t << std::endl;
@ -683,7 +689,7 @@ TEST_P(IEClassGetMetricTest_NUMBER_OF_EXEC_INFER_REQUESTS, GetMetricAndPrintNoTh
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(NUMBER_OF_EXEC_INFER_REQUESTS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(NUMBER_OF_EXEC_INFER_REQUESTS)));
unsigned int t = p; unsigned int t = p;
std::cout << "Number of executing infer requests: " << std::endl << t << std::endl; std::cout << "Number of executing infer requests: " << std::endl << t << std::endl;
@ -695,7 +701,7 @@ TEST_P(IEClassGetMetricTest_RANGE_FOR_ASYNC_INFER_REQUESTS, GetMetricAndPrintNoT
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS)));
std::tuple<unsigned int, unsigned int, unsigned int> t = p; std::tuple<unsigned int, unsigned int, unsigned int> t = p;
unsigned int start = std::get<0>(t); unsigned int start = std::get<0>(t);
@ -717,7 +723,7 @@ TEST_P(IEClassGetMetricTest_RANGE_FOR_STREAMS, GetMetricAndPrintNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(RANGE_FOR_STREAMS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(RANGE_FOR_STREAMS)));
std::tuple<unsigned int, unsigned int> t = p; std::tuple<unsigned int, unsigned int> t = p;
unsigned int start = std::get<0>(t); unsigned int start = std::get<0>(t);
@ -736,19 +742,19 @@ TEST_P(IEClassGetMetricTest_ThrowUnsupported, GetMetricThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_THROW(p = ie.GetMetric(deviceName, "unsupported_metric"), InferenceEngine::Exception); ASSERT_THROW(p = ie.GetMetric(target_device, "unsupported_metric"), InferenceEngine::Exception);
} }
TEST_P(IEClassGetConfigTest, GetConfigNoThrow) { TEST_P(IEClassGetConfigTest, GetConfigNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> configValues = p; std::vector<std::string> configValues = p;
for (auto &&confKey : configValues) { for (auto &&confKey : configValues) {
InferenceEngine::Parameter defaultValue; InferenceEngine::Parameter defaultValue;
ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey)); ASSERT_NO_THROW(defaultValue = ie.GetConfig(target_device, confKey));
ASSERT_FALSE(defaultValue.empty()); ASSERT_FALSE(defaultValue.empty());
} }
} }
@ -757,11 +763,11 @@ TEST_P(IEClassGetConfigTest, GetConfigHeteroNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> configValues = p; std::vector<std::string> configValues = p;
for (auto &&confKey : configValues) { for (auto &&confKey : configValues) {
ASSERT_NO_THROW(ie.GetConfig(deviceName, confKey)); ASSERT_NO_THROW(ie.GetConfig(target_device, confKey));
} }
} }
@ -776,7 +782,7 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigHeteroWithDeviceThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_THROW(p = ie.GetConfig(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName, HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)), ASSERT_THROW(p = ie.GetConfig(CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device, HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)),
InferenceEngine::Exception); InferenceEngine::Exception);
} }
@ -784,33 +790,33 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
ASSERT_THROW(p = ie.GetConfig(deviceName, "unsupported_config"), InferenceEngine::Exception); ASSERT_THROW(p = ie.GetConfig(target_device, "unsupported_config"), InferenceEngine::Exception);
} }
TEST_P(IEClassSpecificDeviceTestGetConfig, GetConfigSpecificDeviceNoThrow) { TEST_P(IEClassSpecificDeviceTestGetConfig, GetConfigSpecificDeviceNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
InferenceEngine::Parameter p; InferenceEngine::Parameter p;
std::string deviceID, clearDeviceName; std::string deviceID, cleartarget_device;
auto pos = deviceName.find('.'); auto pos = target_device.find('.');
if (pos != std::string::npos) { if (pos != std::string::npos) {
clearDeviceName = deviceName.substr(0, pos); cleartarget_device = target_device.substr(0, pos);
deviceID = deviceName.substr(pos + 1, deviceName.size()); deviceID = target_device.substr(pos + 1, target_device.size());
} }
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
GTEST_SKIP(); GTEST_SKIP();
} }
std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES)); std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) { if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
GTEST_SKIP(); GTEST_SKIP();
} }
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> configValues = p; std::vector<std::string> configValues = p;
for (auto &&confKey : configValues) { for (auto &&confKey : configValues) {
InferenceEngine::Parameter defaultValue; InferenceEngine::Parameter defaultValue;
ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey)); ASSERT_NO_THROW(defaultValue = ie.GetConfig(target_device, confKey));
ASSERT_FALSE(defaultValue.empty()); ASSERT_FALSE(defaultValue.empty());
} }
} }
@ -824,7 +830,7 @@ TEST_P(IEClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
bool deviceFound = false; bool deviceFound = false;
std::cout << "Available devices: " << std::endl; std::cout << "Available devices: " << std::endl;
for (auto &&device : devices) { for (auto &&device : devices) {
if (device.find(deviceName) != std::string::npos) { if (device.find(target_device) != std::string::npos) {
deviceFound = true; deviceFound = true;
} }
@ -842,12 +848,12 @@ TEST_P(IEClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) { TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
auto deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>(); auto deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
if (deviceIDs.empty()) if (deviceIDs.empty())
GTEST_SKIP(); GTEST_SKIP();
ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO,
{{"TARGET_FALLBACK", deviceName + "." + deviceIDs[0] + "," + deviceName}})); {{"TARGET_FALLBACK", target_device + "." + deviceIDs[0] + "," + target_device}}));
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
} }
@ -856,9 +862,9 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) {
TEST_P(IEClassQueryNetworkTest, QueryNetworkWithDeviceID) { TEST_P(IEClassQueryNetworkTest, QueryNetworkWithDeviceID) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
try { try {
ie.QueryNetwork(simpleCnnNetwork, deviceName + ".0"); ie.QueryNetwork(simpleCnnNetwork, target_device + ".0");
} catch (const InferenceEngine::Exception& ex) { } catch (const InferenceEngine::Exception& ex) {
std::string message = ex.what(); std::string message = ex.what();
ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@ -871,8 +877,8 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithDeviceID) {
TEST_P(IEClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) { TEST_P(IEClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, deviceName + ".110"), InferenceEngine::Exception); ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, target_device + ".110"), InferenceEngine::Exception);
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
} }
@ -881,8 +887,8 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) {
TEST_P(IEClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) { TEST_P(IEClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, deviceName + ".l0"), InferenceEngine::Exception); ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, target_device + ".l0"), InferenceEngine::Exception);
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
} }
@ -891,9 +897,9 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) {
TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithBigDeviceIDThrows) { TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithBigDeviceIDThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO,
{{"TARGET_FALLBACK", deviceName + ".100," + deviceName}}), InferenceEngine::Exception); {{"TARGET_FALLBACK", target_device + ".100," + target_device}}), InferenceEngine::Exception);
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
} }
@ -915,22 +921,22 @@ TEST(IEClassBasicTest, smoke_LoadNetworkToDefaultDeviceNoThrow) {
TEST_P(IEClassNetworkTestP, LoadNetworkActualNoThrow) { TEST_P(IEClassNetworkTestP, LoadNetworkActualNoThrow) {
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName)); ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, target_device));
} }
TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDeviceNoThrow) { TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDeviceNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName)); ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device));
} }
TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDevice2NoThrow) { TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDevice2NoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", deviceName}})); ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", target_device}}));
} }
TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) { TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
auto net = ie.LoadNetwork(actualCnnNetwork, deviceName); auto net = ie.LoadNetwork(actualCnnNetwork, target_device);
auto exec_function = net.GetExecGraphInfo().getFunction(); auto exec_function = net.GetExecGraphInfo().getFunction();
ASSERT_NE(nullptr, exec_function); ASSERT_NE(nullptr, exec_function);
auto actual_parameters = exec_function->get_parameters(); auto actual_parameters = exec_function->get_parameters();
@ -960,32 +966,32 @@ TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) {
TEST_P(IEClassLoadNetworkTestWithThrow, LoadNetworkActualWithThrow) { TEST_P(IEClassLoadNetworkTestWithThrow, LoadNetworkActualWithThrow) {
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName), InferenceEngine::Exception); ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, target_device), InferenceEngine::Exception);
} }
TEST_P(IEClassSeveralDevicesTestLoadNetwork, LoadNetworkActualSeveralDevicesNoThrow) { TEST_P(IEClassSeveralDevicesTestLoadNetwork, LoadNetworkActualSeveralDevicesNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
std::string clearDeviceName; std::string cleartarget_device;
auto pos = deviceNames.begin()->find('.'); auto pos = target_devices.begin()->find('.');
if (pos != std::string::npos) { if (pos != std::string::npos) {
clearDeviceName = deviceNames.begin()->substr(0, pos); cleartarget_device = target_devices.begin()->substr(0, pos);
} }
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
GTEST_SKIP(); GTEST_SKIP();
} }
std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES)); std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
if (deviceIDs.size() < deviceNames.size()) if (deviceIDs.size() < target_devices.size())
GTEST_SKIP(); GTEST_SKIP();
std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":"); std::string multitarget_device = CommonTestUtils::DEVICE_MULTI + std::string(":");
for (auto& dev_name : deviceNames) { for (auto& dev_name : target_devices) {
multiDeviceName += dev_name; multitarget_device += dev_name;
if (&dev_name != &(deviceNames.back())) { if (&dev_name != &(target_devices.back())) {
multiDeviceName += ","; multitarget_device += ",";
} }
} }
ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, multiDeviceName)); ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, multitarget_device));
} }
using IEClassLoadNetworkTest = IEClassQueryNetworkTest; using IEClassLoadNetworkTest = IEClassQueryNetworkTest;
@ -995,11 +1001,11 @@ using IEClassLoadNetworkTest = IEClassQueryNetworkTest;
TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithDeviceIDNoThrow) { TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithDeviceIDNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
auto deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>(); auto deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
if (deviceIDs.empty()) if (deviceIDs.empty())
GTEST_SKIP(); GTEST_SKIP();
std::string heteroDevice = CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + "." + deviceIDs[0] + "," + deviceName; std::string heteroDevice = CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device + "." + deviceIDs[0] + "," + target_device;
ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, heteroDevice)); ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, heteroDevice));
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
@ -1009,11 +1015,11 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithDeviceIDNoThrow) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkWithDeviceIDNoThrow) { TEST_P(IEClassLoadNetworkTest, LoadNetworkWithDeviceIDNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
auto deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>(); auto deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
if (deviceIDs.empty()) if (deviceIDs.empty())
GTEST_SKIP(); GTEST_SKIP();
ASSERT_NO_THROW(ie.LoadNetwork(simpleCnnNetwork, deviceName + "." + deviceIDs[0])); ASSERT_NO_THROW(ie.LoadNetwork(simpleCnnNetwork, target_device + "." + deviceIDs[0]));
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
} }
@ -1022,8 +1028,8 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithDeviceIDNoThrow) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) { TEST_P(IEClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName + ".10"), InferenceEngine::Exception); ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, target_device + ".10"), InferenceEngine::Exception);
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
} }
@ -1032,8 +1038,8 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) { TEST_P(IEClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName + ".l0"), InferenceEngine::Exception); ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, target_device + ".l0"), InferenceEngine::Exception);
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
} }
@ -1042,9 +1048,9 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) { TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, "HETERO", ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, "HETERO",
{{"TARGET_FALLBACK", deviceName + ".100," + CommonTestUtils::DEVICE_CPU}}), InferenceEngine::Exception); {{"TARGET_FALLBACK", target_device + ".100," + CommonTestUtils::DEVICE_CPU}}), InferenceEngine::Exception);
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
} }
@ -1053,9 +1059,9 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) { TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) { if (supportsDeviceID(ie, target_device)) {
ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO,
{{"TARGET_FALLBACK", deviceName + "," + CommonTestUtils::DEVICE_CPU}, {{"TARGET_FALLBACK", target_device + "," + CommonTestUtils::DEVICE_CPU},
{CONFIG_KEY(DEVICE_ID), "110"}}), InferenceEngine::Exception); {CONFIG_KEY(DEVICE_ID), "110"}}), InferenceEngine::Exception);
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
@ -1068,16 +1074,16 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) { TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) { if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
std::string devices; std::string devices;
auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>(); auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
for (auto &&device : availableDevices) { for (auto &&device : availableDevices) {
devices += deviceName + '.' + device; devices += target_device + '.' + device;
if (&device != &(availableDevices.back())) { if (&device != &(availableDevices.back())) {
devices += ','; devices += ',';
} }
} }
std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + deviceName); std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + target_device);
ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, { ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {
{MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices}, {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
{"TARGET_FALLBACK", targetFallback}})); {"TARGET_FALLBACK", targetFallback}}));
@ -1089,9 +1095,9 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) { TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) { if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
std::string devices; std::string devices;
auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>(); auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
for (auto &&device : availableDevices) { for (auto &&device : availableDevices) {
devices += CommonTestUtils::DEVICE_HETERO + std::string(".") + device; devices += CommonTestUtils::DEVICE_HETERO + std::string(".") + device;
if (&device != &(availableDevices.back())) { if (&device != &(availableDevices.back())) {
@ -1100,7 +1106,7 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) {
} }
ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_MULTI, { ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_MULTI, {
{MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices}, {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
{"TARGET_FALLBACK", deviceName + "," + deviceName}})); {"TARGET_FALLBACK", target_device + "," + target_device}}));
} else { } else {
GTEST_SKIP(); GTEST_SKIP();
} }
@ -1113,11 +1119,11 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) {
TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) { TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) { if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
std::string devices; std::string devices;
auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>(); auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
for (auto &&device : availableDevices) { for (auto &&device : availableDevices) {
devices += deviceName + '.' + device; devices += target_device + '.' + device;
if (&device != &(availableDevices.back())) { if (&device != &(availableDevices.back())) {
devices += ','; devices += ',';
} }
@ -1129,7 +1135,7 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
expectedLayers.emplace(node->get_friendly_name()); expectedLayers.emplace(node->get_friendly_name());
} }
InferenceEngine::QueryNetworkResult result; InferenceEngine::QueryNetworkResult result;
std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + deviceName); std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + target_device);
ASSERT_NO_THROW(result = ie.QueryNetwork(multinputCnnNetwork, CommonTestUtils::DEVICE_HETERO, { ASSERT_NO_THROW(result = ie.QueryNetwork(multinputCnnNetwork, CommonTestUtils::DEVICE_HETERO, {
{MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices}, {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
{"TARGET_FALLBACK", targetFallback}})); {"TARGET_FALLBACK", targetFallback}}));
@ -1147,9 +1153,9 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) { TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) { if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
std::string devices; std::string devices;
auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>(); auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
for (auto &&device : availableDevices) { for (auto &&device : availableDevices) {
devices += "HETERO." + device; devices += "HETERO." + device;
if (&device != &(availableDevices.back())) { if (&device != &(availableDevices.back())) {
@ -1165,7 +1171,7 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) {
InferenceEngine::QueryNetworkResult result; InferenceEngine::QueryNetworkResult result;
ASSERT_NO_THROW(result = ie.QueryNetwork(multinputCnnNetwork, CommonTestUtils::DEVICE_MULTI, { ASSERT_NO_THROW(result = ie.QueryNetwork(multinputCnnNetwork, CommonTestUtils::DEVICE_MULTI, {
{MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices}, {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
{"TARGET_FALLBACK", deviceName + "," + deviceName}})); {"TARGET_FALLBACK", target_device + "," + target_device}}));
std::unordered_set<std::string> actualLayers; std::unordered_set<std::string> actualLayers;
for (auto &&layer : result.supportedLayersMap) { for (auto &&layer : result.supportedLayersMap) {
@ -1180,50 +1186,50 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) {
TEST_P(IEClassLoadNetworkAfterCoreRecreateTest, LoadAfterRecreateCoresAndPlugins) { TEST_P(IEClassLoadNetworkAfterCoreRecreateTest, LoadAfterRecreateCoresAndPlugins) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
{ {
auto versions = ie.GetVersions(std::string(CommonTestUtils::DEVICE_MULTI) + ":" + deviceName + "," + CommonTestUtils::DEVICE_CPU); auto versions = ie.GetVersions(std::string(CommonTestUtils::DEVICE_MULTI) + ":" + target_device + "," + CommonTestUtils::DEVICE_CPU);
ASSERT_EQ(3, versions.size()); ASSERT_EQ(3, versions.size());
} }
std::map<std::string, std::string> config; std::map<std::string, std::string> config;
if (deviceName == CommonTestUtils::DEVICE_CPU) { if (target_device == CommonTestUtils::DEVICE_CPU) {
config.insert({"CPU_THREADS_NUM", "3"}); config.insert({"CPU_THREADS_NUM", "3"});
} }
ASSERT_NO_THROW({ ASSERT_NO_THROW({
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
std::string name = actualCnnNetwork.getInputsInfo().begin()->first; std::string name = actualCnnNetwork.getInputsInfo().begin()->first;
actualCnnNetwork.getInputsInfo().at(name)->setPrecision(InferenceEngine::Precision::U8); actualCnnNetwork.getInputsInfo().at(name)->setPrecision(InferenceEngine::Precision::U8);
auto executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName, config); auto executableNetwork = ie.LoadNetwork(actualCnnNetwork, target_device, config);
}); });
}; };
TEST_P(IEClassSetDefaultDeviceIDTest, SetDefaultDeviceIDNoThrow) { TEST_P(IEClassSetDefaultDeviceIDTest, SetDefaultDeviceIDNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
std::vector<std::string> deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)); std::vector<std::string> deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES));
if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) { if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
GTEST_SKIP(); GTEST_SKIP();
} }
std::string value; std::string value;
ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, deviceID }, ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, deviceID },
{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, { InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }},
deviceName)); target_device));
ASSERT_NO_THROW(value = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>()); ASSERT_NO_THROW(value = ie.GetConfig(target_device, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES); ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES);
} }
TEST_P(IEClassSetGlobalConfigTest, SetGlobalConfigNoThrow) { TEST_P(IEClassSetGlobalConfigTest, SetGlobalConfigNoThrow) {
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate(); InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
std::vector<std::string> deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)); std::vector<std::string> deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES));
InferenceEngine::Parameter ref, src; InferenceEngine::Parameter ref, src;
for (auto& dev_id : deviceIDs) { for (auto& dev_id : deviceIDs) {
ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::NO }}, ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::NO }},
deviceName + "." + dev_id)); target_device + "." + dev_id));
} }
ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, deviceName)); ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, target_device));
ASSERT_NO_THROW(ref = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT)); ASSERT_NO_THROW(ref = ie.GetConfig(target_device, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));
for (auto& dev_id : deviceIDs) { for (auto& dev_id : deviceIDs) {
ASSERT_NO_THROW(src = ie.GetConfig(deviceName + "." + dev_id, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT)); ASSERT_NO_THROW(src = ie.GetConfig(target_device + "." + dev_id, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));
ASSERT_EQ(src, ref); ASSERT_EQ(src, ref);
} }
} }
@ -1231,24 +1237,24 @@ TEST_P(IEClassSetGlobalConfigTest, SetGlobalConfigNoThrow) {
TEST_P(IEClassSeveralDevicesTestDefaultCore, DefaultCoreSeveralDevicesNoThrow) { TEST_P(IEClassSeveralDevicesTestDefaultCore, DefaultCoreSeveralDevicesNoThrow) {
InferenceEngine::Core ie; InferenceEngine::Core ie;
std::string clearDeviceName; std::string cleartarget_device;
auto pos = deviceNames.begin()->find('.'); auto pos = target_devices.begin()->find('.');
if (pos != std::string::npos) { if (pos != std::string::npos) {
clearDeviceName = deviceNames.begin()->substr(0, pos); cleartarget_device = target_devices.begin()->substr(0, pos);
} }
if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) { if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
GTEST_SKIP(); GTEST_SKIP();
} }
std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES)); std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
if (deviceIDs.size() < deviceNames.size()) if (deviceIDs.size() < target_devices.size())
GTEST_SKIP(); GTEST_SKIP();
for (size_t i = 0; i < deviceNames.size(); ++i) { for (size_t i = 0; i < target_devices.size(); ++i) {
ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, std::to_string(i + 2) }}, deviceNames[i])); ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, std::to_string(i + 2) }}, target_devices[i]));
} }
std::string res; std::string res;
for (size_t i = 0; i < deviceNames.size(); ++i) { for (size_t i = 0; i < target_devices.size(); ++i) {
ASSERT_NO_THROW(res = ie.GetConfig(deviceNames[i], InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS).as<std::string>()); ASSERT_NO_THROW(res = ie.GetConfig(target_devices[i], InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS).as<std::string>());
ASSERT_EQ(res, std::to_string(i + 2)); ASSERT_EQ(res, std::to_string(i + 2));
} }
} }

View File

@ -15,6 +15,7 @@
#include <common_test_utils/file_utils.hpp> #include <common_test_utils/file_utils.hpp>
#include <common_test_utils/test_assertions.hpp> #include <common_test_utils/test_assertions.hpp>
#include <common_test_utils/test_constants.hpp> #include <common_test_utils/test_constants.hpp>
#include "base/behavior_test_utils.hpp"
#include <gtest/gtest.h> #include <gtest/gtest.h>
#include <thread> #include <thread>
@ -23,6 +24,7 @@
#include <chrono> #include <chrono>
#include <fstream> #include <fstream>
#include <functional_test_utils/skip_tests_config.hpp> #include <functional_test_utils/skip_tests_config.hpp>
#include "base/ov_behavior_test_utils.hpp"
using Device = std::string; using Device = std::string;
using Config = std::map<std::string, std::string>; using Config = std::map<std::string, std::string>;
@ -49,7 +51,7 @@ public:
} }
} }
void safePluginUnregister(InferenceEngine::Core & ie) { void safePluginUnregister(InferenceEngine::Core & ie, const std::string& deviceName) {
try { try {
ie.UnregisterPlugin(deviceName); ie.UnregisterPlugin(deviceName);
} catch (const InferenceEngine::Exception & ex) { } catch (const InferenceEngine::Exception & ex) {
@ -69,7 +71,6 @@ public:
} }
} }
Device deviceName;
Config config; Config config;
}; };
@ -77,24 +78,27 @@ public:
// Common threading plugin tests // Common threading plugin tests
// //
class CoreThreadingTests : public CoreThreadingTestsBase, class CoreThreadingTests : public testing::WithParamInterface<Params>,
public ::testing::TestWithParam<Params> { public BehaviorTestsUtils::IEPluginTestBase,
public CoreThreadingTestsBase {
public: public:
void SetUp() override { void SetUp() override {
std::tie(target_device, config) = GetParam();
APIBaseTest::SetUp();
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(deviceName, config) = GetParam();
} }
static std::string getTestCaseName(testing::TestParamInfo<Params> obj) { static std::string getTestCaseName(testing::TestParamInfo<Params> obj) {
std::string deviceName; std::string deviceName;
Config config; Config config;
std::tie(deviceName, config) = obj.param; std::tie(deviceName, config) = obj.param;
std::replace(deviceName.begin(), deviceName.end(), ':', '.');
char separator('_'); char separator('_');
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << deviceName << separator; result << "targetDevice=" << deviceName << separator;
result << "config="; result << "config=";
for (auto& confItem : config) { for (auto& confItem : config) {
result << confItem.first << ":" << confItem.second << separator; result << confItem.first << "=" << confItem.second << separator;
} }
return result.str(); return result.str();
} }
@ -104,9 +108,9 @@ public:
TEST_P(CoreThreadingTests, smoke_GetVersions) { TEST_P(CoreThreadingTests, smoke_GetVersions) {
InferenceEngine::Core ie; InferenceEngine::Core ie;
runParallel([&] () { runParallel([&] () {
auto versions = ie.GetVersions(deviceName); auto versions = ie.GetVersions(target_device);
ASSERT_LE(1u, versions.size()); ASSERT_LE(1u, versions.size());
safePluginUnregister(ie); safePluginUnregister(ie, target_device);
}); });
} }
@ -115,7 +119,7 @@ TEST_P(CoreThreadingTests, smoke_SetConfigPluginExists) {
InferenceEngine::Core ie; InferenceEngine::Core ie;
ie.SetConfig(config); ie.SetConfig(config);
auto versions = ie.GetVersions(deviceName); auto versions = ie.GetVersions(target_device);
runParallel([&] () { runParallel([&] () {
ie.SetConfig(config); ie.SetConfig(config);
@ -129,8 +133,8 @@ TEST_P(CoreThreadingTests, smoke_GetConfig) {
ie.SetConfig(config); ie.SetConfig(config);
runParallel([&] () { runParallel([&] () {
ie.GetConfig(deviceName, configKey); ie.GetConfig(target_device, configKey);
safePluginUnregister(ie); safePluginUnregister(ie, target_device);
}); });
} }
@ -138,8 +142,8 @@ TEST_P(CoreThreadingTests, smoke_GetConfig) {
TEST_P(CoreThreadingTests, smoke_GetMetric) { TEST_P(CoreThreadingTests, smoke_GetMetric) {
InferenceEngine::Core ie; InferenceEngine::Core ie;
runParallel([&] () { runParallel([&] () {
ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)); ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS));
safePluginUnregister(ie); safePluginUnregister(ie, target_device);
}); });
} }
@ -148,12 +152,12 @@ TEST_P(CoreThreadingTests, smoke_QueryNetwork) {
InferenceEngine::Core ie; InferenceEngine::Core ie;
InferenceEngine::CNNNetwork network(ngraph::builder::subgraph::make2InputSubtract()); InferenceEngine::CNNNetwork network(ngraph::builder::subgraph::make2InputSubtract());
ie.SetConfig(config, deviceName); ie.SetConfig(config, target_device);
InferenceEngine::QueryNetworkResult refResult = ie.QueryNetwork(network, deviceName); InferenceEngine::QueryNetworkResult refResult = ie.QueryNetwork(network, target_device);
runParallel([&] () { runParallel([&] () {
const auto result = ie.QueryNetwork(network, deviceName); const auto result = ie.QueryNetwork(network, target_device);
safePluginUnregister(ie); safePluginUnregister(ie, target_device);
// compare QueryNetworkResult with reference // compare QueryNetworkResult with reference
for (auto && r : refResult.supportedLayersMap) { for (auto && r : refResult.supportedLayersMap) {
@ -179,12 +183,13 @@ enum struct ModelClass : unsigned {
using CoreThreadingParams = std::tuple<Params, Threads, Iterations, ModelClass>; using CoreThreadingParams = std::tuple<Params, Threads, Iterations, ModelClass>;
class CoreThreadingTestsWithIterations : public ::testing::TestWithParam<CoreThreadingParams>, class CoreThreadingTestsWithIterations : public testing::WithParamInterface<CoreThreadingParams>,
public CoreThreadingTestsBase { public BehaviorTestsUtils::IEPluginTestBase,
public CoreThreadingTestsBase {
public: public:
void SetUp() override { void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(deviceName, config) = std::get<0>(GetParam()); std::tie(target_device, config) = std::get<0>(GetParam());
numThreads = std::get<1>(GetParam()); numThreads = std::get<1>(GetParam());
numIterations = std::get<2>(GetParam()); numIterations = std::get<2>(GetParam());
modelClass = std::get<3>(GetParam()); modelClass = std::get<3>(GetParam());
@ -195,6 +200,7 @@ public:
std::string deviceName; std::string deviceName;
Config config; Config config;
std::tie(deviceName, config) = std::get<0>(obj.param); std::tie(deviceName, config) = std::get<0>(obj.param);
std::replace(deviceName.begin(), deviceName.end(), ':', '.');
numThreads = std::get<1>(obj.param); numThreads = std::get<1>(obj.param);
numIterations = std::get<2>(obj.param); numIterations = std::get<2>(obj.param);
char separator('_'); char separator('_');
@ -202,13 +208,15 @@ public:
result << "targetDevice=" << deviceName << separator; result << "targetDevice=" << deviceName << separator;
result << "config="; result << "config=";
for (auto& confItem : config) { for (auto& confItem : config) {
result << confItem.first << ":" << confItem.second << separator; result << confItem.first << "=" << confItem.second << separator;
} }
result << "numThreads=" << numThreads << separator; result << "numThreads=" << numThreads << separator;
result << "numIter=" << numIterations; result << "numIter=" << numIterations;
return result.str(); return result.str();
} }
protected:
ModelClass modelClass; ModelClass modelClass;
unsigned int numIterations; unsigned int numIterations;
unsigned int numThreads; unsigned int numThreads;
@ -236,10 +244,10 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork) {
SetupNetworks(); SetupNetworks();
ie.SetConfig(config, deviceName); ie.SetConfig(config, target_device);
runParallel([&] () { runParallel([&] () {
auto value = counter++; auto value = counter++;
(void)ie.LoadNetwork(networks[value % networks.size()], deviceName); (void)ie.LoadNetwork(networks[value % networks.size()], target_device);
}, numIterations, numThreads); }, numIterations, numThreads);
} }
@ -250,7 +258,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy_SingleIECore)
SetupNetworks(); SetupNetworks();
ie.SetConfig(config, deviceName); ie.SetConfig(config, target_device);
runParallel([&] () { runParallel([&] () {
auto value = counter++; auto value = counter++;
@ -264,7 +272,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy_SingleIECore)
} }
auto getOutputBlob = [&](InferenceEngine::Core & core) { auto getOutputBlob = [&](InferenceEngine::Core & core) {
auto exec = core.LoadNetwork(network, deviceName); auto exec = core.LoadNetwork(network, target_device);
auto req = exec.CreateInferRequest(); auto req = exec.CreateInferRequest();
req.SetInput(blobs); req.SetInput(blobs);
@ -293,7 +301,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy) {
SetupNetworks(); SetupNetworks();
ie.SetConfig(config, deviceName); ie.SetConfig(config, target_device);
runParallel([&] () { runParallel([&] () {
auto value = counter++; auto value = counter++;
auto network = networks[value % networks.size()]; auto network = networks[value % networks.size()];
@ -306,7 +314,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy) {
} }
auto getOutputBlob = [&](InferenceEngine::Core & core) { auto getOutputBlob = [&](InferenceEngine::Core & core) {
auto exec = core.LoadNetwork(network, deviceName); auto exec = core.LoadNetwork(network, target_device);
auto req = exec.CreateInferRequest(); auto req = exec.CreateInferRequest();
req.SetInput(blobs); req.SetInput(blobs);
@ -325,7 +333,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy) {
// compare actual value using the second Core // compare actual value using the second Core
{ {
InferenceEngine::Core ie2; InferenceEngine::Core ie2;
ie2.SetConfig(config, deviceName); ie2.SetConfig(config, target_device);
auto outputRef = getOutputBlob(ie2); auto outputRef = getOutputBlob(ie2);
FuncTestUtils::compareBlobs(outputActual, outputRef); FuncTestUtils::compareBlobs(outputActual, outputRef);
@ -342,8 +350,8 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork_SingleIECore) {
runParallel([&] () { runParallel([&] () {
auto value = counter++; auto value = counter++;
ie.SetConfig(config, deviceName); ie.SetConfig(config, target_device);
(void)ie.LoadNetwork(networks[value % networks.size()], deviceName); (void)ie.LoadNetwork(networks[value % networks.size()], target_device);
}, numIterations, numThreads); }, numIterations, numThreads);
} }
@ -356,7 +364,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork_MultipleIECores) {
runParallel([&] () { runParallel([&] () {
auto value = counter++; auto value = counter++;
InferenceEngine::Core ie; InferenceEngine::Core ie;
ie.SetConfig(config, deviceName); ie.SetConfig(config, target_device);
(void)ie.LoadNetwork(networks[value % networks.size()], deviceName); (void)ie.LoadNetwork(networks[value % networks.size()], target_device);
}, numIterations, numThreads); }, numIterations, numThreads);
} }

View File

@ -15,10 +15,10 @@
#include <common_test_utils/test_constants.hpp> #include <common_test_utils/test_constants.hpp>
#include <cpp/ie_cnn_network.h> #include <cpp/ie_cnn_network.h>
#include "gtest/gtest.h" #include "gtest/gtest.h"
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/crash_handler.hpp" #include "common_test_utils/crash_handler.hpp"
#include "functional_test_utils/skip_tests_config.hpp" #include "functional_test_utils/skip_tests_config.hpp"
#include "functional_test_utils/precision_utils.hpp" #include "functional_test_utils/precision_utils.hpp"
#include "base/behavior_test_utils.hpp"
#include <ie_core.hpp> #include <ie_core.hpp>
namespace BehaviorTestsDefinitions { namespace BehaviorTestsDefinitions {
@ -27,29 +27,27 @@ typedef std::tuple<
std::vector<int>> // Order std::vector<int>> // Order
HoldersParams; HoldersParams;
class HoldersTest : public CommonTestUtils::TestsCommon, class HoldersTest : public BehaviorTestsUtils::IEPluginTestBase,
public ::testing::WithParamInterface<HoldersParams> { public ::testing::WithParamInterface<HoldersParams> {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<HoldersParams> obj); static std::string getTestCaseName(testing::TestParamInfo<HoldersParams> obj);
void SetUp() override; void SetUp() override;
protected:
std::vector<int> order; std::vector<int> order;
std::shared_ptr<ngraph::Function> function; std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
}; };
using HoldersTestImportNetwork = HoldersTest; using HoldersTestImportNetwork = HoldersTest;
class HoldersTestOnImportedNetwork : public CommonTestUtils::TestsCommon, class HoldersTestOnImportedNetwork : public BehaviorTestsUtils::IEPluginTestBase,
public ::testing::WithParamInterface<std::string> { public ::testing::WithParamInterface<std::string> {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj); static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);
void SetUp() override; void SetUp() override;
protected:
std::shared_ptr<ngraph::Function> function; std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
}; };
} // namespace BehaviorTestsDefinitions } // namespace BehaviorTestsDefinitions

View File

@ -25,7 +25,7 @@ TEST_P(InferRequestPreprocessTest, SetPreProcessToInputInfo) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR); preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
{ {
@ -44,7 +44,7 @@ TEST_P(InferRequestPreprocessTest, SetPreProcessToInferRequest) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR); preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
InferenceEngine::ConstInputsDataMap inputsMap = execNet.GetInputsInfo(); InferenceEngine::ConstInputsDataMap inputsMap = execNet.GetInputsInfo();
@ -96,7 +96,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanImagePreProcessGetBlob) {
} }
preProcess.setVariant(InferenceEngine::MEAN_IMAGE); preProcess.setVariant(InferenceEngine::MEAN_IMAGE);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
auto inBlob = req.GetBlob("param"); auto inBlob = req.GetBlob("param");
@ -163,7 +163,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanImagePreProcessSetBlob) {
} }
preProcess.setVariant(InferenceEngine::MEAN_IMAGE); preProcess.setVariant(InferenceEngine::MEAN_IMAGE);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
@ -225,7 +225,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanValuePreProcessGetBlob) {
preProcess[2]->stdScale = 1; preProcess[2]->stdScale = 1;
preProcess.setVariant(InferenceEngine::MEAN_VALUE); preProcess.setVariant(InferenceEngine::MEAN_VALUE);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
auto inBlob = req.GetBlob("param"); auto inBlob = req.GetBlob("param");
@ -285,7 +285,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanValuePreProcessSetBlob) {
preProcess[2]->stdScale = 1; preProcess[2]->stdScale = 1;
preProcess.setVariant(InferenceEngine::MEAN_VALUE); preProcess.setVariant(InferenceEngine::MEAN_VALUE);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
@ -340,7 +340,7 @@ TEST_P(InferRequestPreprocessTest, ReverseInputChannelsPreProcessGetBlob) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB); preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
auto inBlob = req.GetBlob("param"); auto inBlob = req.GetBlob("param");
@ -401,7 +401,7 @@ TEST_P(InferRequestPreprocessTest, ReverseInputChannelsPreProcessSetBlob) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB); preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
@ -472,7 +472,7 @@ TEST_P(InferRequestPreprocessTest, SetScalePreProcessGetBlob) {
preProcess[2]->meanValue = 0; preProcess[2]->meanValue = 0;
preProcess.setVariant(InferenceEngine::MEAN_VALUE); preProcess.setVariant(InferenceEngine::MEAN_VALUE);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
auto inBlob = req.GetBlob("param"); auto inBlob = req.GetBlob("param");
@ -532,7 +532,7 @@ TEST_P(InferRequestPreprocessTest, SetScalePreProcessSetBlob) {
preProcess[2]->meanValue = 0; preProcess[2]->meanValue = 0;
preProcess.setVariant(InferenceEngine::MEAN_VALUE); preProcess.setVariant(InferenceEngine::MEAN_VALUE);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
@ -577,18 +577,19 @@ typedef std::tuple<
> PreprocessConversionParams; > PreprocessConversionParams;
class InferRequestPreprocessConversionTest : public testing::WithParamInterface<PreprocessConversionParams>, class InferRequestPreprocessConversionTest : public testing::WithParamInterface<PreprocessConversionParams>,
public CommonTestUtils::TestsCommon { public BehaviorTestsUtils::IEPluginTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<PreprocessConversionParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<PreprocessConversionParams> obj) {
InferenceEngine::Precision netPrecision, iPrecision, oPrecision; InferenceEngine::Precision netPrecision, iPrecision, oPrecision;
InferenceEngine::Layout netLayout, iLayout, oLayout; InferenceEngine::Layout netLayout, iLayout, oLayout;
bool setInputBlob, setOutputBlob; bool setInputBlob, setOutputBlob;
std::string targetDevice; std::string target_device;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
std::tie(netPrecision, iPrecision, oPrecision, std::tie(netPrecision, iPrecision, oPrecision,
netLayout, iLayout, oLayout, netLayout, iLayout, oLayout,
setInputBlob, setOutputBlob, setInputBlob, setOutputBlob,
targetDevice, configuration) = obj.param; target_device, configuration) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '_');
std::ostringstream result; std::ostringstream result;
result << "netPRC=" << netPrecision.name() << "_"; result << "netPRC=" << netPrecision.name() << "_";
result << "iPRC=" << iPrecision.name() << "_"; result << "iPRC=" << iPrecision.name() << "_";
@ -598,7 +599,7 @@ public:
result << "oLT=" << oLayout << "_"; result << "oLT=" << oLayout << "_";
result << "setIBlob=" << setInputBlob << "_"; result << "setIBlob=" << setInputBlob << "_";
result << "setOBlob=" << setOutputBlob << "_"; result << "setOBlob=" << setOutputBlob << "_";
result << "targetDevice=" << targetDevice; result << "target_device=" << target_device;
if (!configuration.empty()) { if (!configuration.empty()) {
for (auto& configItem : configuration) { for (auto& configItem : configuration) {
result << "configItem=" << configItem.first << "_" << configItem.second << "_"; result << "configItem=" << configItem.first << "_" << configItem.second << "_";
@ -626,25 +627,26 @@ public:
} }
void SetUp() override { void SetUp() override {
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(netPrecision, iPrecision, oPrecision, std::tie(netPrecision, iPrecision, oPrecision,
netLayout, iLayout, oLayout, netLayout, iLayout, oLayout,
setInputBlob, setOutputBlob, setInputBlob, setOutputBlob,
targetDevice, configuration) = this->GetParam(); target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
} }
void TearDown() override { void TearDown() override {
if (!configuration.empty()) { if (!configuration.empty()) {
PluginCache::get().reset(); PluginCache::get().reset();
} }
APIBaseTest::TearDown();
} }
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(); std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
InferenceEngine::Precision netPrecision, iPrecision, oPrecision; InferenceEngine::Precision netPrecision, iPrecision, oPrecision;
InferenceEngine::Layout netLayout, iLayout, oLayout; InferenceEngine::Layout netLayout, iLayout, oLayout;
bool setInputBlob, setOutputBlob; bool setInputBlob, setOutputBlob;
std::string targetDevice;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
}; };
@ -676,7 +678,7 @@ TEST_P(InferRequestPreprocessConversionTest, Infer) {
cnnNet.getOutputsInfo().begin()->second->setLayout(oLayout); cnnNet.getOutputsInfo().begin()->second->setLayout(oLayout);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
@ -765,7 +767,7 @@ typedef std::tuple<
> PreprocessSetBlobCheckParams; > PreprocessSetBlobCheckParams;
class InferRequestPreprocessDynamicallyInSetBlobTest : public testing::WithParamInterface<PreprocessSetBlobCheckParams>, class InferRequestPreprocessDynamicallyInSetBlobTest : public testing::WithParamInterface<PreprocessSetBlobCheckParams>,
public CommonTestUtils::TestsCommon { public BehaviorTestsUtils::IEPluginTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<PreprocessSetBlobCheckParams> obj) { static std::string getTestCaseName(testing::TestParamInfo<PreprocessSetBlobCheckParams> obj) {
InferenceEngine::Precision netPrecision; InferenceEngine::Precision netPrecision;
@ -773,12 +775,13 @@ public:
bool changeIPrecision, changeOPrecision; bool changeIPrecision, changeOPrecision;
bool changeILayout, changeOLayout; bool changeILayout, changeOLayout;
bool setInputBlob, setOutputBlob; bool setInputBlob, setOutputBlob;
std::string targetDevice; std::string target_device;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
std::tie(netPrecision, changeIPrecision, changeOPrecision, std::tie(netPrecision, changeIPrecision, changeOPrecision,
netLayout, changeILayout, changeOLayout, netLayout, changeILayout, changeOLayout,
setInputBlob, setOutputBlob, setInputBlob, setOutputBlob,
targetDevice, configuration) = obj.param; target_device, configuration) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '_');
std::ostringstream result; std::ostringstream result;
result << "netPRC=" << netPrecision.name() << "_"; result << "netPRC=" << netPrecision.name() << "_";
result << "iPRC=" << changeIPrecision << "_"; result << "iPRC=" << changeIPrecision << "_";
@ -788,7 +791,7 @@ public:
result << "oLT=" << changeOLayout << "_"; result << "oLT=" << changeOLayout << "_";
result << "setIBlob=" << setInputBlob << "_"; result << "setIBlob=" << setInputBlob << "_";
result << "setOBlob=" << setOutputBlob << "_"; result << "setOBlob=" << setOutputBlob << "_";
result << "targetDevice=" << targetDevice; result << "target_device=" << target_device;
if (!configuration.empty()) { if (!configuration.empty()) {
for (auto& configItem : configuration) { for (auto& configItem : configuration) {
result << "configItem=" << configItem.first << "_" << configItem.second << "_"; result << "configItem=" << configItem.first << "_" << configItem.second << "_";
@ -821,13 +824,15 @@ public:
std::tie(netPrecision, changeIPrecision, changeOPrecision, std::tie(netPrecision, changeIPrecision, changeOPrecision,
netLayout, changeILayout, changeOLayout, netLayout, changeILayout, changeOLayout,
setInputBlob, setOutputBlob, setInputBlob, setOutputBlob,
targetDevice, configuration) = this->GetParam(); target_device, configuration) = this->GetParam();
APIBaseTest::SetUp();
} }
void TearDown() override { void TearDown() override {
if (!configuration.empty()) { if (!configuration.empty()) {
PluginCache::get().reset(); PluginCache::get().reset();
} }
APIBaseTest::TearDown();
} }
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(); std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
@ -836,7 +841,6 @@ public:
InferenceEngine::Layout netLayout; InferenceEngine::Layout netLayout;
bool changeILayout, changeOLayout; bool changeILayout, changeOLayout;
bool setInputBlob, setOutputBlob; bool setInputBlob, setOutputBlob;
std::string targetDevice;
std::map<std::string, std::string> configuration; std::map<std::string, std::string> configuration;
}; };
@ -863,7 +867,7 @@ TEST_P(InferRequestPreprocessDynamicallyInSetBlobTest, Infer) {
InferenceEngine::CNNNetwork cnnNet(ngraph); InferenceEngine::CNNNetwork cnnNet(ngraph);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();
InferenceEngine::Blob::Ptr inBlob = nullptr, outBlob = nullptr; InferenceEngine::Blob::Ptr inBlob = nullptr, outBlob = nullptr;
@ -997,7 +1001,7 @@ TEST_P(InferRequestPreprocessTest, InferWithRGB2BGRConversion) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess(); auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setColorFormat(InferenceEngine::ColorFormat::BGR); preProcess.setColorFormat(InferenceEngine::ColorFormat::BGR);
// Load CNNNetwork to target plugins // Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest // Create InferRequest
auto req = execNet.CreateInferRequest(); auto req = execNet.CreateInferRequest();

View File

@ -15,34 +15,35 @@
namespace BehaviorTestsDefinitions { namespace BehaviorTestsDefinitions {
class VersionTest : public testing::WithParamInterface<std::string>, class VersionTest : public testing::WithParamInterface<std::string>,
public CommonTestUtils::TestsCommon { public BehaviorTestsUtils::IEPluginTestBase {
public: public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj) { static std::string getTestCaseName(testing::TestParamInfo<std::string> obj) {
std::string targetDevice; std::string targetDevice;
std::map<std::string, std::string> config; std::map<std::string, std::string> config;
targetDevice = obj.param; targetDevice = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '_');
std::ostringstream result; std::ostringstream result;
result << "targetDevice=" << targetDevice; result << "targetDevice=" << targetDevice;
return result.str(); return result.str();
} }
void SetUp() override { void SetUp() override {
target_device = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
targetDevice = this->GetParam(); APIBaseTest::SetUp();
} }
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(); std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
std::string targetDevice;
}; };
// Load unsupported network type to the Plugin // Load unsupported network type to the Plugin
TEST_P(VersionTest, pluginCurrentVersionIsCorrect) { TEST_P(VersionTest, pluginCurrentVersionIsCorrect) {
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos && if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos && target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
std::map<std::string, InferenceEngine::Version> versions = ie->GetVersions(targetDevice); std::map<std::string, InferenceEngine::Version> versions = ie->GetVersions(target_device);
ASSERT_EQ(versions.size(), 1); ASSERT_EQ(versions.size(), 1);
ASSERT_EQ(versions.begin()->first, targetDevice); ASSERT_EQ(versions.begin()->first, target_device);
auto version = versions.begin()->second; auto version = versions.begin()->second;
IE_SUPPRESS_DEPRECATED_START IE_SUPPRESS_DEPRECATED_START
ASSERT_EQ(version.apiVersion.major, 2); ASSERT_EQ(version.apiVersion.major, 2);

View File

@ -9,7 +9,7 @@
#include "common_test_utils/test_common.hpp" #include "common_test_utils/test_common.hpp"
#include "common_test_utils/common_utils.hpp" #include "common_test_utils/common_utils.hpp"
#include "functional_test_utils/layer_test_utils/summary.hpp" #include "functional_test_utils/summary/op_summary.hpp"
#include "functional_test_utils/ov_plugin_cache.hpp" #include "functional_test_utils/ov_plugin_cache.hpp"
namespace ov { namespace ov {
@ -24,7 +24,7 @@ using OpImplParams = std::tuple<
class OpImplCheckTest : public testing::WithParamInterface<OpImplParams>, class OpImplCheckTest : public testing::WithParamInterface<OpImplParams>,
public CommonTestUtils::TestsCommon { public CommonTestUtils::TestsCommon {
protected: protected:
LayerTestsUtils::Summary& summary = LayerTestsUtils::Summary::getInstance(); ov::test::utils::OpSummary& summary = ov::test::utils::OpSummary::getInstance();
std::shared_ptr<ov::Core> core = ov::test::utils::PluginCache::get().core(); std::shared_ptr<ov::Core> core = ov::test::utils::PluginCache::get().core();
std::shared_ptr<ov::Model> function; std::shared_ptr<ov::Model> function;
std::string targetDevice; std::string targetDevice;

View File

@ -4,7 +4,7 @@
#pragma once #pragma once
#include <functional_test_utils/layer_test_utils/summary.hpp> #include <functional_test_utils/summary/op_summary.hpp>
#include <ngraph_functions/subgraph_builders.hpp> #include <ngraph_functions/subgraph_builders.hpp>
namespace ov { namespace ov {
@ -16,7 +16,7 @@ OpGenerator getOpGeneratorMap();
static const std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Model>>> createFunctions() { static const std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Model>>> createFunctions() {
std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Model>>> res; std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Model>>> res;
auto opsets = LayerTestsUtils::Summary::getInstance().getOpSets(); auto opsets = ov::test::utils::OpSummary::getInstance().getOpSets();
auto opGenerator = getOpGeneratorMap(); auto opGenerator = getOpGeneratorMap();
std::set<ngraph::NodeTypeInfo> opsInfo; std::set<ngraph::NodeTypeInfo> opsInfo;
for (const auto& opset : opsets) { for (const auto& opset : opsets) {

View File

@ -244,12 +244,17 @@ const char expected_serialized_model[] = R"V0G0N(
std::string ExecGraphSerializationTest::getTestCaseName(testing::TestParamInfo<std::string> obj) { std::string ExecGraphSerializationTest::getTestCaseName(testing::TestParamInfo<std::string> obj) {
std::ostringstream result; std::ostringstream result;
std::string targetDevice = obj.param; std::string target_device = obj.param;
result << "TargetDevice=" << targetDevice; std::replace(target_device.begin(), target_device.end(), ':', '.');
result << "TargetDevice=" << target_device;
return result.str(); return result.str();
} }
void ExecGraphSerializationTest::SetUp() { void ExecGraphSerializationTest::SetUp() {
target_device = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
const std::string XML_EXT = ".xml"; const std::string XML_EXT = ".xml";
const std::string BIN_EXT = ".bin"; const std::string BIN_EXT = ".bin";
@ -257,11 +262,10 @@ void ExecGraphSerializationTest::SetUp() {
m_out_xml_path = model_name + XML_EXT; m_out_xml_path = model_name + XML_EXT;
m_out_bin_path = model_name + BIN_EXT; m_out_bin_path = model_name + BIN_EXT;
deviceName = this->GetParam();
} }
void ExecGraphSerializationTest::TearDown() { void ExecGraphSerializationTest::TearDown() {
APIBaseTest::TearDown();
CommonTestUtils::removeIRFiles(m_out_xml_path, m_out_bin_path); CommonTestUtils::removeIRFiles(m_out_xml_path, m_out_bin_path);
} }
@ -340,10 +344,10 @@ std::pair<bool, std::string> ExecGraphSerializationTest::compare_docs(const pugi
} }
TEST_P(ExecGraphSerializationTest, ExecutionGraph) { TEST_P(ExecGraphSerializationTest, ExecutionGraph) {
auto ie = PluginCache::get().ie(deviceName); auto ie = PluginCache::get().ie(target_device);
InferenceEngine::Blob::Ptr a; InferenceEngine::Blob::Ptr a;
auto cnnNet = ie->ReadNetwork(serialize_test_model, a); auto cnnNet = ie->ReadNetwork(serialize_test_model, a);
auto execNet = ie->LoadNetwork(cnnNet, deviceName); auto execNet = ie->LoadNetwork(cnnNet, target_device);
auto execGraph = execNet.GetExecGraphInfo(); auto execGraph = execNet.GetExecGraphInfo();
InferenceEngine::InferRequest req = execNet.CreateInferRequest(); InferenceEngine::InferRequest req = execNet.CreateInferRequest();
execGraph.serialize(m_out_xml_path, m_out_bin_path); execGraph.serialize(m_out_xml_path, m_out_bin_path);
@ -365,6 +369,7 @@ std::string ExecGraphUniqueNodeNames::getTestCaseName(testing::TestParamInfo<Lay
InferenceEngine::SizeVector inputShapes, newInputShapes; InferenceEngine::SizeVector inputShapes, newInputShapes;
std::string targetDevice; std::string targetDevice;
std::tie(netPrecision, inputShapes, targetDevice) = obj.param; std::tie(netPrecision, inputShapes, targetDevice) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '_');
std::ostringstream result; std::ostringstream result;
result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_"; result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_";
@ -375,11 +380,12 @@ std::string ExecGraphUniqueNodeNames::getTestCaseName(testing::TestParamInfo<Lay
} }
void ExecGraphUniqueNodeNames::SetUp() { void ExecGraphUniqueNodeNames::SetUp() {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::vector<size_t> inputShape; std::vector<size_t> inputShape;
InferenceEngine::Precision netPrecision; InferenceEngine::Precision netPrecision;
std::tie(netPrecision, inputShape, targetDevice) = this->GetParam(); std::tie(netPrecision, inputShape, target_device) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision); auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {inputShape}); auto params = ngraph::builder::makeParams(ngPrc, {inputShape});
@ -390,15 +396,11 @@ void ExecGraphUniqueNodeNames::SetUp() {
fnPtr = std::make_shared<ngraph::Function>(results, params, "SplitConvConcat"); fnPtr = std::make_shared<ngraph::Function>(results, params, "SplitConvConcat");
} }
void ExecGraphUniqueNodeNames::TearDown() {
fnPtr.reset();
}
TEST_P(ExecGraphUniqueNodeNames, CheckUniqueNodeNames) { TEST_P(ExecGraphUniqueNodeNames, CheckUniqueNodeNames) {
InferenceEngine::CNNNetwork cnnNet(fnPtr); InferenceEngine::CNNNetwork cnnNet(fnPtr);
auto ie = PluginCache::get().ie(targetDevice); auto ie = PluginCache::get().ie(target_device);
auto execNet = ie->LoadNetwork(cnnNet, targetDevice); auto execNet = ie->LoadNetwork(cnnNet, target_device);
InferenceEngine::CNNNetwork execGraphInfo = execNet.GetExecGraphInfo(); InferenceEngine::CNNNetwork execGraphInfo = execNet.GetExecGraphInfo();

View File

@ -3,6 +3,7 @@
// //
#include "behavior/executable_network/locale.hpp" #include "behavior/executable_network/locale.hpp"
#include "functional_test_utils/summary/api_summary.hpp"
namespace BehaviorTestsDefinitions { namespace BehaviorTestsDefinitions {
@ -24,15 +25,19 @@ inline std::shared_ptr<ngraph::Function> makeTestModel(std::vector<size_t> input
std::string CustomLocaleTest::getTestCaseName(const testing::TestParamInfo<LocaleParams> &obj) { std::string CustomLocaleTest::getTestCaseName(const testing::TestParamInfo<LocaleParams> &obj) {
std::ostringstream results; std::ostringstream results;
std::string deviceName, localeName; std::string targetDevice, localeName;
std::tie(localeName, deviceName) = obj.param; std::tie(localeName, targetDevice) = obj.param;
std::replace(localeName.begin(), localeName.end(), '-', '.');
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
results << "locale=" << localeName << "_" results << "locale=" << localeName << "_"
<< "targetDevice=" << deviceName; << "targetDevice=" << targetDevice;
return results.str(); return results.str();
} }
void CustomLocaleTest::SetUp() { void CustomLocaleTest::SetUp() {
std::tie(localeName, deviceName) = GetParam(); std::tie(localeName, target_device) = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
testName = ::testing::UnitTest::GetInstance()->current_test_info()->name(); testName = ::testing::UnitTest::GetInstance()->current_test_info()->name();
function = makeTestModel(); function = makeTestModel();
} }
@ -45,9 +50,9 @@ TEST_P(CustomLocaleTest, CanLoadNetworkWithCustomLocale) {
GTEST_SKIP(); GTEST_SKIP();
} }
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(deviceName); std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(target_device);
InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::CNNNetwork cnnNet(function);
ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, deviceName)); ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device));
std::locale::global(prev); std::locale::global(prev);
} }

View File

@ -25,9 +25,10 @@ std::string InferRequestVariableStateTest::getTestCaseName(const testing::TestPa
} }
void InferRequestVariableStateTest::SetUp() { void InferRequestVariableStateTest::SetUp() {
std::tie(net, statesToQuery, deviceName, configuration) = GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any) // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(net, statesToQuery, deviceName, configuration) = GetParam(); IEInferRequestTestBase::SetUp();
} }
InferenceEngine::ExecutableNetwork InferRequestVariableStateTest::PrepareNetwork() { InferenceEngine::ExecutableNetwork InferRequestVariableStateTest::PrepareNetwork() {

View File

@ -11,21 +11,23 @@ namespace test {
namespace behavior { namespace behavior {
std::string OVCompiledModelEmptyPropertiesTests::getTestCaseName(testing::TestParamInfo<std::string> obj) { std::string OVCompiledModelEmptyPropertiesTests::getTestCaseName(testing::TestParamInfo<std::string> obj) {
return "device_name=" + obj.param; return "target_device=" + obj.param;
} }
void OVCompiledModelEmptyPropertiesTests::SetUp() { void OVCompiledModelEmptyPropertiesTests::SetUp() {
target_device = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
device_name = this->GetParam(); APIBaseTest::SetUp();
model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(device_name); model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
} }
std::string OVCompiledModelPropertiesTests::getTestCaseName(testing::TestParamInfo<PropertiesParams> obj) { std::string OVCompiledModelPropertiesTests::getTestCaseName(testing::TestParamInfo<PropertiesParams> obj) {
std::string device_name; std::string targetDevice;
AnyMap properties; AnyMap properties;
std::tie(device_name, properties) = obj.param; std::tie(targetDevice, properties) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "device_name=" << device_name << "_"; result << "targetDevice=" << targetDevice << "_";
if (!properties.empty()) { if (!properties.empty()) {
result << "properties=" << util::join(util::split(util::to_string(properties), ' '), "_"); result << "properties=" << util::join(util::split(util::to_string(properties), ' '), "_");
} }
@ -33,34 +35,36 @@ std::string OVCompiledModelPropertiesTests::getTestCaseName(testing::TestParamIn
} }
void OVCompiledModelPropertiesTests::SetUp() { void OVCompiledModelPropertiesTests::SetUp() {
std::tie(target_device, properties) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED(); SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(device_name, properties) = this->GetParam(); APIBaseTest::SetUp();
model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(device_name); model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
} }
void OVCompiledModelPropertiesTests::TearDown() { void OVCompiledModelPropertiesTests::TearDown() {
if (!properties.empty()) { if (!properties.empty()) {
utils::PluginCache::get().reset(); utils::PluginCache::get().reset();
} }
APIBaseTest::TearDown();
} }
TEST_P(OVCompiledModelEmptyPropertiesTests, CanCompileModelWithEmptyProperties) { TEST_P(OVCompiledModelEmptyPropertiesTests, CanCompileModelWithEmptyProperties) {
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, AnyMap{})); OV_ASSERT_NO_THROW(core->compile_model(model, target_device, AnyMap{}));
} }
TEST_P(OVCompiledModelPropertiesTests, CanCompileModelWithCorrectProperties) { TEST_P(OVCompiledModelPropertiesTests, CanCompileModelWithCorrectProperties) {
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, properties)); OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties));
} }
TEST_P(OVCompiledModelPropertiesTests, CanUseCache) { TEST_P(OVCompiledModelPropertiesTests, CanUseCache) {
core->set_property(ov::cache_dir("./test_cache")); core->set_property(ov::cache_dir("./test_cache"));
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, properties)); OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties));
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, properties)); OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties));
CommonTestUtils::removeDir("./test_cache"); CommonTestUtils::removeDir("./test_cache");
} }
TEST_P(OVCompiledModelPropertiesTests, canCompileModelWithPropertiesAndCheckGetProperty) { TEST_P(OVCompiledModelPropertiesTests, canCompileModelWithPropertiesAndCheckGetProperty) {
auto compiled_model = core->compile_model(model, device_name, properties); auto compiled_model = core->compile_model(model, target_device, properties);
auto supported_properties = compiled_model.get_property(ov::supported_properties); auto supported_properties = compiled_model.get_property(ov::supported_properties);
for (const auto& property_item : properties) { for (const auto& property_item : properties) {
if (util::contains(supported_properties, property_item.first)) { if (util::contains(supported_properties, property_item.first)) {
@ -73,26 +77,26 @@ TEST_P(OVCompiledModelPropertiesTests, canCompileModelWithPropertiesAndCheckGetP
} }
TEST_P(OVCompiledModelPropertiesIncorrectTests, CanNotCompileModelWithIncorrectProperties) { TEST_P(OVCompiledModelPropertiesIncorrectTests, CanNotCompileModelWithIncorrectProperties) {
ASSERT_THROW(core->compile_model(model, device_name, properties), ov::Exception); ASSERT_THROW(core->compile_model(model, target_device, properties), ov::Exception);
} }
TEST_P(OVCompiledModelPropertiesDefaultTests, CanCompileWithDefaultValueFromPlugin) { TEST_P(OVCompiledModelPropertiesDefaultTests, CanCompileWithDefaultValueFromPlugin) {
std::vector<ov::PropertyName> supported_properties; std::vector<ov::PropertyName> supported_properties;
OV_ASSERT_NO_THROW(supported_properties = core->get_property(device_name, ov::supported_properties)); OV_ASSERT_NO_THROW(supported_properties = core->get_property(target_device, ov::supported_properties));
AnyMap default_rw_properties; AnyMap default_rw_properties;
for (auto& supported_property : supported_properties) { for (auto& supported_property : supported_properties) {
if (supported_property.is_mutable()) { if (supported_property.is_mutable()) {
Any property; Any property;
OV_ASSERT_NO_THROW(property = core->get_property(device_name, supported_property)); OV_ASSERT_NO_THROW(property = core->get_property(target_device, supported_property));
default_rw_properties.emplace(supported_property, property); default_rw_properties.emplace(supported_property, property);
std::cout << supported_property << ":" << property.as<std::string>() << std::endl; std::cout << supported_property << ":" << property.as<std::string>() << std::endl;
} }
} }
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, default_rw_properties)); OV_ASSERT_NO_THROW(core->compile_model(model, target_device, default_rw_properties));
} }
TEST_P(OVCompiledModelPropertiesDefaultTests, CheckDefaultValues) { TEST_P(OVCompiledModelPropertiesDefaultTests, CheckDefaultValues) {
auto compiled_model = core->compile_model(model, device_name); auto compiled_model = core->compile_model(model, target_device);
std::vector<ov::PropertyName> supported_properties; std::vector<ov::PropertyName> supported_properties;
OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties)); OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties));
std::cout << "SUPPORTED PROPERTIES: " << std::endl; std::cout << "SUPPORTED PROPERTIES: " << std::endl;

View File

@ -14,7 +14,7 @@ namespace test {
namespace behavior { namespace behavior {
std::string OVInferRequestBatchedTests::getTestCaseName(const testing::TestParamInfo<std::string>& obj) { std::string OVInferRequestBatchedTests::getTestCaseName(const testing::TestParamInfo<std::string>& obj) {
return "targetDevice=" + obj.param; return "target_device=" + obj.param;
} }
std::string OVInferRequestBatchedTests::generateCacheDirName(const std::string& test_name) { std::string OVInferRequestBatchedTests::generateCacheDirName(const std::string& test_name) {
@ -29,8 +29,9 @@ std::string OVInferRequestBatchedTests::generateCacheDirName(const std::string&
} }
void OVInferRequestBatchedTests::SetUp() { void OVInferRequestBatchedTests::SetUp() {
target_device = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
targetDevice = GetParam(); APIBaseTest::SetUp();
m_cache_dir = generateCacheDirName(GetTestName()); m_cache_dir = generateCacheDirName(GetTestName());
} }
@ -42,6 +43,7 @@ void OVInferRequestBatchedTests::TearDown() {
CommonTestUtils::removeFilesWithExt(m_cache_dir, "blob"); CommonTestUtils::removeFilesWithExt(m_cache_dir, "blob");
CommonTestUtils::removeDir(m_cache_dir); CommonTestUtils::removeDir(m_cache_dir);
} }
APIBaseTest::TearDown();
} }
std::shared_ptr<Model> OVInferRequestBatchedTests::create_n_inputs(size_t n, element::Type type, std::shared_ptr<Model> OVInferRequestBatchedTests::create_n_inputs(size_t n, element::Type type,
@ -74,7 +76,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensorsBase) {
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N..."); auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N...");
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0); std::vector<float> buffer(one_shape_size * batch * 2, 0);
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
@ -108,7 +110,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensorsAsync) {
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N..."); auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N...");
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0); std::vector<float> buffer(one_shape_size * batch * 2, 0);
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
@ -143,7 +145,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_override_with_set) {
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N..."); auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N...");
std::vector<float> buffer(one_shape_size * batch, 4); std::vector<float> buffer(one_shape_size * batch, 4);
std::vector<float> buffer2(one_shape_size * batch, 5); std::vector<float> buffer2(one_shape_size * batch, 5);
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
@ -179,8 +181,8 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensorsBase_Caching) {
auto one_shape_size = ov::shape_size(one_shape); auto one_shape_size = ov::shape_size(one_shape);
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "N..."); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "N...");
ie->set_property({{CONFIG_KEY(CACHE_DIR), m_cache_dir}}); ie->set_property({{CONFIG_KEY(CACHE_DIR), m_cache_dir}});
auto execNet_no_cache = ie->compile_model(model, targetDevice); auto execNet_no_cache = ie->compile_model(model, target_device);
auto execNet_cache = ie->compile_model(model, targetDevice); auto execNet_cache = ie->compile_model(model, target_device);
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0); std::vector<float> buffer(one_shape_size * batch * 2, 0);
@ -219,7 +221,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Multiple_Infer) {
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N..."); auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N...");
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0); std::vector<float> buffer(one_shape_size * batch * 2, 0);
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
@ -256,7 +258,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Can_Infer_Dynamic) {
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, PartialShape({-1, 2, 2, 2}), "N..."); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, PartialShape({-1, 2, 2, 2}), "N...");
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks // Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0); std::vector<float> buffer(one_shape_size * batch * 2, 0);
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
@ -292,7 +294,7 @@ TEST_P(OVInferRequestBatchedTests, SetTensors_Batch1) {
auto one_shape = Shape{1, 3, 10, 10}; auto one_shape = Shape{1, 3, 10, 10};
auto one_shape_size = ov::shape_size(one_shape); auto one_shape_size = ov::shape_size(one_shape);
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, one_shape, "N..."); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, one_shape, "N...");
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
@ -323,7 +325,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Get_Tensor_Not_Allowed) {
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0"; const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -337,7 +339,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Batch_No_Batch) {
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "DCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "DCHW");
const std::string tensor_name = "tensor_input0"; const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -350,7 +352,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_No_Name) {
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "undefined"; const std::string tensor_name = "undefined";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -363,7 +365,7 @@ TEST_P(OVInferRequestBatchedTests, SetTensors_No_Name) {
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "undefined"; const std::string tensor_name = "undefined";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -376,7 +378,7 @@ TEST_P(OVInferRequestBatchedTests, SetTensors_Friendly_Name) {
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "input0"; const std::string tensor_name = "input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -388,7 +390,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_No_index) {
auto one_shape = Shape{1, 3, 3, 3}; auto one_shape = Shape{1, 3, 3, 3};
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -400,7 +402,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_no_name_multiple_inputs) {
auto one_shape = Shape{1, 3, 3, 3}; auto one_shape = Shape{1, 3, 3, 3};
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW");
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -413,7 +415,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Incorrect_count) {
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0"; const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch + 1, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch + 1, ov::Tensor(element::f32, one_shape));
@ -425,7 +427,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Empty_Array) {
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0"; const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors; std::vector<ov::Tensor> tensors;
@ -436,7 +438,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_diff_batches) {
auto batch_shape = Shape{3, 3, 3, 3}; auto batch_shape = Shape{3, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0"; const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors; std::vector<ov::Tensor> tensors;
@ -451,7 +453,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Correct_all) {
auto batch_shape = Shape{2, 3, 3, 3}; auto batch_shape = Shape{2, 3, 3, 3};
std::vector<float> buffer(ov::shape_size(batch_shape), 1); std::vector<float> buffer(ov::shape_size(batch_shape), 1);
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors; std::vector<ov::Tensor> tensors;
@ -468,8 +470,8 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Cache_CheckDeepCopy) {
std::vector<float> buffer_out(ov::shape_size(batch_shape), 1); std::vector<float> buffer_out(ov::shape_size(batch_shape), 1);
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW");
ie->set_property({{CONFIG_KEY(CACHE_DIR), m_cache_dir}}); ie->set_property({{CONFIG_KEY(CACHE_DIR), m_cache_dir}});
auto execNet_no_cache = ie->compile_model(model, targetDevice); auto execNet_no_cache = ie->compile_model(model, target_device);
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
model->input(0).set_names({"updated_input0"}); // Change param name of original model model->input(0).set_names({"updated_input0"}); // Change param name of original model
@ -490,7 +492,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Incorrect_tensor_element_type
auto batch_shape = Shape{batch, 3, 3, 3}; auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0"; const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape));
@ -504,7 +506,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Incorrect_tensor_shape) {
auto batch_shape = Shape{batch, 4, 4, 4}; auto batch_shape = Shape{batch, 4, 4, 4};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW"); auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0"; const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice); auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape)); std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape));

View File

@ -1,132 +0,0 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <future>
#include "shared_test_classes/subgraph/basic_lstm.hpp"
#include "behavior/ov_infer_request/callback.hpp"
namespace ov {
namespace test {
namespace behavior {
std::string OVInferRequestCallbackTests::getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj) {
return OVInferRequestTests::getTestCaseName(obj);
}
TEST_P(OVInferRequestCallbackTests, canCallAsyncWithCompletionCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
bool is_called = false;
OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) {
// HSD_1805940120: Wait on starting callback return HDDL_ERROR_INVAL_TASK_HANDLE
ASSERT_EQ(exception_ptr, nullptr);
is_called = true;
}));
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
ASSERT_TRUE(is_called);
}
TEST_P(OVInferRequestCallbackTests, syncInferDoesNotCallCompletionCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
bool is_called = false;
req.set_callback([&] (std::exception_ptr exception_ptr) {
ASSERT_EQ(nullptr, exception_ptr);
is_called = true;
});
req.infer();
ASSERT_FALSE(is_called);
}
// test that can wait all callbacks on dtor
TEST_P(OVInferRequestCallbackTests, canStartSeveralAsyncInsideCompletionCallbackWithSafeDtor) {
const int NUM_ITER = 10;
struct TestUserData {
std::atomic<int> numIter = {0};
std::promise<bool> promise;
};
TestUserData data;
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) {
if (exception_ptr) {
data.promise.set_exception(exception_ptr);
} else {
if (data.numIter.fetch_add(1) != NUM_ITER) {
req.start_async();
} else {
data.promise.set_value(true);
}
}
}));
auto future = data.promise.get_future();
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
future.wait();
auto callbackStatus = future.get();
ASSERT_TRUE(callbackStatus);
auto dataNumIter = data.numIter - 1;
ASSERT_EQ(NUM_ITER, dataNumIter);
}
TEST_P(OVInferRequestCallbackTests, returnGeneralErrorIfCallbackThrowException) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.set_callback([] (std::exception_ptr) {
OPENVINO_UNREACHABLE("Throw");
}));
OV_ASSERT_NO_THROW(req.start_async());
ASSERT_THROW(req.wait(), ov::Exception);
}
TEST_P(OVInferRequestCallbackTests, ReturnResultNotReadyFromWaitInAsyncModeForTooSmallTimeout) {
// GetNetwork(3000, 380) make inference around 20ms on GNA SW
// so increases chances for getting RESULT_NOT_READY
OV_ASSERT_NO_THROW(execNet = core->compile_model(
SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38), targetDevice, configuration));
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
std::promise<std::chrono::system_clock::time_point> callbackTimeStamp;
auto callbackTimeStampFuture = callbackTimeStamp.get_future();
// add a callback to the request and capture the timestamp
OV_ASSERT_NO_THROW(req.set_callback([&](std::exception_ptr exception_ptr) {
if (exception_ptr) {
callbackTimeStamp.set_exception(exception_ptr);
} else {
callbackTimeStamp.set_value(std::chrono::system_clock::now());
}
}));
OV_ASSERT_NO_THROW(req.start_async());
bool ready = false;
OV_ASSERT_NO_THROW(ready = req.wait_for({}));
// get timestamp taken AFTER return from the wait(STATUS_ONLY)
const auto afterWaitTimeStamp = std::chrono::system_clock::now();
// IF the callback timestamp is larger than the afterWaitTimeStamp
// then we should observe false ready result
if (afterWaitTimeStamp < callbackTimeStampFuture.get()) {
ASSERT_FALSE(ready);
}
OV_ASSERT_NO_THROW(req.wait());
}
TEST_P(OVInferRequestCallbackTests, ImplDoesNotCopyCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
{
auto somePtr = std::make_shared<int>(42);
OV_ASSERT_NO_THROW(req.set_callback([somePtr] (std::exception_ptr exception_ptr) {
ASSERT_EQ(nullptr, exception_ptr);
ASSERT_EQ(1, somePtr.use_count());
}));
}
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
}
} // namespace behavior
} // namespace test
} // namespace ov

View File

@ -1,66 +0,0 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <future>
#include "behavior/ov_infer_request/cancellation.hpp"
#include "openvino/runtime/exception.hpp"
namespace ov {
namespace test {
namespace behavior {
std::string OVInferRequestCancellationTests::getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj) {
return OVInferRequestTests::getTestCaseName(obj);
}
TEST_P(OVInferRequestCancellationTests, canCancelAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.cancel());
try {
req.wait();
} catch (const ov::Cancelled&) {
SUCCEED();
}
}
TEST_P(OVInferRequestCancellationTests, CanResetAfterCancelAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.cancel());
try {
req.wait();
} catch (const ov::Cancelled&) {
SUCCEED();
}
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
}
TEST_P(OVInferRequestCancellationTests, canCancelBeforeAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.cancel());
}
TEST_P(OVInferRequestCancellationTests, canCancelInferRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
auto infer = std::async(std::launch::async, [&req]{req.infer();});
while (!req.wait_for({})) {
}
OV_ASSERT_NO_THROW(req.cancel());
try {
infer.get();
} catch (const ov::Cancelled&) {
SUCCEED();
}
}
} // namespace behavior
} // namespace test
} // namespace ov

View File

@ -37,9 +37,10 @@ namespace behavior {
std::string OVInferRequestDynamicTests::getTestCaseName(testing::TestParamInfo<OVInferRequestDynamicParams> obj) { std::string OVInferRequestDynamicTests::getTestCaseName(testing::TestParamInfo<OVInferRequestDynamicParams> obj) {
std::shared_ptr<Model> func; std::shared_ptr<Model> func;
std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>> inOutShapes; std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>> inOutShapes;
std::string targetDevice; std::string target_device;
ov::AnyMap configuration; ov::AnyMap configuration;
std::tie(func, inOutShapes, targetDevice, configuration) = obj.param; std::tie(func, inOutShapes, target_device, configuration) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
std::ostringstream result; std::ostringstream result;
result << "function=" << func->get_friendly_name() << "_"; result << "function=" << func->get_friendly_name() << "_";
result << "inOutShape=("; result << "inOutShape=(";
@ -47,7 +48,7 @@ std::string OVInferRequestDynamicTests::getTestCaseName(testing::TestParamInfo<O
result << "(" << CommonTestUtils::vec2str(inOutShape.first) << "_" << CommonTestUtils::vec2str(inOutShape.second) << ")"; result << "(" << CommonTestUtils::vec2str(inOutShape.first) << "_" << CommonTestUtils::vec2str(inOutShape.second) << ")";
} }
result << ")_"; result << ")_";
result << "targetDevice=" << targetDevice << "_"; result << "targetDevice=" << target_device << "_";
if (!configuration.empty()) { if (!configuration.empty()) {
for (auto& configItem : configuration) { for (auto& configItem : configuration) {
result << "configItem=" << configItem.first << "_"; result << "configItem=" << configItem.first << "_";
@ -59,8 +60,9 @@ std::string OVInferRequestDynamicTests::getTestCaseName(testing::TestParamInfo<O
} }
void OVInferRequestDynamicTests::SetUp() { void OVInferRequestDynamicTests::SetUp() {
std::tie(function, inOutShapes, target_device, configuration) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED() SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(function, inOutShapes, targetDevice, configuration) = this->GetParam(); APIBaseTest::SetUp();
} }
bool OVInferRequestDynamicTests::checkOutput(const ov::runtime::Tensor& in, const ov::runtime::Tensor& actual) { bool OVInferRequestDynamicTests::checkOutput(const ov::runtime::Tensor& in, const ov::runtime::Tensor& actual) {
@ -81,13 +83,6 @@ bool OVInferRequestDynamicTests::checkOutput(const ov::runtime::Tensor& in, cons
return result; return result;
} }
void OVInferRequestDynamicTests::TearDown() {
if (!configuration.empty()) {
PluginCache::get().reset();
}
function.reset();
}
/* /*
We have to check that we don't get a segmentation fault during We have to check that we don't get a segmentation fault during
inference if we set the first two times to the same shape and inference if we set the first two times to the same shape and
@ -106,7 +101,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetwork) {
}; };
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
const std::string outputname = function->outputs().back().get_any_name(); const std::string outputname = function->outputs().back().get_any_name();
@ -127,7 +122,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkSetUnexpectedOutputTensorB
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::runtime::Tensor tensor, otensor; ov::runtime::Tensor tensor, otensor;
@ -152,7 +147,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkSetOutputTensorPreAllocate
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::runtime::Tensor tensor; ov::runtime::Tensor tensor;
@ -177,7 +172,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkSetOutputShapeBeforeInfer)
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::runtime::Tensor tensor, otensor; ov::runtime::Tensor tensor, otensor;
@ -199,7 +194,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithoutSetShape) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor; ov::Tensor tensor;
@ -213,7 +208,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkBoundWithoutSetShape) {
shapes[tensor_name] = {ov::Dimension(0, 5), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension(0, 5), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor; ov::Tensor tensor;
@ -230,7 +225,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithGetTensor) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor, otensor; ov::Tensor tensor, otensor;
@ -260,7 +255,7 @@ TEST_P(OVInferRequestDynamicTests, InferUpperBoundNetworkWithGetTensor) {
shapes[tensor_name] = {ov::Dimension(0, 19), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension(0, 19), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor, otensor; ov::Tensor tensor, otensor;
@ -288,7 +283,7 @@ TEST_P(OVInferRequestDynamicTests, InferFullyDynamicNetworkWithGetTensor) {
shapes[tensor_name] = ov::PartialShape::dynamic(); shapes[tensor_name] = ov::PartialShape::dynamic();
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor, otensor; ov::Tensor tensor, otensor;
@ -317,7 +312,7 @@ TEST_P(OVInferRequestDynamicTests, InferOutOfRangeShapeNetworkWithGetTensorLower
shapes[tensor_name] = {ov::Dimension(2, 3), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension(2, 3), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor; ov::Tensor tensor;
@ -336,7 +331,7 @@ TEST_P(OVInferRequestDynamicTests, InferOutOfRangeShapeNetworkWithGetTensorUpper
shapes[tensor_name] = {ov::Dimension(1, 2), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension(1, 2), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor; ov::Tensor tensor;
@ -357,7 +352,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithGetTensor2times) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor; ov::Tensor tensor;
@ -392,7 +387,7 @@ TEST_P(OVInferRequestDynamicTests, GetSameTensor2times) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor; ov::Tensor tensor;
@ -412,7 +407,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithSetTensor) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor(ov::element::f32, refShape); ov::Tensor tensor(ov::element::f32, refShape);
@ -436,7 +431,7 @@ TEST_P(OVInferRequestDynamicTests, InferFullyDynamicNetworkWithSetTensor) {
shapes[tensor_name] = ov::PartialShape::dynamic(); shapes[tensor_name] = ov::PartialShape::dynamic();
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor(ov::element::f32, refShape), otensor; ov::Tensor tensor(ov::element::f32, refShape), otensor;
@ -469,7 +464,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithSetTensor2times) {
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
const std::string outputName = function->outputs().back().get_any_name(); const std::string outputName = function->outputs().back().get_any_name();
// Load ov::Model to target plugins // Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration); auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
ov::Tensor tensor(ov::element::f32, refShape); ov::Tensor tensor(ov::element::f32, refShape);
@ -504,7 +499,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithLocalCore) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20}; shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes)); OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins // Load ov::Model to target plugins
compiled_model = local_core.compile_model(function, targetDevice, configuration); compiled_model = local_core.compile_model(function, target_device, configuration);
} }
// Create InferRequest // Create InferRequest
OV_ASSERT_NO_THROW(compiled_model.create_infer_request()); OV_ASSERT_NO_THROW(compiled_model.create_infer_request());
@ -522,7 +517,7 @@ TEST_P(OVNotSupportRequestDynamicTests, InferDynamicNotSupported) {
const std::string outputName = function->outputs().back().get_any_name(); const std::string outputName = function->outputs().back().get_any_name();
// Load ov::Function to target plugins // Load ov::Function to target plugins
ov::CompiledModel execNet; ov::CompiledModel execNet;
ASSERT_THROW((execNet = ie->compile_model(function, targetDevice, configuration)), ov::Exception); ASSERT_THROW((execNet = ie->compile_model(function, target_device, configuration)), ov::Exception);
} }
} // namespace behavior } // namespace behavior
} // namespace test } // namespace test

View File

@ -17,12 +17,10 @@ std::string OVInferRequestInferenceTests::getTestCaseName(
} }
void OVInferRequestInferenceTests::SetUp() { void OVInferRequestInferenceTests::SetUp() {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
m_param = std::get<0>(GetParam()); m_param = std::get<0>(GetParam());
m_device_name = std::get<1>(GetParam()); target_device = std::get<1>(GetParam());
} SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
void OVInferRequestInferenceTests::TearDown() {
} }
std::shared_ptr<Model> OVInferRequestInferenceTests::create_n_inputs(size_t n, std::shared_ptr<Model> OVInferRequestInferenceTests::create_n_inputs(size_t n,
@ -50,7 +48,7 @@ std::shared_ptr<Model> OVInferRequestInferenceTests::create_n_inputs(size_t n,
TEST_P(OVInferRequestInferenceTests, Inference_ROI_Tensor) { TEST_P(OVInferRequestInferenceTests, Inference_ROI_Tensor) {
auto shape_size = ov::shape_size(m_param.m_shape); auto shape_size = ov::shape_size(m_param.m_shape);
auto model = OVInferRequestInferenceTests::create_n_inputs(1, element::f32, m_param.m_shape); auto model = OVInferRequestInferenceTests::create_n_inputs(1, element::f32, m_param.m_shape);
auto execNet = ie->compile_model(model, m_device_name); auto execNet = ie->compile_model(model, target_device);
// Create InferRequest // Create InferRequest
ov::InferRequest req; ov::InferRequest req;
req = execNet.create_infer_request(); req = execNet.create_infer_request();

Some files were not shown because too many files have changed in this diff Show More