API Conformance report: C++ & Merge XML Part (#11245)
* Separate Summary, OpSummary, ApiSummary
* final separation
* ChangeNamespaces
* git status
* filename
* link error
* Fix linking
* Fix compilation
* Report - ieplugin
* Next step
* Fix build
* Changing inheritance + fix build
* Fix run
* Summary
* Fix comments
* Fix the run
* fix build
* #Extend report
* Fix build
* fix template
* api_report flag
* rebase to master branch
* fix
* fix build
* myriad
* fix problem with crash
* Fix some mistakes
* python merge
* fix tests
* tmp
* Update Merge_xml script
* Fix op
* fix build
* Fix bug with --report_unique_name
* build
* remove extra
* gg
* gpu build
* c
* Fix issue with win
* infer_req
* compiled + exec net
* ov_plugin
* ie_plugin
* Fix comments
* ff
* fix last comment
* fix build
* fix template func
* Apply comments
* Apply comments
* fix ci
* build
* build
* build
* inl
* Remove extra
* fix merge_xml
* fix build
* remarks
* skip one test

Co-authored-by: Alexander Zhogov <alexander.zhogov@intel.com>
This commit is contained in:
parent dd55f434c3
commit d04521a7c3
@@ -17,7 +17,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Batch_Non_0) {
     auto batch_shape = Shape{batch, 3, 3, 3};
     auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "CNHW");
     const std::string tensor_name = "tensor_input0";
-    auto execNet = ie->compile_model(model, targetDevice);
+    auto execNet = ie->compile_model(model, target_device);
     ov::InferRequest req;
     req = execNet.create_infer_request();
     std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@@ -31,7 +31,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_remote_tensor_default) {
     auto batch_shape = Shape{batch, 4, 4, 4};
     auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
     const std::string tensor_name = "tensor_input0";
-    auto execNet = ie->compile_model(model, targetDevice);
+    auto execNet = ie->compile_model(model, target_device);
     ov::InferRequest req;
     req = execNet.create_infer_request();
     std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape));
@@ -49,7 +49,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Strides) {
     auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW");
     std::vector<float> buffer1(one_shape_size_stride, 10);
     std::vector<float> buffer2(one_shape_size_stride, 20);
-    auto execNet = ie->compile_model(model, targetDevice);
+    auto execNet = ie->compile_model(model, target_device);
     // Create InferRequest
     ov::InferRequest req;
     req = execNet.create_infer_request();
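The hunks above are part of a rename that runs through the whole PR: tests read the device from a shared target_device member instead of a per-suite targetDevice. A minimal sketch of the resulting fixture shape, assuming a base class that stores the device name (the class and member names here are illustrative, not the exact OpenVINO helpers):

    #include <memory>
    #include <string>
    #include <gtest/gtest.h>
    #include "openvino/runtime/core.hpp"

    // Hypothetical fixture: real conformance fixtures derive from shared test-utils bases.
    class DeviceTestBase : public ::testing::TestWithParam<std::string> {
    protected:
        std::string target_device;  // single canonical member used by every suite
        std::shared_ptr<ov::Core> ie = std::make_shared<ov::Core>();

        void SetUp() override {
            target_device = GetParam();  // e.g. "CPU" or "HETERO:CPU"
        }
    };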
@@ -69,7 +69,13 @@ INSTANTIATE_TEST_SUITE_P(
 // IE Class SetConfig
 //

-using IEClassSetConfigTestHETERO = BehaviorTestsUtils::IEClassNetworkTest;
+class IEClassSetConfigTestHETERO : public BehaviorTestsUtils::IEClassNetworkTest,
+                                   public BehaviorTestsUtils::IEPluginTestBase {
+    void SetUp() override {
+        IEClassNetworkTest::SetUp();
+        IEPluginTestBase::SetUp();
+    }
+};

 TEST_F(IEClassSetConfigTestHETERO, smoke_SetConfigNoThrow) {
     {
@@ -115,7 +121,13 @@ INSTANTIATE_TEST_SUITE_P(
     smoke_IEClassGetConfigTest, IEClassGetConfigTest,
     ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));

-using IEClassGetConfigTestTEMPLATE = BehaviorTestsUtils::IEClassNetworkTest;
+class IEClassGetConfigTestTEMPLATE : public BehaviorTestsUtils::IEClassNetworkTest,
+                                     public BehaviorTestsUtils::IEPluginTestBase {
+    void SetUp() override {
+        IEClassNetworkTest::SetUp();
+        IEPluginTestBase::SetUp();
+    }
+};

 TEST_F(IEClassGetConfigTestTEMPLATE, smoke_GetConfigNoThrow) {
     InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
@@ -9,7 +9,7 @@
 #include "ngraph/node.hpp"
 #include "pugixml.hpp"

-#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"

 namespace SubgraphsDumper {

@@ -10,7 +10,7 @@
 #include <memory>
 #include <ngraph/ngraph.hpp>
 #include "matchers/matchers_manager.hpp"
-#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"

 namespace SubgraphsDumper {

@@ -5,7 +5,7 @@
 #include "gtest/gtest.h"
 #include "matchers/convolutions.hpp"
 #include "ngraph/ops.hpp"
-#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"

 class ConvolutionMatcherTest : public ::testing::Test {
 protected:

@@ -5,7 +5,7 @@
 #include "gtest/gtest.h"
 #include "matchers/single_op.hpp"
 #include "ngraph/ops.hpp"
-#include "functional_test_utils/include/functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/include/functional_test_utils/summary/op_info.hpp"

 class SingleOpMatcherTest : public ::testing::Test {
 protected:
@@ -71,7 +71,7 @@ inline const std::vector<std::map<std::string, std::string>> generate_configs(co
     return resultConfig;
 }

-inline const std::string generate_complex_device_name(const std::string& deviceName) {
+inline const std::string generate_complex_device_name(const std::string deviceName) {
     return deviceName + ":" + ov::test::conformance::targetDevice;
 }

@@ -85,9 +85,27 @@ inline const std::vector<std::string> return_all_possible_device_combination() {
     return res;
 }

-const std::vector<std::map<std::string, std::string>> empty_config = {
-    {},
-};
+inline std::vector<std::pair<std::string, std::string>> generate_pairs_plugin_name_by_device() {
+    std::vector<std::pair<std::string, std::string>> res;
+    for (const auto& device : return_all_possible_device_combination()) {
+        std::string real_device = device.substr(0, device.find(':'));
+        res.push_back(std::make_pair(get_plugin_lib_name_by_device(ov::test::conformance::targetDevice),
+                                     real_device));
+    }
+    return res;
+}
+
+inline std::map<std::string, std::string> AnyMap2StringMap(const AnyMap& config) {
+    if (config.empty())
+        return {};
+    std::map<std::string, std::string> result;
+    for (const auto& configItem : config) {
+        result.insert({configItem.first, configItem.second.as<std::string>()});
+    }
+    return result;
+}
+
+const std::map<std::string, std::string> ie_config = AnyMap2StringMap(ov::test::conformance::pluginConfig);

 } // namespace conformance
 } // namespace test
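AnyMap2StringMap is the bridge between the two API generations: the 2.0 suites consume the plugin config as one ov::AnyMap, while the legacy Inference Engine suites need a std::map<std::string, std::string>. A minimal standalone sketch of the same conversion idea, using only public ov::Any facilities (the surrounding conformance globals are assumed):

    #include <map>
    #include <string>
    #include "openvino/core/any.hpp"  // ov::Any, ov::AnyMap

    std::map<std::string, std::string> to_string_map(const ov::AnyMap& config) {
        std::map<std::string, std::string> result;
        for (const auto& item : config)
            result.emplace(item.first, item.second.as<std::string>());  // Any -> printable string
        return result;
    }

    // Usage idea: both API flavours then share one source of truth,
    // e.g. an ov::AnyMap parsed once from the command line is reused as-is
    // by the OV 2.0 tests and stringified once for the legacy tests.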
@@ -34,9 +34,7 @@ inline const std::vector<ov::AnyMap> generate_ov_configs(const std::string& targ
     return resultConfig;
 }

-const std::vector<ov::AnyMap> empty_ov_config = {
-    {},
-};
+const ov::AnyMap ov_config = ov::test::conformance::pluginConfig;

 } // namespace conformance
 } // namespace test
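The two helper headers end up symmetrical: ov_config is the raw ov::AnyMap for the 2.0 suites, ie_config its stringified twin for the legacy suites. A hedged sketch of what this changes at each instantiation site (gtest generators shown as comments; names follow the diff above):

    // Before: every suite iterated a one-element vector holding an empty config.
    //     ::testing::ValuesIn(empty_ov_config)   // std::vector<ov::AnyMap>{{}}
    // After: every suite takes the single config supplied to the binary.
    //     ::testing::Values(ov_config)           // one shared ov::AnyMap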
@@ -11,7 +11,7 @@ namespace {
 using namespace ExecutionGraphTests;

 INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecGraphSerializationTest,
-                         ::testing::Values(ov::test::conformance::targetDevice),
+                         ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination()),
                          ExecGraphSerializationTest::getTestCaseName);

 const std::vector<InferenceEngine::Precision> execGraphInfoElemTypes = {

@@ -22,7 +22,7 @@ INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecGraphUniqueNodeNames,
                          ::testing::Combine(
                                  ::testing::ValuesIn(execGraphInfoElemTypes),
                                  ::testing::Values(InferenceEngine::SizeVector({1, 2, 5, 5})),
-                                 ::testing::Values(ov::test::conformance::targetDevice)),
+                                 ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination())),
                          ExecGraphUniqueNodeNames::getTestCaseName);

 } // namespace
@@ -14,7 +14,7 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(ie_executable_network, ExecutableNetworkBaseTest,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          ExecutableNetworkBaseTest::getTestCaseName);

 const std::vector<InferenceEngine::Precision> execNetBaseElemTypes = {

@@ -28,6 +28,6 @@ namespace {
                          ::testing::Combine(
                                  ::testing::ValuesIn(execNetBaseElemTypes),
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          ExecNetSetPrecision::getTestCaseName);
 } // namespace
@@ -61,7 +61,7 @@ INSTANTIATE_TEST_SUITE_P(

 INSTANTIATE_TEST_SUITE_P(
         smoke_IEClassHeteroExecutableNetworkGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS,
-        ::testing::Values(ov::test::conformance::targetDevice));
+        ::testing::ValuesIn(return_all_possible_device_combination()));

 INSTANTIATE_TEST_SUITE_P(
         ie_executable_network, IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME,
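return_all_possible_device_combination() is what lets one conformance binary exercise the plain device and its wrapper meta-devices in a single run. A sketch of the kind of list it is expected to yield for a run against GPU (the exact composition is an assumption for illustration; the real list is built by the conformance helpers):

    #include <string>
    #include <vector>

    // Illustrative only: real values come from ov::test::conformance.
    std::vector<std::string> all_device_combinations_for(const std::string& device) {
        return {
            device,              // "GPU"
            "HETERO:" + device,  // "HETERO:GPU"
            "MULTI:" + device,   // "MULTI:GPU"
            "AUTO:" + device,    // "AUTO:GPU"
            "BATCH:" + device,   // "BATCH:GPU"
        };
    }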
@@ -12,6 +12,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestCallbackTests,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          InferRequestCallbackTests::getTestCaseName);
 } // namespace

@@ -12,6 +12,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestCancellationTests,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          InferRequestCancellationTests::getTestCaseName);
 } // namespace

@@ -15,6 +15,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestIOBBlobTest,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          InferRequestIOBBlobTest::getTestCaseName);
 } // namespace
@@ -16,7 +16,7 @@ using namespace BehaviorTestsDefinitions;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestMultithreadingTests,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          InferRequestMultithreadingTests::getTestCaseName);

 } // namespace

@@ -12,7 +12,7 @@ using namespace BehaviorTestsDefinitions;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestPerfCountersTest,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          InferRequestPerfCountersTest::getTestCaseName);

 } // namespace
@@ -13,16 +13,14 @@ const std::vector<FuncTestUtils::BlobType> setBlobTypes = {
     FuncTestUtils::BlobType::Compound,
     FuncTestUtils::BlobType::Batched,
     FuncTestUtils::BlobType::Memory,
-    // FuncTestUtils::BlobType::Remote,
+    FuncTestUtils::BlobType::Remote,
     FuncTestUtils::BlobType::I420,
     FuncTestUtils::BlobType::NV12
 };

-const std::map<std::string, std::string> ConfigBlobType{}; //nothing special
-
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestSetBlobByType,
                          ::testing::Combine(::testing::ValuesIn(setBlobTypes),
                                             ::testing::ValuesIn(return_all_possible_device_combination()),
-                                            ::testing::ValuesIn(empty_config)),
+                                            ::testing::Values(ie_config)),
                          InferRequestSetBlobByType::getTestCaseName);
 } // namespace
@@ -15,6 +15,6 @@ using namespace BehaviorTestsDefinitions;
 INSTANTIATE_TEST_SUITE_P(ie_infer_request, InferRequestWaitTests,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          InferRequestWaitTests::getTestCaseName);
 } // namespace
@@ -30,6 +30,6 @@ INSTANTIATE_TEST_SUITE_P(ov_compiled_model,
                          ::testing::Combine(
                                  ::testing::ValuesIn(ovExecGraphInfoElemTypes),
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVExecGraphImportExportTest::getTestCaseName);
 } // namespace

@@ -14,6 +14,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVExecutableNetworkBaseTest,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVExecutableNetworkBaseTest::getTestCaseName);
 } // namespace
@@ -18,7 +18,7 @@ using namespace InferenceEngine::PluginConfigParams;


 INSTANTIATE_TEST_SUITE_P(
-        ov_compiled_model, OVClassImportExportTestP,
+        ov_compiled_model, OVClassExecutableNetworkImportExportTestP,
         ::testing::ValuesIn(return_all_possible_device_combination()));

 //

@@ -55,7 +55,7 @@ INSTANTIATE_TEST_SUITE_P(

 INSTANTIATE_TEST_SUITE_P(
         ov_compiled_model, OVClassExecutableNetworkSetConfigTest,
-        ::testing::Values(ov::test::conformance::targetDevice));
+        ::testing::ValuesIn(return_all_possible_device_combination()));

 ////
 //// Hetero Executable Network GetMetric
@@ -16,34 +16,15 @@ const std::vector<ov::AnyMap> inproperties = {
 };

 const std::vector<ov::AnyMap> auto_batch_inproperties = {
     {{ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}},
     {{ov::auto_batch_timeout(-1)}},
 };

 INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesIncorrectTests,
         ::testing::Combine(
-                ::testing::Values(ov::test::conformance::targetDevice),
+                ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination()),
                 ::testing::ValuesIn(inproperties)),
         OVCompiledModelPropertiesIncorrectTests::getTestCaseName);

-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Hetero, OVCompiledModelPropertiesIncorrectTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_HETERO),
-                ::testing::ValuesIn(generate_ov_configs(CommonTestUtils::DEVICE_HETERO, inproperties))),
-        OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Multi, OVCompiledModelPropertiesIncorrectTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_MULTI),
-                ::testing::ValuesIn(generate_ov_configs(CommonTestUtils::DEVICE_MULTI, inproperties))),
-        OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Auto, OVCompiledModelPropertiesIncorrectTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                ::testing::ValuesIn(generate_ov_configs(CommonTestUtils::DEVICE_AUTO, inproperties))),
-        OVCompiledModelPropertiesIncorrectTests::getTestCaseName);
-
 INSTANTIATE_TEST_SUITE_P(ov_compiled_model_AutoBatch, OVCompiledModelPropertiesIncorrectTests,
         ::testing::Combine(
                 ::testing::Values(CommonTestUtils::DEVICE_BATCH),

@@ -63,35 +44,16 @@ INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesDefaultTest
         OVCompiledModelPropertiesDefaultTests::getTestCaseName);

 const std::vector<ov::AnyMap> auto_batch_properties = {
     {},
     {{CONFIG_KEY(AUTO_BATCH_TIMEOUT) , "1"}},
     {{ov::auto_batch_timeout(10)}},
 };

 INSTANTIATE_TEST_SUITE_P(ov_compiled_model, OVCompiledModelPropertiesTests,
         ::testing::Combine(
-                ::testing::Values(ov::test::conformance::targetDevice),
+                ::testing::ValuesIn(ov::test::conformance::return_all_possible_device_combination()),
                 ::testing::ValuesIn(default_properties)),
         OVCompiledModelPropertiesTests::getTestCaseName);

-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Hetero, OVCompiledModelPropertiesTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_HETERO),
-                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_HETERO, default_properties))),
-        OVCompiledModelPropertiesTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Multi, OVCompiledModelPropertiesTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_MULTI),
-                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_MULTI, default_properties))),
-        OVCompiledModelPropertiesTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_compiled_model_Auto, OVCompiledModelPropertiesTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_AUTO, default_properties))),
-        OVCompiledModelPropertiesTests::getTestCaseName);
-
 INSTANTIATE_TEST_SUITE_P(ov_compiled_model_AutoBatch, OVCompiledModelPropertiesTests,
         ::testing::Combine(
                 ::testing::Values(CommonTestUtils::DEVICE_BATCH),
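The per-wrapper Hetero/Multi/Auto instantiations removed above relied on generate_ov_configs to graft the real target device into each wrapper's config; once the device list itself contains "HETERO:...", "MULTI:..." and so on, they become redundant. A sketch of what such a helper does, assuming the wrappers consume the public ov::device::priorities property (the body is an assumption, not the exact source):

    #include <string>
    #include <vector>
    #include "openvino/runtime/properties.hpp"

    std::vector<ov::AnyMap> generate_wrapper_configs(const std::vector<ov::AnyMap>& configs,
                                                     const std::string& actual_device) {
        std::vector<ov::AnyMap> result = configs;
        for (auto& config : result) {
            // HETERO/MULTI/AUTO need to know which real plugin to dispatch to.
            config.emplace(ov::device::priorities.name(), actual_device);
        }
        return result;
    }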
@@ -15,7 +15,7 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestCallbackTests,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVInferRequestCallbackTests::getTestCaseName);

 } // namespace

@@ -12,6 +12,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestCancellationTests,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVInferRequestCancellationTests::getTestCaseName);
 } // namespace

@@ -61,7 +61,7 @@ INSTANTIATE_TEST_SUITE_P(ov_infer_request_1, OVInferRequestDynamicTests,
                                  {{1, 4, 20, 20}, {1, 4, 20, 20}},
                                  {{2, 4, 20, 20}, {2, 4, 20, 20}}}),
                          ::testing::ValuesIn(return_all_possible_device_combination()),
-                         ::testing::ValuesIn(empty_ov_config)),
+                         ::testing::Values(ov_config)),
                  OVInferRequestDynamicTests::getTestCaseName);

 INSTANTIATE_TEST_SUITE_P(ov_infer_request_2, OVInferRequestDynamicTests,

@@ -71,6 +71,6 @@ INSTANTIATE_TEST_SUITE_P(ov_infer_request_2, OVInferRequestDynamicTests,
                                  {{1, 4, 20, 20}, {1, 2, 20, 40}},
                                  {{2, 4, 20, 20}, {2, 2, 20, 40}}}),
                          ::testing::ValuesIn(return_all_possible_device_combination()),
-                         ::testing::ValuesIn(empty_ov_config)),
+                         ::testing::Values(ov_config)),
                  OVInferRequestDynamicTests::getTestCaseName);
 } // namespace

@@ -13,6 +13,6 @@ using namespace ov::test::conformance;
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferenceChaining,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVInferenceChaining::getTestCaseName);
 } // namespace
@@ -15,7 +15,7 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestIOTensorTest,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVInferRequestIOTensorTest::getTestCaseName);

 std::vector<ov::element::Type> ovIOTensorElemTypes = {

@@ -41,6 +41,6 @@ INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestIOTensorSetPrecisionTes
                          ::testing::Combine(
                                  ::testing::ValuesIn(ovIOTensorElemTypes),
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVInferRequestIOTensorSetPrecisionTest::getTestCaseName);
 } // namespace

@@ -16,7 +16,7 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestMultithreadingTests,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVInferRequestMultithreadingTests::getTestCaseName);

 } // namespace

@@ -13,7 +13,7 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestPerfCountersTest,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVInferRequestPerfCountersTest::getTestCaseName);

 } // namespace

@@ -16,7 +16,7 @@ namespace {
 INSTANTIATE_TEST_SUITE_P(ov_infer_request, OVInferRequestWaitTests,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_ov_config)),
+                                 ::testing::Values(ov_config)),
                          OVInferRequestWaitTests::getTestCaseName);

 } // namespace
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
ov_plugin, OVClassBasicTestP,
|
||||
::testing::Values(std::make_pair(get_plugin_lib_name_by_device(ov::test::conformance::targetDevice), ov::test::conformance::targetDevice)));
|
||||
::testing::ValuesIn(generate_pairs_plugin_name_by_device()));
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
ov_plugin, OVClassNetworkTestP,
|
||||
::testing::ValuesIn(return_all_possible_device_combination()));
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
ov_plugin, OVClassImportExportTestP,
|
||||
smoke_OVClassImportExportTestP, OVClassImportExportTestP,
|
||||
::testing::ValuesIn(return_all_possible_device_combination()));
|
||||
|
||||
//
|
||||
|
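OVClassBasicTestP is parameterized by (plugin library, device) pairs, so widening from a single device to every combination means generating that pair list rather than hard-coding one std::make_pair. A sketch of what the generated parameters might look like (library names are illustrative; the real mapping comes from get_plugin_lib_name_by_device):

    #include <string>
    #include <utility>
    #include <vector>

    using PluginDevicePair = std::pair<std::string, std::string>;  // {plugin lib, device}

    // Illustrative output for a CPU run; actual names are resolved by the helper.
    std::vector<PluginDevicePair> example_pairs = {
        {"openvino_intel_cpu_plugin", "CPU"},
        {"openvino_intel_cpu_plugin", "HETERO"},  // wrapper devices reuse the real plugin
        {"openvino_intel_cpu_plugin", "MULTI"},
    };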
@@ -3,10 +3,12 @@
 //

 #include "behavior/ov_plugin/properties_tests.hpp"
+#include "base/ov_behavior_test_utils.hpp"
+#include "openvino/runtime/properties.hpp"
 #include "ov_api_conformance_helpers.hpp"

 using namespace ov::test::behavior;
 using namespace ov::test::conformance;

 namespace {

@@ -15,34 +17,15 @@ const std::vector<ov::AnyMap> inproperties = {
 };

 const std::vector<ov::AnyMap> auto_batch_inproperties = {
     {{ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}},
     {{ov::auto_batch_timeout(-1)}},
 };

 INSTANTIATE_TEST_SUITE_P(ov_plugin, OVPropertiesIncorrectTests,
         ::testing::Combine(
-                ::testing::Values(ov::test::conformance::targetDevice),
+                ::testing::ValuesIn(return_all_possible_device_combination()),
                 ::testing::ValuesIn(inproperties)),
         OVPropertiesIncorrectTests::getTestCaseName);

-INSTANTIATE_TEST_SUITE_P(ov_plugin_Hetero, OVPropertiesIncorrectTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_HETERO),
-                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_HETERO, inproperties))),
-        OVPropertiesIncorrectTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Multi, OVPropertiesIncorrectTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_MULTI),
-                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_MULTI, inproperties))),
-        OVPropertiesIncorrectTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Auto, OVPropertiesIncorrectTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_AUTO, inproperties))),
-        OVPropertiesIncorrectTests::getTestCaseName);
-
 INSTANTIATE_TEST_SUITE_P(ov_plugin_AutoBatch, OVPropertiesIncorrectTests,
         ::testing::Combine(
                 ::testing::Values(CommonTestUtils::DEVICE_BATCH),
@@ -62,28 +45,10 @@ const std::vector<ov::AnyMap> auto_batch_properties = {

 INSTANTIATE_TEST_SUITE_P(ov_plugin, OVPropertiesTests,
         ::testing::Combine(
-                ::testing::Values(ov::test::conformance::targetDevice),
+                ::testing::ValuesIn(return_all_possible_device_combination()),
                 ::testing::ValuesIn(default_properties)),
         OVPropertiesTests::getTestCaseName);

-INSTANTIATE_TEST_SUITE_P(ov_plugin_Hetero, OVPropertiesTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_HETERO),
-                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_HETERO, default_properties))),
-        OVPropertiesTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Multi, OVPropertiesTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_MULTI),
-                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_MULTI, default_properties))),
-        OVPropertiesTests::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(ov_plugin_Auto, OVPropertiesTests,
-        ::testing::Combine(
-                ::testing::Values(CommonTestUtils::DEVICE_AUTO),
-                ::testing::ValuesIn(ov::test::conformance::generate_ov_configs(CommonTestUtils::DEVICE_AUTO, default_properties))),
-        OVPropertiesTests::getTestCaseName);
-
 INSTANTIATE_TEST_SUITE_P(ov_plugin_AutoBatch, OVPropertiesTests,
         ::testing::Combine(
                 ::testing::Values(CommonTestUtils::DEVICE_BATCH),
@@ -24,8 +24,16 @@ namespace {
 #else
     auto defaultBindThreadParameter = InferenceEngine::Parameter{std::string{CONFIG_VALUE(YES)}};
 #endif
+INSTANTIATE_TEST_SUITE_P(
+        ie_plugin,
+        DefaultConfigurationTest,
+        ::testing::Combine(
+                ::testing::ValuesIn(return_all_possible_device_combination()),
+                ::testing::Values(DefaultParameter{CONFIG_KEY(PERF_COUNT), CONFIG_VALUE(YES)})),
+        DefaultConfigurationTest::getTestCaseName);

 const std::vector<std::map<std::string, std::string>> pluginConfigs = {
     {},
     {{}},
     {{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::THROUGHPUT}},
     {{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY}},
     {{InferenceEngine::PluginConfigParams::KEY_PERFORMANCE_HINT, InferenceEngine::PluginConfigParams::LATENCY},

@@ -161,7 +169,7 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin_Hetero, CorrectConfigTests,
     {{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT, "10"}}
 };

-INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, CorrectConfigCheck,
+INSTANTIATE_TEST_SUITE_P(ie_plugin, CorrectConfigCheck,
                          ::testing::Combine(
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
                                  ::testing::ValuesIn(pluginConfigsCheck)),
@@ -16,7 +16,7 @@ namespace {

 INSTANTIATE_TEST_SUITE_P(
         ie_plugin, IEClassBasicTestP,
-        ::testing::Values(std::make_pair(get_plugin_lib_name_by_device(ov::test::conformance::targetDevice), ov::test::conformance::targetDevice)));
+        ::testing::ValuesIn(generate_pairs_plugin_name_by_device()));

 INSTANTIATE_TEST_SUITE_P(
         ie_plugin, IEClassNetworkTestP,

@@ -16,7 +16,10 @@ const Params coreThreadingParams[] = {
     std::tuple<Device, Config>{ CommonTestUtils::DEVICE_BATCH, generate_configs(CommonTestUtils::DEVICE_BATCH).front() },
 };

-INSTANTIATE_TEST_SUITE_P(ie_plugin_, CoreThreadingTests, testing::ValuesIn(coreThreadingParams), CoreThreadingTests::getTestCaseName);
+INSTANTIATE_TEST_SUITE_P(ie_plugin_, CoreThreadingTests,
+                         testing::ValuesIn(coreThreadingParams),
+                         CoreThreadingTests::getTestCaseName);

 INSTANTIATE_TEST_SUITE_P(ie_plugin, CoreThreadingTests,
         ::testing::Combine(
                 ::testing::ValuesIn(return_all_possible_device_combination()),
@@ -19,7 +19,7 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessTest,
                          ::testing::Combine(
                                  ::testing::ValuesIn(netPrecisionsPreprocess),
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          InferRequestPreprocessTest::getTestCaseName);

 const std::vector<InferenceEngine::Precision> ioPrecisionsPreprocess = {

@@ -47,7 +47,7 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessConversionTest,
                                  ::testing::Bool(),
                                  ::testing::Bool(),
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          InferRequestPreprocessConversionTest::getTestCaseName);

 INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessDynamicallyInSetBlobTest,

@@ -61,6 +61,6 @@ INSTANTIATE_TEST_SUITE_P(ie_plugin, InferRequestPreprocessDynamicallyInSetBlobTe
                                  ::testing::Values(true), // only SetBlob
                                  ::testing::Values(true), // only SetBlob
                                  ::testing::ValuesIn(return_all_possible_device_combination()),
-                                 ::testing::ValuesIn(empty_config)),
+                                 ::testing::Values(ie_config)),
                          InferRequestPreprocessDynamicallyInSetBlobTest::getTestCaseName);
 } // namespace
@@ -11,7 +11,7 @@

 #include "common_test_utils/file_utils.hpp"
 #include "functional_test_utils/skip_tests_config.hpp"
-#include "functional_test_utils/layer_test_utils/environment.hpp"
+#include "functional_test_utils/summary/environment.hpp"

 #include "read_ir_test/read_ir.hpp"
 #include "gflag_config.hpp"
|
||||
}
|
||||
|
||||
FuncTestUtils::SkipTestsConfig::disable_tests_skipping = FLAGS_disable_test_config;
|
||||
LayerTestsUtils::Summary::setExtendReport(FLAGS_extend_report);
|
||||
LayerTestsUtils::Summary::setExtractBody(FLAGS_extract_body);
|
||||
LayerTestsUtils::Summary::setSaveReportWithUniqueName(FLAGS_report_unique_name);
|
||||
LayerTestsUtils::Summary::setOutputFolder(FLAGS_output_folder);
|
||||
LayerTestsUtils::Summary::setSaveReportTimeout(FLAGS_save_report_timeout);
|
||||
ov::test::utils::OpSummary::setExtendReport(FLAGS_extend_report);
|
||||
ov::test::utils::OpSummary::setExtractBody(FLAGS_extract_body);
|
||||
ov::test::utils::OpSummary::setSaveReportWithUniqueName(FLAGS_report_unique_name);
|
||||
ov::test::utils::OpSummary::setOutputFolder(FLAGS_output_folder);
|
||||
ov::test::utils::OpSummary::setSaveReportTimeout(FLAGS_save_report_timeout);
|
||||
{
|
||||
auto &apiSummary = ov::test::utils::ApiSummary::getInstance();
|
||||
apiSummary.setDeviceName(FLAGS_device);
|
||||
}
|
||||
if (FLAGS_shape_mode == std::string("static")) {
|
||||
ov::test::subgraph::shapeMode = ov::test::subgraph::ShapeMode::STATIC;
|
||||
} else if (FLAGS_shape_mode == std::string("dynamic")) {
|
||||
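This hunk is the heart of the PR: the old monolithic LayerTestsUtils::Summary splits into an operation report (OpSummary) and an API-conformance report (ApiSummary), both configured once in main. A compressed sketch of the singleton shape this implies (method names are taken from the calls above; everything else is an assumption):

    #include <string>

    namespace ov::test::utils {

    // Sketch: each report type is an independently configured singleton.
    class ApiSummary {
    public:
        static ApiSummary& getInstance() {  // lazily constructed, process-wide
            static ApiSummary instance;
            return instance;
        }
        void setDeviceName(const std::string& device) { device_ = device; }
        void saveReport() { /* serialize collected API stats to the report XML */ }
    private:
        ApiSummary() = default;
        std::string device_;
    };

    }  // namespace ov::test::utils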
@@ -75,25 +79,29 @@ int main(int argc, char* argv[]) {
     }

     ::testing::InitGoogleTest(&argc, argv);
-    ::testing::AddGlobalTestEnvironment(new LayerTestsUtils::TestEnvironment);
+    ::testing::AddGlobalTestEnvironment(new ov::test::utils::TestEnvironment);

     auto exernalSignalHandler = [](int errCode) {
         std::cerr << "Unexpected application crash with code: " << errCode << std::endl;

+        auto& op_summary = ov::test::utils::OpSummary::getInstance();
+        auto& api_summary = ov::test::utils::ApiSummary::getInstance();
+        op_summary.saveReport();
+        api_summary.saveReport();
+
+        // set default handler for crash
+        signal(SIGABRT, SIG_DFL);
+        signal(SIGSEGV, SIG_DFL);
+        signal(SIGINT, SIG_DFL);
+        signal(SIGTERM, SIG_DFL);

         if (errCode == SIGINT || errCode == SIGTERM) {
-            auto& s = LayerTestsUtils::Summary::getInstance();
-            s.saveReport();
             exit(1);
         }
     };

-    // killed by extarnal
+    // killed by external
     signal(SIGINT, exernalSignalHandler);
     signal(SIGTERM , exernalSignalHandler);

+    signal(SIGSEGV, exernalSignalHandler);
+    signal(SIGABRT, exernalSignalHandler);
     return RUN_ALL_TESTS();
 }
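The handler now flushes both reports and re-arms the default signal dispositions before exiting, so a crashing or externally killed run still leaves a partial conformance report on disk. The pattern in isolation, as a minimal self-contained sketch (save_all_reports stands in for the two singleton saveReport calls):

    #include <csignal>
    #include <cstdlib>
    #include <iostream>

    void save_all_reports() { /* flush OpSummary + ApiSummary XML here */ }

    extern "C" void on_fatal_signal(int sig) {
        std::cerr << "Unexpected application crash with code: " << sig << std::endl;
        save_all_reports();            // persist whatever was collected so far
        std::signal(sig, SIG_DFL);     // restore the default disposition...
        if (sig == SIGINT || sig == SIGTERM)
            std::exit(1);              // clean exit for external kills
        std::raise(sig);               // ...and let SEGV/ABRT terminate as usual
    }

    // Usage (in main): std::signal(SIGSEGV, on_fatal_signal); etc.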
@@ -14,7 +14,7 @@
 #include "common_test_utils/data_utils.hpp"
 #include "common_test_utils/common_utils.hpp"
 #include "common_test_utils/crash_handler.hpp"
-#include "functional_test_utils/layer_test_utils/op_info.hpp"
+#include "functional_test_utils/summary/op_info.hpp"
 #include "functional_test_utils/skip_tests_config.hpp"

 #include "read_ir_test/read_ir.hpp"

@@ -56,7 +56,7 @@ std::string ReadIRTest::getTestCaseName(const testing::TestParamInfo<ReadIRParam
 void ReadIRTest::query_model() {
     // in case of crash jump will be made and work will be continued
     auto crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());
-    auto &s = LayerTestsUtils::Summary::getInstance();
+    auto &s = ov::test::utils::OpSummary::getInstance();

     // place to jump in case of a crash
     int jmpRes = 0;
|
||||
s.setDeviceName(targetDevice);
|
||||
|
||||
if (FuncTestUtils::SkipTestsConfig::currentTestIsDisabled()) {
|
||||
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::SKIPPED);
|
||||
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::SKIPPED);
|
||||
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
|
||||
} else {
|
||||
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::CRASHED);
|
||||
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::CRASHED);
|
||||
}
|
||||
try {
|
||||
SubgraphBaseTest::query_model();
|
||||
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::PASSED);
|
||||
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::PASSED);
|
||||
} catch (...) {
|
||||
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::FAILED);
|
||||
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED);
|
||||
}
|
||||
} else if (jmpRes == CommonTestUtils::JMP_STATUS::anyError) {
|
||||
IE_THROW() << "Crash happens";
|
||||
} else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
|
||||
s.updateOPsStats(functionRefs, LayerTestsUtils::PassRate::Statuses::HANGED);
|
||||
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::HANGED);
|
||||
IE_THROW() << "Crash happens";
|
||||
}
|
||||
}
|
||||
|
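Worth noting in the hunk above: the status is preset to CRASHED before the call under test and only overwritten with PASSED or FAILED afterwards, so a hard crash (intercepted by the setjmp/longjmp-style CrashHandler) leaves the honest status in the report. The pattern reduced to its skeleton (enum and function names are illustrative):

    enum class Status { PASSED, FAILED, SKIPPED, CRASHED, HANGED };

    // Sketch: record the pessimistic outcome first, then upgrade it.
    template <typename Fn>
    Status run_guarded(Fn&& step, Status& recorded) {
        recorded = Status::CRASHED;  // if we never return, the report says CRASHED
        try {
            step();
            recorded = Status::PASSED;
        } catch (...) {
            recorded = Status::FAILED;
        }
        return recorded;
    }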
@@ -18,7 +18,7 @@ TEST_P(ExecGraphUniqueNodeNames, CheckUniqueNodeNames) {
     InferenceEngine::CNNNetwork cnnNet(fnPtr);

     auto ie = PluginCache::get().ie();
-    auto execNet = ie->LoadNetwork(cnnNet, targetDevice);
+    auto execNet = ie->LoadNetwork(cnnNet, target_device);

     InferenceEngine::CNNNetwork execGraphInfo = execNet.GetExecGraphInfo();

@@ -17,7 +17,7 @@ namespace {

 INSTANTIATE_TEST_SUITE_P(
-        smoke_OVClassImportExportTestP, OVClassImportExportTestP,
+        smoke_OVClassImportExportTestP, OVClassExecutableNetworkImportExportTestP,
         ::testing::Values("HETERO:CPU"));

 //

@@ -78,7 +78,7 @@ TEST_P(IEClassExecutableNetworkSetConfigFromFp32Test, SetConfigFromFp32Throws) {

     std::map<std::string, std::string> initialConfig;
     initialConfig[GNA_CONFIG_KEY(DEVICE_MODE)] = InferenceEngine::GNAConfigParams::GNA_SW_FP32;
-    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName, initialConfig);
+    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device, initialConfig);

     ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngine::Exception);
 }
@@ -18,7 +18,7 @@
 using namespace ov::test::behavior;

 namespace {
-// IE Class Common tests with <pluginName, deviceName params>
+// IE Class Common tests with <pluginName, target_device params>
 //

 INSTANTIATE_TEST_SUITE_P(nightly_OVClassCommon,
|
||||
ov::Core ie;
|
||||
ov::Any p;
|
||||
|
||||
ASSERT_NO_THROW(p = ie.get_property(deviceName, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE)));
|
||||
ASSERT_NO_THROW(p = ie.get_property(target_device, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE)));
|
||||
uint64_t t = p;
|
||||
|
||||
std::cout << "GPU device total memory size: " << t << std::endl;
|
||||
@ -127,7 +127,7 @@ TEST_P(OVClassGetMetricTest_GPU_UARCH_VERSION, GetMetricAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
ov::Any p;
|
||||
|
||||
ASSERT_NO_THROW(p = ie.get_property(deviceName, GPU_METRIC_KEY(UARCH_VERSION)));
|
||||
ASSERT_NO_THROW(p = ie.get_property(target_device, GPU_METRIC_KEY(UARCH_VERSION)));
|
||||
std::string t = p;
|
||||
|
||||
std::cout << "GPU device uarch: " << t << std::endl;
|
||||
@ -143,7 +143,7 @@ TEST_P(OVClassGetMetricTest_GPU_EXECUTION_UNITS_COUNT, GetMetricAndPrintNoThrow)
|
||||
ov::Core ie;
|
||||
ov::Any p;
|
||||
|
||||
ASSERT_NO_THROW(p = ie.get_property(deviceName, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT)));
|
||||
ASSERT_NO_THROW(p = ie.get_property(target_device, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT)));
|
||||
int t = p;
|
||||
|
||||
std::cout << "GPU EUs count: " << t << std::endl;
|
||||
@ -160,7 +160,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricAvailableDevicesAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
std::vector<std::string> properties;
|
||||
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::available_devices));
|
||||
ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::available_devices));
|
||||
|
||||
std::cout << "AVAILABLE_DEVICES: ";
|
||||
for (const auto& prop : properties) {
|
||||
@ -175,7 +175,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricRangeForAsyncInferRequestsAndPrintNo
|
||||
ov::Core ie;
|
||||
|
||||
std::tuple<unsigned int, unsigned int, unsigned int> property;
|
||||
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::range_for_async_infer_requests));
|
||||
ASSERT_NO_THROW(property = ie.get_property(target_device, ov::range_for_async_infer_requests));
|
||||
|
||||
std::cout << "RANGE_FOR_ASYNC_INFER_REQUESTS: " << std::get<0>(property) << " " <<
|
||||
std::get<1>(property) << " " <<
|
||||
@ -188,7 +188,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricRangeForStreamsAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
std::tuple<unsigned int, unsigned int> property;
|
||||
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::range_for_streams));
|
||||
ASSERT_NO_THROW(property = ie.get_property(target_device, ov::range_for_streams));
|
||||
|
||||
std::cout << "RANGE_FOR_STREAMS: " << std::get<0>(property) << " " <<
|
||||
std::get<1>(property) << std::endl;
|
||||
@ -200,7 +200,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricOptimalBatchSizeAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
unsigned int property;
|
||||
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::optimal_batch_size));
|
||||
ASSERT_NO_THROW(property = ie.get_property(target_device, ov::optimal_batch_size));
|
||||
|
||||
std::cout << "OPTIMAL_BATCH_SIZE: " << property << std::endl;
|
||||
|
||||
@ -211,7 +211,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricFullNameAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
std::string property;
|
||||
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::device::full_name));
|
||||
ASSERT_NO_THROW(property = ie.get_property(target_device, ov::device::full_name));
|
||||
|
||||
std::cout << "FULL_DEVICE_NAME: " << property << std::endl;
|
||||
|
||||
@ -222,7 +222,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricTypeAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
ov::device::Type property = ov::device::Type::INTEGRATED;
|
||||
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::device::type));
|
||||
ASSERT_NO_THROW(property = ie.get_property(target_device, ov::device::type));
|
||||
|
||||
std::cout << "DEVICE_TYPE: " << property << std::endl;
|
||||
|
||||
@ -233,7 +233,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricGopsAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
std::map<ov::element::Type, float> properties;
|
||||
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::device::gops));
|
||||
ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::device::gops));
|
||||
|
||||
std::cout << "DEVICE_GOPS: " << std::endl;
|
||||
for (const auto& prop : properties) {
|
||||
@ -247,7 +247,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricCapabilitiesAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
std::vector<std::string> properties;
|
||||
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::device::capabilities));
|
||||
ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::device::capabilities));
|
||||
|
||||
std::cout << "OPTIMIZATION_CAPABILITIES: " << std::endl;
|
||||
for (const auto& prop : properties) {
|
||||
@ -261,7 +261,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricDeviceTotalMemSizeAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
uint64_t property;
|
||||
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::intel_gpu::device_total_mem_size));
|
||||
ASSERT_NO_THROW(property = ie.get_property(target_device, ov::intel_gpu::device_total_mem_size));
|
||||
|
||||
std::cout << "GPU_DEVICE_TOTAL_MEM_SIZE: " << property << std::endl;
|
||||
|
||||
@ -272,7 +272,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricUarchVersionAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
std::string property;
|
||||
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::intel_gpu::uarch_version));
|
||||
ASSERT_NO_THROW(property = ie.get_property(target_device, ov::intel_gpu::uarch_version));
|
||||
|
||||
std::cout << "GPU_UARCH_VERSION: " << property << std::endl;
|
||||
|
||||
@ -283,7 +283,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricExecutionUnitsCountAndPrintNoThrow)
|
||||
ov::Core ie;
|
||||
|
||||
int32_t property = 0;
|
||||
ASSERT_NO_THROW(property = ie.get_property(deviceName, ov::intel_gpu::execution_units_count));
|
||||
ASSERT_NO_THROW(property = ie.get_property(target_device, ov::intel_gpu::execution_units_count));
|
||||
|
||||
std::cout << "GPU_EXECUTION_UNITS_COUNT: " << property << std::endl;
|
||||
|
||||
@ -294,7 +294,7 @@ TEST_P(OVClassGetPropertyTest_GPU, GetMetricMemoryStatisticsAndPrintNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
std::map<std::string, uint64_t> properties;
|
||||
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
|
||||
std::cout << "GPU_MEMORY_STATISTICS: " << std::endl;
|
||||
for (const auto& prop : properties) {
|
||||
@ -308,16 +308,16 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetPerformanceModeNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
ov::hint::PerformanceMode defaultMode{};
|
||||
ASSERT_NO_THROW(defaultMode = ie.get_property(deviceName, ov::hint::performance_mode));
|
||||
ASSERT_NO_THROW(defaultMode = ie.get_property(target_device, ov::hint::performance_mode));
|
||||
|
||||
std::cout << "Default PERFORMANCE_HINT: \"" << defaultMode << "\"" << std::endl;
|
||||
|
||||
ie.set_property(deviceName, ov::hint::performance_mode(ov::hint::PerformanceMode::UNDEFINED));
|
||||
ASSERT_EQ(ov::hint::PerformanceMode::UNDEFINED, ie.get_property(deviceName, ov::hint::performance_mode));
|
||||
ie.set_property(deviceName, ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY));
|
||||
ASSERT_EQ(ov::hint::PerformanceMode::LATENCY, ie.get_property(deviceName, ov::hint::performance_mode));
|
||||
ie.set_property(deviceName, ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT));
|
||||
ASSERT_EQ(ov::hint::PerformanceMode::THROUGHPUT, ie.get_property(deviceName, ov::hint::performance_mode));
|
||||
ie.set_property(target_device, ov::hint::performance_mode(ov::hint::PerformanceMode::UNDEFINED));
|
||||
ASSERT_EQ(ov::hint::PerformanceMode::UNDEFINED, ie.get_property(target_device, ov::hint::performance_mode));
|
||||
ie.set_property(target_device, ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY));
|
||||
ASSERT_EQ(ov::hint::PerformanceMode::LATENCY, ie.get_property(target_device, ov::hint::performance_mode));
|
||||
ie.set_property(target_device, ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT));
|
||||
ASSERT_EQ(ov::hint::PerformanceMode::THROUGHPUT, ie.get_property(target_device, ov::hint::performance_mode));
|
||||
|
||||
OV_ASSERT_PROPERTY_SUPPORTED(ov::hint::performance_mode);
|
||||
}
|
||||
@ -326,12 +326,12 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetEnableProfilingNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
bool defaultValue = false;
|
||||
ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::enable_profiling));
|
||||
ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::enable_profiling));
|
||||
|
||||
std::cout << "Default PERF_COUNT: " << defaultValue << std::endl;
|
||||
|
||||
ie.set_property(deviceName, ov::enable_profiling(true));
|
||||
ASSERT_EQ(true, ie.get_property(deviceName, ov::enable_profiling));
|
||||
ie.set_property(target_device, ov::enable_profiling(true));
|
||||
ASSERT_EQ(true, ie.get_property(target_device, ov::enable_profiling));
|
||||
|
||||
|
||||
OV_ASSERT_PROPERTY_SUPPORTED(ov::enable_profiling);
|
||||
@ -356,19 +356,19 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetModelPriorityNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
ov::hint::Priority defaultValue;
|
||||
ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::hint::model_priority));
|
||||
ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::hint::model_priority));
|
||||
|
||||
std::cout << "Default PERF_COUNT: " << defaultValue << std::endl;
|
||||
|
||||
ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::HIGH));
|
||||
ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(deviceName, ov::hint::model_priority));
|
||||
ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::LOW));
|
||||
ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(deviceName, ov::hint::model_priority));
|
||||
ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(deviceName, ov::hint::model_priority(ov::hint::Priority::MEDIUM));
|
||||
ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(deviceName, ov::hint::model_priority));
|
||||
ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::HIGH));
|
||||
ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(target_device, ov::hint::model_priority));
|
||||
ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::LOW));
|
||||
ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(target_device, ov::hint::model_priority));
|
||||
ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(target_device, ov::hint::model_priority(ov::hint::Priority::MEDIUM));
|
||||
ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(target_device, ov::hint::model_priority));
|
||||
ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
|
||||
|
||||
OV_ASSERT_PROPERTY_SUPPORTED(ov::hint::model_priority);
|
||||
}
|
||||
@ -377,16 +377,16 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetQueuePriorityNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
ov::hint::Priority defaultValue;
|
||||
ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority));
|
||||
ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
|
||||
|
||||
std::cout << "Default GPU_QUEUE_PRIORITY: " << defaultValue << std::endl;
|
||||
|
||||
ie.set_property(deviceName, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::HIGH));
|
||||
ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(deviceName, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::LOW));
|
||||
ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(deviceName, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::MEDIUM));
|
||||
ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(deviceName, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(target_device, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::HIGH));
|
||||
ASSERT_EQ(ov::hint::Priority::HIGH, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(target_device, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::LOW));
|
||||
ASSERT_EQ(ov::hint::Priority::LOW, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
|
||||
ie.set_property(target_device, ov::intel_gpu::hint::queue_priority(ov::hint::Priority::MEDIUM));
|
||||
ASSERT_EQ(ov::hint::Priority::MEDIUM, ie.get_property(target_device, ov::intel_gpu::hint::queue_priority));
|
||||
|
||||
OV_ASSERT_PROPERTY_SUPPORTED(ov::intel_gpu::hint::queue_priority);
|
||||
}
|
||||
@ -395,16 +395,16 @@ TEST_P(OVClassGetPropertyTest_GPU, GetAndSetThrottleLevelNoThrow) {
|
||||
ov::Core ie;
|
||||
|
||||
ov::intel_gpu::hint::ThrottleLevel defaultValue;
|
||||
ASSERT_NO_THROW(defaultValue = ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle));
|
||||
ASSERT_NO_THROW(defaultValue = ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle));
|
||||
|
||||
std::cout << "Default GPU_QUEUE_THROTTLE: " << defaultValue << std::endl;
|
||||
|
||||
ie.set_property(deviceName, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::HIGH));
|
||||
ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::HIGH, ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle));
|
||||
ie.set_property(deviceName, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::LOW));
|
||||
ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::LOW, ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle));
|
||||
ie.set_property(deviceName, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::MEDIUM));
|
||||
ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::MEDIUM, ie.get_property(deviceName, ov::intel_gpu::hint::queue_throttle));
|
||||
ie.set_property(target_device, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::HIGH));
|
||||
ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::HIGH, ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle));
|
||||
ie.set_property(target_device, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::LOW));
|
||||
ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::LOW, ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle));
|
||||
ie.set_property(target_device, ov::intel_gpu::hint::queue_throttle(ov::intel_gpu::hint::ThrottleLevel::MEDIUM));
|
||||
ASSERT_EQ(ov::intel_gpu::hint::ThrottleLevel::MEDIUM, ie.get_property(target_device, ov::intel_gpu::hint::queue_throttle));
|
||||
|
||||
OV_ASSERT_PROPERTY_SUPPORTED(ov::intel_gpu::hint::queue_throttle);
|
||||
}
|
||||
@ -413,20 +413,20 @@ TEST_P(OVClassGetPropertyTest_GPU, CanSetDefaultValueBackToPluginNewAPI) {
|
||||
ov::Core ie;
|
||||
|
||||
std::vector<ov::PropertyName> properties;
|
||||
ASSERT_NO_THROW(properties = ie.get_property(deviceName, ov::supported_properties));
|
||||
ASSERT_NO_THROW(properties = ie.get_property(target_device, ov::supported_properties));
|
||||
|
||||
std::cout << "SUPPORTED_PROPERTIES:" << std::endl;
|
||||
for (const auto& property : properties) {
|
||||
ov::Any prop;
|
||||
if (property.is_mutable()) {
|
||||
std::cout << "RW: " << property << " ";
|
||||
ASSERT_NO_THROW(prop = ie.get_property(deviceName, property));
|
||||
ASSERT_NO_THROW(prop = ie.get_property(target_device, property));
|
||||
prop.print(std::cout);
|
||||
std::cout << std::endl;
|
||||
ASSERT_NO_THROW(ie.set_property(deviceName, {{property, prop}}));
|
||||
ASSERT_NO_THROW(ie.set_property(target_device, {{property, prop}}));
|
||||
} else {
|
||||
std::cout << "RO: " << property << " ";
|
||||
ASSERT_NO_THROW(prop = ie.get_property(deviceName, property));
|
||||
ASSERT_NO_THROW(prop = ie.get_property(target_device, property));
|
||||
prop.print(std::cout);
|
||||
std::cout << std::endl;
|
||||
}
|
||||
@ -446,7 +446,7 @@ TEST_P(OVClassGetMetricTest_GPU_OPTIMAL_BATCH_SIZE, GetMetricAndPrintNoThrow) {
|
||||
unsigned int p;
|
||||
|
||||
ov::AnyMap _options = {ov::hint::model(simpleNetwork)};
|
||||
ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::optimal_batch_size.name(), _options));
|
||||
ASSERT_NO_THROW(p = ie.get_property(target_device, ov::optimal_batch_size.name(), _options));
|
||||
|
||||
std::cout << "GPU device optimal batch size: " << p << std::endl;
|
||||
|
||||
@@ -465,7 +465,7 @@ TEST_P(OVClassGetMetricTest_GPU_MAX_BATCH_SIZE_DEFAULT, GetMetricAndPrintNoThrow
     unsigned int p;

     ov::AnyMap _options = {ov::hint::model(simpleNetwork)};
-    ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::max_batch_size.name(), _options));
+    ASSERT_NO_THROW(p = ie.get_property(target_device, ov::max_batch_size.name(), _options));

     std::cout << "GPU device max available batch size: " << p << std::endl;
@@ -482,7 +482,7 @@ TEST_P(OVClassGetMetricTest_GPU_MAX_BATCH_SIZE_STREAM_DEVICE_MEM, GetMetricAndPr
     SKIP_IF_CURRENT_TEST_IS_DISABLED()
     ov::Core ie;
     unsigned int p;
-    auto exec_net1 = ie.compile_model(simpleNetwork, deviceName);
+    auto exec_net1 = ie.compile_model(simpleNetwork, target_device);

     uint32_t n_streams = 2;
     int64_t available_device_mem_size = 1073741824;
@@ -490,7 +490,7 @@ TEST_P(OVClassGetMetricTest_GPU_MAX_BATCH_SIZE_STREAM_DEVICE_MEM, GetMetricAndPr
                            ov::num_streams(n_streams),
                            ov::intel_gpu::hint::available_device_mem(available_device_mem_size)};

-    ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::max_batch_size.name(), _options));
+    ASSERT_NO_THROW(p = ie.get_property(target_device, ov::max_batch_size.name(), _options));

     std::cout << "GPU device max available batch size: " << p << std::endl;
@@ -508,9 +508,9 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_DEFAULT, GetMetricAndPrintNoTh
     ov::Core ie;
     std::map<std::string, uint64_t> p;

-    auto exec_net = ie.compile_model(simpleNetwork, deviceName);
+    auto exec_net = ie.compile_model(simpleNetwork, target_device);

-    ASSERT_NO_THROW(p = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
+    ASSERT_NO_THROW(p = ie.get_property(target_device, ov::intel_gpu::memory_statistics));

     ASSERT_FALSE(p.empty());
     std::cout << "Memory Statistics: " << std::endl;
|
||||
std::map<std::string, uint64_t> t1;
|
||||
std::map<std::string, uint64_t> t2;
|
||||
|
||||
auto exec_net1 = ie.compile_model(simpleNetwork, deviceName);
|
||||
auto exec_net1 = ie.compile_model(simpleNetwork, target_device);
|
||||
|
||||
ASSERT_NO_THROW(t1 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(t1 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
|
||||
ASSERT_FALSE(t1.empty());
|
||||
for (auto &&kv : t1) {
|
||||
ASSERT_NE(kv.second, 0);
|
||||
}
|
||||
|
||||
auto exec_net2 = ie.compile_model(simpleNetwork, deviceName);
|
||||
auto exec_net2 = ie.compile_model(simpleNetwork, target_device);
|
||||
|
||||
ASSERT_NO_THROW(t2 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(t2 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
|
||||
ASSERT_FALSE(t2.empty());
|
||||
for (auto &&kv : t2) {
|
||||
@ -570,24 +570,24 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
|
||||
ov::Core ie;
|
||||
std::map<std::string, uint64_t> t1;
|
||||
|
||||
ASSERT_NO_THROW(t1 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(t1 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_TRUE(t1.empty());
|
||||
|
||||
{
|
||||
auto exec_net1 = ie.compile_model(simpleNetwork, deviceName);
|
||||
auto exec_net1 = ie.compile_model(simpleNetwork, target_device);
|
||||
|
||||
std::map<std::string, uint64_t> t2;
|
||||
ASSERT_NO_THROW(t2 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(t2 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
|
||||
ASSERT_FALSE(t2.empty());
|
||||
for (auto &&kv : t2) {
|
||||
ASSERT_NE(kv.second, 0);
|
||||
}
|
||||
{
|
||||
auto exec_net2 = ie.compile_model(actualNetwork, deviceName);
|
||||
auto exec_net2 = ie.compile_model(actualNetwork, target_device);
|
||||
|
||||
std::map<std::string, uint64_t> t3;
|
||||
ASSERT_NO_THROW(t3 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(t3 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
|
||||
ASSERT_FALSE(t3.empty());
|
||||
for (auto &&kv : t3) {
|
||||
@ -595,7 +595,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
|
||||
}
|
||||
}
|
||||
std::map<std::string, uint64_t> t4;
|
||||
ASSERT_NO_THROW(t4 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(t4 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
|
||||
ASSERT_FALSE(t4.empty());
|
||||
for (auto &&kv : t4) {
|
||||
@ -609,7 +609,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
|
||||
}
|
||||
}
|
||||
std::map<std::string, uint64_t> t5;
|
||||
ASSERT_NO_THROW(t5 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(t5 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
|
||||
ASSERT_FALSE(t5.empty());
|
||||
for (auto &&kv : t5) {
|
||||
@ -641,9 +641,9 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
|
||||
networks.emplace_back(simpleNetwork);
|
||||
networks.emplace_back(simpleNetwork);
|
||||
|
||||
auto exec_net1 = ie.compile_model(simpleNetwork, deviceName);
|
||||
auto exec_net1 = ie.compile_model(simpleNetwork, target_device);
|
||||
|
||||
ASSERT_NO_THROW(t1 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(t1 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
|
||||
ASSERT_FALSE(t1.empty());
|
||||
for (auto &&kv : t1) {
|
||||
@ -653,7 +653,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
|
||||
for (auto & thread : threads) {
|
||||
thread = std::thread([&](){
|
||||
auto value = counter++;
|
||||
exec_net_map[value] = ie.compile_model(networks[value], deviceName);
|
||||
exec_net_map[value] = ie.compile_model(networks[value], target_device);
|
||||
});
|
||||
}
|
||||
|
||||
@ -663,7 +663,7 @@ TEST_P(OVClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
|
||||
}
|
||||
}
|
||||
|
||||
ASSERT_NO_THROW(t2 = ie.get_property(deviceName, ov::intel_gpu::memory_statistics));
|
||||
ASSERT_NO_THROW(t2 = ie.get_property(target_device, ov::intel_gpu::memory_statistics));
|
||||
|
||||
ASSERT_FALSE(t2.empty());
|
||||
for (auto &&kv : t2) {
|
||||
|
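// Editor's sketch (not part of the diff): the shape of the multi-threaded compile
// pattern exercised above, with one safety tweak — writing into a pre-sized vector
// instead of a shared std::map so threads never race on the container. "GPU" is a
// placeholder device name.
#include <openvino/runtime/core.hpp>
#include <atomic>
#include <memory>
#include <thread>
#include <vector>

void compile_in_parallel(ov::Core& core, const std::vector<std::shared_ptr<ov::Model>>& models) {
    std::atomic<unsigned> counter{0};
    std::vector<ov::CompiledModel> compiled(models.size());
    std::vector<std::thread> threads(models.size());
    for (auto& thread : threads)
        thread = std::thread([&]() {
            auto value = counter++;  // each thread claims one distinct index
            compiled[value] = core.compile_model(models[value], "GPU");
        });
    for (auto& thread : threads)
        thread.join();
}
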
@ -16,7 +16,7 @@
using namespace BehaviorTestsDefinitions;

namespace {
// IE Class Common tests with <pluginName, deviceName params>
// IE Class Common tests with <pluginName, target_device params>
//

INSTANTIATE_TEST_SUITE_P(
@ -102,7 +102,7 @@ TEST_P(IEClassGetMetricTest_GPU_DEVICE_TOTAL_MEM_SIZE, GetMetricAndPrintNoThrow)
    InferenceEngine::Core ie;
    InferenceEngine::Parameter p;

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(DEVICE_TOTAL_MEM_SIZE)));
    uint64_t t = p;

    std::cout << "GPU device total memory size: " << t << std::endl;
@ -122,7 +122,7 @@ TEST_P(IEClassGetMetricTest_GPU_OPTIMAL_BATCH_SIZE, GetMetricAndPrintNoThrow) {
    InferenceEngine::Parameter p;

    std::map<std::string, InferenceEngine::Parameter> _options = {{"MODEL_PTR", simpleCnnNetwork.getFunction()}};
    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(OPTIMAL_BATCH_SIZE), _options).as<unsigned int>());
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(OPTIMAL_BATCH_SIZE), _options).as<unsigned int>());
    unsigned int t = p;

    std::cout << "GPU device optimal batch size: " << t << std::endl;
@ -142,7 +142,7 @@ TEST_P(IEClassGetMetricTest_GPU_MAX_BATCH_SIZE_DEFAULT, GetMetricAndPrintNoThrow
    InferenceEngine::Parameter p;

    std::map<std::string, InferenceEngine::Parameter> _options = {{"MODEL_PTR", simpleCnnNetwork.getFunction()}};
    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(MAX_BATCH_SIZE), _options).as<uint32_t>());
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(MAX_BATCH_SIZE), _options).as<uint32_t>());
    uint32_t t = p;

    std::cout << "GPU device max available batch size: " << t << std::endl;
@ -166,7 +166,7 @@ TEST_P(IEClassGetMetricTest_GPU_MAX_BATCH_SIZE_STREAM_DEVICE_MEM, GetMetricAndPr
    _options.insert(std::make_pair("GPU_THROUGHPUT_STREAMS", n_streams));
    _options.insert(std::make_pair("AVAILABLE_DEVICE_MEM_SIZE", available_device_mem_size));

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(MAX_BATCH_SIZE), _options).as<uint32_t>());
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(MAX_BATCH_SIZE), _options).as<uint32_t>());

    uint32_t t = p;

@ -186,7 +186,7 @@ TEST_P(IEClassGetMetricTest_GPU_UARCH_VERSION, GetMetricAndPrintNoThrow) {
    InferenceEngine::Core ie;
    InferenceEngine::Parameter p;

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(UARCH_VERSION)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(UARCH_VERSION)));
    std::string t = p;

    std::cout << "GPU device uarch: " << t << std::endl;
@ -205,7 +205,7 @@ TEST_P(IEClassGetMetricTest_GPU_EXECUTION_UNITS_COUNT, GetMetricAndPrintNoThrow)
    InferenceEngine::Core ie;
    InferenceEngine::Parameter p;

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(EXECUTION_UNITS_COUNT)));
    int t = p;

    std::cout << "GPU EUs count: " << t << std::endl;
@ -224,9 +224,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_DEFAULT, GetMetricAndPrintNoTh
    InferenceEngine::Core ie;
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exec_net = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exec_net = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    std::map<std::string, uint64_t> t = p;

    ASSERT_FALSE(t.empty());
@ -250,9 +250,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTIPLE_NETWORKS, GetMetricAn
    InferenceEngine::Core ie;
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    std::map<std::string, uint64_t> t1 = p;

    ASSERT_FALSE(t1.empty());
@ -260,9 +260,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTIPLE_NETWORKS, GetMetricAn
        ASSERT_NE(kv.second, 0);
    }

    InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    std::map<std::string, uint64_t> t2 = p;

    ASSERT_FALSE(t2.empty());
@ -288,14 +288,14 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
    InferenceEngine::Core ie;
    InferenceEngine::Parameter p;

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    std::map<std::string, uint64_t> t1 = p;
    ASSERT_TRUE(t1.empty());

    {
        InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, deviceName);
        InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, target_device);

        ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
        ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
        std::map<std::string, uint64_t> t2 = p;

        ASSERT_FALSE(t2.empty());
@ -303,9 +303,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
            ASSERT_NE(kv.second, 0);
        }
        {
            InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(actualCnnNetwork, deviceName);
            InferenceEngine::ExecutableNetwork exec_net2 = ie.LoadNetwork(actualCnnNetwork, target_device);

            ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
            ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
            std::map<std::string, uint64_t> t3 = p;

            ASSERT_FALSE(t3.empty());
@ -313,7 +313,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
            ASSERT_NE(kv.second, 0);
        }
        }
        ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
        ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
        std::map<std::string, uint64_t> t4 = p;

        ASSERT_FALSE(t4.empty());
@ -327,7 +327,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_CHECK_VALUES, GetMetricAndPrin
        }
    }
    }
    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    std::map<std::string, uint64_t> t5 = p;

    ASSERT_FALSE(t5.empty());
@ -358,9 +358,9 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
    networks.emplace_back(simpleCnnNetwork);
    networks.emplace_back(simpleCnnNetwork);

    InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exec_net1 = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    std::map<std::string, uint64_t> t1 = p;

    ASSERT_FALSE(t1.empty());
@ -371,7 +371,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
    for (auto & thread : threads) {
        thread = std::thread([&](){
            auto value = counter++;
            exec_net_map[value] = ie.LoadNetwork(networks[value], deviceName);
            exec_net_map[value] = ie.LoadNetwork(networks[value], target_device);
        });
    }

@ -381,7 +381,7 @@ TEST_P(IEClassGetMetricTest_GPU_MEMORY_STATISTICS_MULTI_THREADS, GetMetricAndPri
        }
    }

    ASSERT_NO_THROW(p = ie.GetMetric(deviceName, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    ASSERT_NO_THROW(p = ie.GetMetric(target_device, GPU_METRIC_KEY(MEMORY_STATISTICS)));
    std::map<std::string, uint64_t> t2 = p;

    ASSERT_FALSE(t2.empty());

@ -31,7 +31,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork_RemoteContext) {
    networks.emplace_back(InferenceEngine::CNNNetwork(ngraph::builder::subgraph::makeSplitMultiConvConcat()));

    auto ocl_instance = std::make_shared<OpenCL>();
    ie.SetConfig(config, deviceName);
    ie.SetConfig(config, target_device);
    runParallel([&] () {
        auto value = counter++;
        auto remote_context = make_shared_context(ie, CommonTestUtils::DEVICE_GPU, ocl_instance->_context.get());

@ -95,5 +95,6 @@ std::vector<std::string> disabledTestPatterns() {
        R"(.*smoke_VirtualPlugin_BehaviorTests.*LoadedRemoteContext.*)",
        // Issue: CVS-88667 - Need to verify hetero interoperability
        R"(.*nightly_OVClassHeteroExecutableNetworlGetMetricTest.*SUPPORTED_(CONFIG_KEYS|METRICS).*)",
        R"(.*VirtualPlugin.*BehaviorTests.*OVHoldersTest.*LoadedTensor.*target_device=AUTO.*)",
    };
}

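// Editor's sketch (not part of the diff): how a pattern list like the one above is
// typically consumed — the running test's full name is matched against each regex
// and the test is skipped on a hit. Simplified; the helper name is hypothetical.
#include <gtest/gtest.h>
#include <regex>
#include <string>
#include <vector>

bool currentTestIsDisabled(const std::vector<std::string>& patterns) {
    const auto* info = ::testing::UnitTest::GetInstance()->current_test_info();
    const std::string fullName = std::string(info->test_suite_name()) + "." + info->name();
    for (const auto& pattern : patterns)
        if (std::regex_match(fullName, std::regex(pattern)))
            return true;  // matches a disabled pattern, so the caller should skip
    return false;
}
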
@ -20,14 +20,14 @@ std::pair<std::string, std::string> plugins[] = {
};

INSTANTIATE_TEST_SUITE_P(smoke_OVClassImportExportTestP,
        OVClassImportExportTestP,
        OVClassExecutableNetworkImportExportTestP,
        ::testing::Values(std::string(CommonTestUtils::DEVICE_MYRIAD),
                          "HETERO:" + std::string(CommonTestUtils::DEVICE_MYRIAD)));

#if defined(ENABLE_INTEL_CPU) && ENABLE_INTEL_CPU

INSTANTIATE_TEST_SUITE_P(smoke_OVClassImportExportTestP_HETERO_CPU,
        OVClassImportExportTestP,
        OVClassExecutableNetworkImportExportTestP,
        ::testing::Values("HETERO:" + std::string(CommonTestUtils::DEVICE_MYRIAD) + ",CPU"));
#endif

@ -22,7 +22,7 @@ std::pair<std::string, std::string> plugins[] = {
};

//
// IE Class Common tests with <pluginName, deviceName params>
// IE Class Common tests with <pluginName, target_device params>
//

INSTANTIATE_TEST_SUITE_P(OVClassBasicTestP_smoke, OVClassBasicTestP, ::testing::ValuesIn(plugins));
@ -39,7 +39,7 @@ TEST_P(OVClassNetworkTestP_VPU_GetMetric, smoke_OptimizationCapabilitiesReturnsF
    ov::Core ie;
    OV_ASSERT_PROPERTY_SUPPORTED(ov::device::capabilities)
    std::vector<std::string> device_capabilities;
    ASSERT_NO_THROW(device_capabilities = ie.get_property(deviceName, ov::device::capabilities));
    ASSERT_NO_THROW(device_capabilities = ie.get_property(target_device, ov::device::capabilities));
    ASSERT_EQ(device_capabilities.size(), 2);
    ASSERT_NE(std::find(device_capabilities.begin(), device_capabilities.end(), ov::device::capability::EXPORT_IMPORT),
              device_capabilities.end());

@ -21,7 +21,7 @@ std::pair<std::string, std::string> plugins[] = {
};

//
// IE Class Common tests with <pluginName, deviceName params>
// IE Class Common tests with <pluginName, target_device params>
//

INSTANTIATE_TEST_SUITE_P(
@ -43,7 +43,7 @@ TEST_P(IEClassNetworkTestP_VPU_GetMetric, smoke_OptimizationCapabilitiesReturnsF
    ASSERT_METRIC_SUPPORTED_IE(METRIC_KEY(OPTIMIZATION_CAPABILITIES))

    InferenceEngine::Parameter optimizationCapabilitiesParameter;
    ASSERT_NO_THROW(optimizationCapabilitiesParameter = ie.GetMetric(deviceName, METRIC_KEY(OPTIMIZATION_CAPABILITIES)));
    ASSERT_NO_THROW(optimizationCapabilitiesParameter = ie.GetMetric(target_device, METRIC_KEY(OPTIMIZATION_CAPABILITIES)));

    const auto optimizationCapabilities = optimizationCapabilitiesParameter.as<std::vector<std::string>>();
    ASSERT_EQ(optimizationCapabilities.size(), 2);

@ -9,51 +9,31 @@
#include "functional_test_utils/plugin_cache.hpp"
#include "common_test_utils/file_utils.hpp"
#include "openvino/util/file_util.hpp"
#include "functional_test_utils/summary/api_summary.hpp"

namespace BehaviorTestsUtils {

using namespace CommonTestUtils;

typedef std::tuple<
        InferenceEngine::Precision,         // Network precision
        std::string,                        // Device name
        std::map<std::string, std::string>  // Config
> BehaviorBasicParams;
class IEInferRequestTestBase : public ov::test::behavior::APIBaseTest {
private:
    void set_api_entity() override {
        api_entity = ov::test::utils::ov_entity::ie_infer_request;
    };
};

class BehaviorTestsBasic : public testing::WithParamInterface<BehaviorBasicParams>,
                           public CommonTestUtils::TestsCommon {
public:
    static std::string getTestCaseName(testing::TestParamInfo<BehaviorBasicParams> obj) {
        InferenceEngine::Precision netPrecision;
        std::string targetDevice;
        std::map<std::string, std::string> configuration;
        std::tie(netPrecision, targetDevice, configuration) = obj.param;
        std::ostringstream result;
        result << "netPRC=" << netPrecision.name() << "_";
        result << "targetDevice=" << targetDevice;
        if (!configuration.empty()) {
            result << "config=" << configuration;
        }
        return result.str();
    }
class IEExecutableNetworkTestBase : public ov::test::behavior::APIBaseTest {
private:
    void set_api_entity() override {
        api_entity = ov::test::utils::ov_entity::ie_executable_network;
    };
};

    void SetUp() override {
        SKIP_IF_CURRENT_TEST_IS_DISABLED()
        std::tie(netPrecision, targetDevice, configuration) = this->GetParam();
        function = ngraph::builder::subgraph::makeConvPoolRelu();
    }

    void TearDown() override {
        if (!configuration.empty()) {
            PluginCache::get().reset();
        }
    }

    std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
    std::shared_ptr<ngraph::Function> function;
    InferenceEngine::Precision netPrecision;
    std::string targetDevice;
    std::map<std::string, std::string> configuration;
class IEPluginTestBase : public ov::test::behavior::APIBaseTest {
private:
    void set_api_entity() override {
        api_entity = ov::test::utils::ov_entity::ie_plugin;
    };
};

typedef std::tuple<
@ -62,13 +42,14 @@ typedef std::tuple<
> InferRequestParams;

class InferRequestTests : public testing::WithParamInterface<InferRequestParams>,
                          public CommonTestUtils::TestsCommon {
                          public IEInferRequestTestBase {
public:
    static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
        std::string targetDevice;
        std::map<std::string, std::string> configuration;
        std::tie(targetDevice, configuration) = obj.param;
        std::ostringstream result;
        std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
        result << "targetDevice=" << targetDevice << "_";
        if (!configuration.empty()) {
            for (auto &configItem : configuration) {
@ -79,19 +60,21 @@ public:
    }

    void SetUp() override {
        std::tie(target_device, configuration) = this->GetParam();
        // Skip test according to plugin-specific disabledTestPatterns() (if any)
        SKIP_IF_CURRENT_TEST_IS_DISABLED()
        std::tie(targetDevice, configuration) = this->GetParam();
        function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice);
        APIBaseTest::SetUp();
        function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
        cnnNet = InferenceEngine::CNNNetwork(function);
        // Load CNNNetwork to target plugins
        execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
        execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    }

    void TearDown() override {
        if (!configuration.empty()) {
            PluginCache::get().reset();
        }
        APIBaseTest::TearDown();
    }

protected:
@ -99,8 +82,7 @@ protected:
    InferenceEngine::ExecutableNetwork execNet;
    std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
    std::shared_ptr<ngraph::Function> function;
    std::string targetDevice;
    std::map<std::string, std::string> configuration;
};

inline InferenceEngine::Core createIECoreWithTemplate() {
@ -118,7 +100,7 @@ class IEClassNetworkTest : public ov::test::behavior::OVClassNetworkTest {
public:
    InferenceEngine::CNNNetwork actualCnnNetwork, simpleCnnNetwork, multinputCnnNetwork, ksoCnnNetwork;

    void SetUp() override {
    void SetUp() {
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        OVClassNetworkTest::SetUp();
        // Generic network
@ -132,13 +114,73 @@ public:
    }
};

class IEClassBaseTestP : public IEClassNetworkTest, public ::testing::WithParamInterface<std::string> {
class IEClassBaseTestP : public IEClassNetworkTest,
                         public ::testing::WithParamInterface<std::string>,
                         public IEPluginTestBase {
public:
    std::string deviceName;
    void SetUp() override {
        target_device = GetParam();
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        APIBaseTest::SetUp();
        IEClassNetworkTest::SetUp();
        deviceName = GetParam();
    }
};
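// Editor's note (not part of the diff): with several base classes, gtest only runs
// the most-derived SetUp(), so fixtures like IEClassBaseTestP above must chain each
// base explicitly. A stripped-down sketch of that shape (names are illustrative):
#include <gtest/gtest.h>

struct NetworkSide {                              // plain helper base, not a ::testing::Test
    void SetUp() { /* build test models */ }
};

struct ReportSide : public ::testing::Test {      // reporting/crash-guard base
    void SetUp() override { /* record pessimistic status, arm crash handler */ }
};

class Fixture : public NetworkSide, public ReportSide {
    void SetUp() override {
        ReportSide::SetUp();   // reporting side first, so a crash is attributed
        NetworkSide::SetUp();  // then the model helpers
    }
};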

class IEExecNetClassBaseTestP : public IEClassNetworkTest,
                                public ::testing::WithParamInterface<std::string>,
                                public IEExecutableNetworkTestBase {
public:
    void SetUp() override {
        target_device = GetParam();
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        APIBaseTest::SetUp();
        IEClassNetworkTest::SetUp();
    }
};

typedef std::tuple<
        InferenceEngine::Precision,         // Network precision
        std::string,                        // Device name
        std::map<std::string, std::string>  // Config
> BehaviorBasicParams;

class BehaviorTestsBasicBase : public testing::WithParamInterface<BehaviorBasicParams> {
public:
    static std::string getTestCaseName(testing::TestParamInfo<BehaviorBasicParams> obj) {
        InferenceEngine::Precision netPrecision;
        std::string targetDevice;
        std::map<std::string, std::string> configuration;
        std::tie(netPrecision, targetDevice, configuration) = obj.param;
        std::replace(targetDevice.begin(), targetDevice.end(), ':', '_');
        std::ostringstream result;
        result << "netPRC=" << netPrecision.name() << "_";
        result << "targetDevice=" << targetDevice << "_";
        if (!configuration.empty()) {
            result << "config=" << configuration;
        }
        return result.str();
    }

    std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
    std::shared_ptr<ngraph::Function> function;
    InferenceEngine::Precision netPrecision;
    std::map<std::string, std::string> configuration;
};

class BehaviorTestsBasic : public BehaviorTestsBasicBase,
                           public IEPluginTestBase {
protected:
    void SetUp() override {
        std::tie(netPrecision, target_device, configuration) = this->GetParam();
        SKIP_IF_CURRENT_TEST_IS_DISABLED()
        APIBaseTest::SetUp();
        function = ngraph::builder::subgraph::makeConvPoolRelu();
    }
    void TearDown() override {
        if (!configuration.empty()) {
            PluginCache::get().reset();
        }
        APIBaseTest::TearDown();
    }
};
} // namespace BehaviorTestsUtils

@ -4,6 +4,13 @@

#pragma once

#include <signal.h>
#include <setjmp.h>

#ifdef _WIN32
#include <process.h>
#endif

#include <gtest/gtest.h>

#include "ngraph_functions/subgraph_builders.hpp"
@ -11,13 +18,15 @@
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/test_constants.hpp"
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/crash_handler.hpp"
#include "common_test_utils/file_utils.hpp"
#include "openvino/util/file_util.hpp"

#include "functional_test_utils/plugin_cache.hpp"
#include "functional_test_utils/ov_plugin_cache.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "functional_test_utils/blob_utils.hpp"
#include "functional_test_utils/summary/api_summary.hpp"
#include "openvino/util/file_util.hpp"

namespace ov {
namespace test {
@ -33,18 +42,87 @@ inline std::shared_ptr<ngraph::Function> getDefaultNGraphFunctionForTheDevice(st
    return ngraph::builder::subgraph::makeConvPoolRelu(inputShape, ngPrc);
}

class APIBaseTest : public CommonTestUtils::TestsCommon {
private:
    // place to jump in case of a crash
    int jmpRes = 0;
    // if a crash occurs, the jump is taken and the run continues with the next test
    const std::unique_ptr<CommonTestUtils::CrashHandler> crashHandler = std::unique_ptr<CommonTestUtils::CrashHandler>(new CommonTestUtils::CrashHandler());

protected:
    std::string target_device = "";
    ov::test::utils::ov_entity api_entity = ov::test::utils::ov_entity::undefined;
    ov::test::utils::ApiSummary& api_summary = ov::test::utils::ApiSummary::getInstance();

public:
    APIBaseTest() = default;

    virtual void set_api_entity() { api_entity = ov::test::utils::ov_entity::undefined; }

    void SetUp() override {
        set_api_entity();
        api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::CRASHED);
#ifdef _WIN32
        jmpRes = setjmp(CommonTestUtils::env);
#else
        jmpRes = sigsetjmp(CommonTestUtils::env, 0);
#endif
        if (jmpRes == CommonTestUtils::JMP_STATUS::ok) {
            crashHandler->StartTimer();
        } else if (jmpRes == CommonTestUtils::JMP_STATUS::alarmErr) {
            api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::HANGED);
            GTEST_FAIL();
        }
    }

    void TearDown() override {
        if (api_entity == ov::test::utils::ov_entity::undefined) {
            set_api_entity();
        }
        if (this->HasFailure()) {
            api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::FAILED);
        } else if (this->IsSkipped()) {
            api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::SKIPPED);
        } else {
            api_summary.updateStat(api_entity, target_device, ov::test::utils::PassRate::Statuses::PASSED);
        }
    }
};
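// Editor's note (not part of the diff): APIBaseTest records CRASHED into the summary
// *before* the body runs, then overwrites that status in TearDown(). If the process
// dies mid-test, the pre-recorded CRASHED entry is what survives in the report.
// A minimal runnable sketch of the same idea, with a hypothetical Summary type:
#include <functional>
#include <iostream>
#include <string>

struct Summary {  // stand-in for ApiSummary: just prints each status transition
    void update(const std::string& test, const std::string& status) {
        std::cout << test << ": " << status << std::endl;
    }
};

// Pessimistic accounting: record CRASHED before the body runs, overwrite afterwards.
void run_guarded(Summary& s, const std::string& name, const std::function<void()>& body) {
    s.update(name, "CRASHED");  // survives if the process dies inside body()
    body();
    s.update(name, "PASSED");   // reached only on normal completion
}

int main() {
    Summary s;
    run_guarded(s, "ExampleTest", [] { /* test body */ });
}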

class OVInferRequestTestBase : public APIBaseTest {
private:
    void set_api_entity() override {
        api_entity = ov::test::utils::ov_entity::ov_infer_request;
    };
};

class OVCompiledNetworkTestBase : public APIBaseTest {
private:
    void set_api_entity() override {
        api_entity = ov::test::utils::ov_entity::ov_compiled_model;
    };
};

class OVPluginTestBase : public APIBaseTest {
private:
    void set_api_entity() override {
        api_entity = ov::test::utils::ov_entity::ov_plugin;
    };
};

typedef std::tuple<
        std::string,  // Device name
        ov::AnyMap    // Config
> InferRequestParams;

class OVInferRequestTests : public testing::WithParamInterface<InferRequestParams>,
                            public CommonTestUtils::TestsCommon {
                            public OVInferRequestTestBase {
public:
    static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
        std::string targetDevice;
        ov::AnyMap configuration;
        std::tie(targetDevice, configuration) = obj.param;
        std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
        std::ostringstream result;
        result << "targetDevice=" << targetDevice << "_";
        if (!configuration.empty()) {
@ -58,21 +136,23 @@ public:
    }

    void SetUp() override {
        std::tie(target_device, configuration) = this->GetParam();
        // Skip test according to plugin-specific disabledTestPatterns() (if any)
        SKIP_IF_CURRENT_TEST_IS_DISABLED()
        std::tie(targetDevice, configuration) = this->GetParam();
        function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice);
        APIBaseTest::SetUp();
        function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
        ov::AnyMap params;
        for (auto&& v : configuration) {
            params.emplace(v.first, v.second);
        }
        execNet = core->compile_model(function, targetDevice, params);
        execNet = core->compile_model(function, target_device, params);
    }

    void TearDown() override {
        if (!configuration.empty()) {
            utils::PluginCache::get().reset();
            PluginCache::get().reset();
        }
        APIBaseTest::TearDown();
    }

protected:
@ -95,11 +175,11 @@ inline ov::Core createCoreWithTemplate() {
    return core;
}

class OVClassNetworkTest : public ::testing::Test {
class OVClassNetworkTest {
public:
    std::shared_ptr<ngraph::Function> actualNetwork, simpleNetwork, multinputNetwork, ksoNetwork;

    void SetUp() override {
    void SetUp() {
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        // Generic network
        actualNetwork = ngraph::builder::subgraph::makeSplitConvConcat();
@ -129,18 +209,33 @@ public:
    }
};

class OVClassBaseTestP : public OVClassNetworkTest, public ::testing::WithParamInterface<std::string> {
class OVClassBaseTestP : public OVClassNetworkTest,
                         public ::testing::WithParamInterface<std::string>,
                         public OVPluginTestBase {
public:
    std::string deviceName;

    void SetUp() override {
        target_device = GetParam();
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        APIBaseTest::SetUp();
        // TODO: Remove it after fixing issue 69529
        // w/a for myriad (can't store 2 caches simultaneously)
        PluginCache::get().reset();
        OVClassNetworkTest::SetUp();
    }
};

class OVCompiledModelClassBaseTestP : public OVClassNetworkTest,
                                      public ::testing::WithParamInterface<std::string>,
                                      public OVCompiledNetworkTestBase {
public:
    void SetUp() override {
        target_device = GetParam();
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        APIBaseTest::SetUp();
        // TODO: Remove it after fixing issue 69529
        // w/a for myriad (can't store 2 caches simultaneously)
        PluginCache::get().reset();

        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        OVClassNetworkTest::SetUp();
        deviceName = GetParam();
    }
};

@ -148,16 +243,17 @@ using PriorityParams = std::tuple<
        std::string,  // Device name
        ov::AnyMap    // device priority Configuration key
>;
class OVClassExecutableNetworkGetMetricTest_Priority : public ::testing::Test, public ::testing::WithParamInterface<PriorityParams> {
class OVClassExecutableNetworkGetMetricTest_Priority : public ::testing::WithParamInterface<PriorityParams>,
                                                       public OVCompiledNetworkTestBase {
protected:
    std::string deviceName;
    ov::AnyMap configuration;
    std::shared_ptr<ngraph::Function> simpleNetwork;

public:
    void SetUp() override {
        std::tie(target_device, configuration) = GetParam();
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        std::tie(deviceName, configuration) = GetParam();
        APIBaseTest::SetUp();
        simpleNetwork = ngraph::builder::subgraph::makeSingleConv();
    }
};
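// Editor's sketch (not part of the diff): how a PriorityParams suite is typically
// instantiated; the device strings and priority values below are placeholders.
const std::vector<PriorityParams> priorityParams = {
    {"MULTI:CPU,GPU", {ov::device::priorities("CPU")}},
    {"AUTO:CPU,GPU",  {ov::device::priorities("GPU")}},
};
INSTANTIATE_TEST_SUITE_P(smoke_Example, OVClassExecutableNetworkGetMetricTest_Priority,
                         ::testing::ValuesIn(priorityParams));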

@ -18,18 +18,17 @@
namespace ExecutionGraphTests {

class ExecGraphUniqueNodeNames : public testing::WithParamInterface<LayerTestsUtils::basicParams>,
                                 public CommonTestUtils::TestsCommon {
                                 public BehaviorTestsUtils::IEExecutableNetworkTestBase {
public:
    static std::string getTestCaseName(testing::TestParamInfo<LayerTestsUtils::basicParams> obj);
    void SetUp() override;
    void TearDown() override;

protected:
    std::string targetDevice;
    std::shared_ptr<ngraph::Function> fnPtr;
};

class ExecGraphSerializationTest : public CommonTestUtils::TestsCommon, public testing::WithParamInterface<std::string> {
class ExecGraphSerializationTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase,
                                   public testing::WithParamInterface<std::string> {
public:
    static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);
    void SetUp() override;
@ -56,6 +55,6 @@ protected:
    std::pair<bool, std::string> compare_docs(const pugi::xml_document &doc1,
                                              const pugi::xml_document &doc2);

    std::string deviceName, m_out_xml_path, m_out_bin_path;
    std::string m_out_xml_path, m_out_bin_path;
};
} // namespace ExecutionGraphTests

@ -9,15 +9,16 @@
#include "openvino/core/model.hpp"

namespace BehaviorTestsDefinitions {
class ExecutableNetworkBaseTest : public testing::WithParamInterface<BehaviorTestsUtils::InferRequestParams>,
                                  public CommonTestUtils::TestsCommon {
class ExecutableNetworkBaseTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase,
                                  public testing::WithParamInterface<BehaviorTestsUtils::InferRequestParams> {
public:
    static std::string getTestCaseName(testing::TestParamInfo<BehaviorTestsUtils::InferRequestParams> obj) {
        std::string targetDevice;
        std::string target_device;
        std::map<std::string, std::string> configuration;
        std::tie(targetDevice, configuration) = obj.param;
        std::tie(target_device, configuration) = obj.param;
        std::ostringstream result;
        result << "targetDevice=" << targetDevice << "_";
        std::replace(target_device.begin(), target_device.end(), ':', '.');
        result << "target_device=" << target_device << "_";
        if (!configuration.empty()) {
            using namespace CommonTestUtils;
            result << "config=" << configuration;
@ -26,55 +27,49 @@ public:
    }

    void SetUp() override {
        std::tie(target_device, configuration) = this->GetParam();
        // Skip test according to plugin-specific disabledTestPatterns() (if any)
        SKIP_IF_CURRENT_TEST_IS_DISABLED()
        std::tie(targetDevice, configuration) = this->GetParam();
        ie = PluginCache::get().ie(targetDevice);
        function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice);
        ov::test::behavior::APIBaseTest::SetUp();
        ie = PluginCache::get().ie(target_device);
        function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
        cnnNet = InferenceEngine::CNNNetwork(function);
    }

    void TearDown() override {
        if (!configuration.empty()) {
            PluginCache::get().reset();
        }
    }

protected:
    InferenceEngine::CNNNetwork cnnNet;
    std::shared_ptr<InferenceEngine::Core> ie;
    std::shared_ptr<ngraph::Function> function;
    std::string targetDevice;
    std::map<std::string, std::string> configuration;
};
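// Editor's sketch (not part of the diff): a typical instantiation of the fixture
// above; the CPU device and the empty config map are placeholders.
INSTANTIATE_TEST_SUITE_P(smoke_Example, ExecutableNetworkBaseTest,
                         ::testing::Combine(::testing::Values(std::string(CommonTestUtils::DEVICE_CPU)),
                                            ::testing::Values(std::map<std::string, std::string>{})),
                         ExecutableNetworkBaseTest::getTestCaseName);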

TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutable) {
    ASSERT_NO_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration));
    ASSERT_NO_THROW(auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration));
}

TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableWithIncorrectConfig) {
    std::map<std::string, std::string> incorrectConfig = {{ "abc", "def" }};
    ASSERT_ANY_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, incorrectConfig));
    ASSERT_ANY_THROW(auto execNet = ie->LoadNetwork(cnnNet, target_device, incorrectConfig));
}

TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCreateInferRequest) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    ASSERT_NO_THROW(auto req = execNet.CreateInferRequest());
}

TEST_P(ExecutableNetworkBaseTest, checkGetExecGraphInfoIsNotNullptr) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    InferenceEngine::CNNNetwork execGraph = execNet.GetExecGraphInfo();
    ASSERT_NE(execGraph.getFunction(), nullptr);
}

TEST_P(ExecutableNetworkBaseTest, checkGetMetric) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    ASSERT_NO_THROW(execNet.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
}

TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckConfig) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    for (const auto& configItem : configuration) {
        InferenceEngine::Parameter param;
        ASSERT_NO_THROW(param = execNet.GetConfig(configItem.first));
@ -84,7 +79,7 @@ TEST_P(ExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckCo
}

TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNet) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice);
    auto execNet = ie->LoadNetwork(cnnNet, target_device);
    std::map<std::string, InferenceEngine::Parameter> config;
    for (const auto& confItem : configuration) {
        config.insert({confItem.first, InferenceEngine::Parameter(confItem.second)});
@ -93,7 +88,7 @@ TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNet) {
}

TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetWithIncorrectConfig) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice);
    auto execNet = ie->LoadNetwork(cnnNet, target_device);
    std::map<std::string, std::string> incorrectConfig = {{ "abc", "def" }};
    std::map<std::string, InferenceEngine::Parameter> config;
    for (const auto& confItem : incorrectConfig) {
@ -103,7 +98,7 @@ TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetWithIncorrectConfig) {
}

TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetAndCheckConfigAndCheck) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice);
    auto execNet = ie->LoadNetwork(cnnNet, target_device);
    std::map<std::string, InferenceEngine::Parameter> config;
    for (const auto& confItem : configuration) {
        config.insert({confItem.first, InferenceEngine::Parameter(confItem.second)});
@ -120,7 +115,7 @@ TEST_P(ExecutableNetworkBaseTest, canSetConfigToExecNetAndCheckConfigAndCheck) {
TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworks) {
    std::vector<InferenceEngine::ExecutableNetwork> vec;
    for (auto i = 0; i < 2; i++) {
        ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, targetDevice, configuration)));
        ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, target_device, configuration)));
        ASSERT_NE(nullptr, cnnNet.getFunction());
    }
}
@ -128,24 +123,24 @@ TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworks) {
TEST_P(ExecutableNetworkBaseTest, CanCreateTwoExeNetworksAndCheckFunction) {
    std::vector<InferenceEngine::ExecutableNetwork> vec;
    for (auto i = 0; i < 2; i++) {
        ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, targetDevice, configuration)));
        ASSERT_NO_THROW(vec.push_back(ie->LoadNetwork(cnnNet, target_device, configuration)));
        ASSERT_NE(nullptr, vec[i].GetExecGraphInfo().getFunction());
        ASSERT_NE(vec.begin()->GetExecGraphInfo().getFunction(), vec[i].GetExecGraphInfo().getFunction());
    }
}

TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfo) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    ASSERT_NO_THROW(auto inInfo = execNet.GetInputsInfo());
}

TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfo) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    ASSERT_NO_THROW(auto outInfo = execNet.GetOutputsInfo());
}

TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    auto inInfo = execNet.GetInputsInfo();
    auto inCnnInfo = cnnNet.getInputsInfo();
    for (const auto& itemInInfo : inCnnInfo) {
@ -154,7 +149,7 @@ TEST_P(ExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) {
}

TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) {
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    auto outInfo = execNet.GetOutputsInfo();
    auto outCnnInfo = cnnNet.getOutputsInfo();
    for (const auto& itemOutInfo : outCnnInfo) {
@ -165,7 +160,7 @@ TEST_P(ExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) {
TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) {
    InferenceEngine::CNNNetwork execGraph;
    // Load CNNNetwork to target plugins
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo());
    std::map<std::string, int> originalLayersMap;
    for (const auto &layer : function->get_ops()) {
@ -215,7 +210,7 @@ TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) {
TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) {
    InferenceEngine::CNNNetwork execGraph;
    // Load CNNNetwork to target plugins
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo());
    std::map<std::string, int> originalLayersMap;
    for (const auto &layer : function->get_ops()) {
@ -278,7 +273,7 @@ TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoSerialization) {

    InferenceEngine::CNNNetwork execGraph;
    // Load CNNNetwork to target plugins
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo());
    ASSERT_NO_THROW(execGraph.serialize(out_xml_path, out_bin_path));
    CommonTestUtils::removeIRFiles(out_xml_path, out_bin_path);
@ -287,7 +282,7 @@ TEST_P(ExecutableNetworkBaseTest, CheckExecGraphInfoSerialization) {
TEST_P(ExecutableNetworkBaseTest, canExport) {
    auto ts = CommonTestUtils::GetTimestamp();
    std::string modelName = GetTestName().substr(0, CommonTestUtils::maxFileNameLength) + "_" + ts;
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
    ASSERT_NO_THROW(execNet.Export(modelName));
    ASSERT_TRUE(CommonTestUtils::fileExists(modelName + ".xml"));
    ASSERT_TRUE(CommonTestUtils::fileExists(modelName + ".bin"));
@ -300,14 +295,29 @@ TEST_P(ExecutableNetworkBaseTest, pluginDoesNotChangeOriginalNetwork) {
    compare_functions(cnnNet.getFunction(), referenceNetwork);
}

using ExecNetSetPrecision = BehaviorTestsUtils::BehaviorTestsBasic;
class ExecNetSetPrecision : public BehaviorTestsUtils::BehaviorTestsBasicBase,
                            public BehaviorTestsUtils::IEExecutableNetworkTestBase {
protected:
    void SetUp() override {
        std::tie(netPrecision, target_device, configuration) = this->GetParam();
        SKIP_IF_CURRENT_TEST_IS_DISABLED()
        APIBaseTest::SetUp();
        function = ngraph::builder::subgraph::makeConvPoolRelu();
    }
    void TearDown() override {
        if (!configuration.empty()) {
            PluginCache::get().reset();
        }
        APIBaseTest::TearDown();
    }
};
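// Editor's note (not part of the diff): the TearDown() in ExecNetSetPrecision above
// resets PluginCache only when a non-empty config was applied — presumably so that
// per-test plugin configuration does not leak into later tests that reuse the
// cached Core instance.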

TEST_P(ExecNetSetPrecision, canSetInputPrecisionForNetwork) {
    InferenceEngine::CNNNetwork cnnNet(function);
    InferenceEngine::InputsDataMap inputs_info = cnnNet.getInputsInfo();
    ASSERT_EQ(1u, inputs_info.size());
    inputs_info.begin()->second->setPrecision(netPrecision);
    ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration));
    ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration));
}

TEST_P(ExecNetSetPrecision, canSetOutputPrecisionForNetwork) {
@ -315,7 +325,7 @@ TEST_P(ExecNetSetPrecision, canSetOutputPrecisionForNetwork) {
    InferenceEngine::OutputsDataMap outputs_info = cnnNet.getOutputsInfo();
    ASSERT_EQ(outputs_info.size(), 1u);
    outputs_info.begin()->second->setPrecision(netPrecision);
    ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration));
    ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration));
}
TEST_P(ExecutableNetworkBaseTest, loadIncorrectV10Model) {
    // Skip test according to plugin-specific disabledTestPatterns() (if any)
@ -337,7 +347,7 @@ TEST_P(ExecutableNetworkBaseTest, loadIncorrectV10Model) {
        function->set_friendly_name("SimpleReLU");
    }
    InferenceEngine::CNNNetwork cnnNet(function);
    EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration));
    EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration));
}

TEST_P(ExecutableNetworkBaseTest, loadIncorrectV11Model) {
@ -360,7 +370,7 @@ TEST_P(ExecutableNetworkBaseTest, loadIncorrectV11Model) {
        function->set_friendly_name("SimpleReLU");
    }
    InferenceEngine::CNNNetwork cnnNet(function);
    EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration));
    EXPECT_NO_THROW(ie->LoadNetwork(cnnNet, target_device, configuration));
}

} // namespace BehaviorTestsDefinitions

@ -8,6 +8,7 @@

#include "base/behavior_test_utils.hpp"
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/file_utils.hpp"
#include "common_test_utils/test_assertions.hpp"

#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
@ -32,18 +33,19 @@ namespace BehaviorTestsDefinitions {
}

class IEClassExecutableNetworkGetMetricTestForSpecificConfig :
        public BehaviorTestsUtils::IEExecutableNetworkTestBase,
        public BehaviorTestsUtils::IEClassNetworkTest,
        public ::testing::WithParamInterface<std::tuple<std::string, std::pair<std::string, std::string>>> {
protected:
    std::string deviceName;
    std::string configKey;
    std::string configValue;
public:
    void SetUp() override {
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        IEClassNetworkTest::SetUp();
        deviceName = std::get<0>(GetParam());
        target_device = std::get<0>(GetParam());
        std::tie(configKey, configValue) = std::get<1>(GetParam());
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        ov::test::behavior::APIBaseTest::SetUp();
        IEClassNetworkTest::SetUp();
    }
};

@ -51,17 +53,18 @@ public:
// Hetero Executable network case
//
class IEClassHeteroExecutableNetworkGetMetricTest :
        public BehaviorTestsUtils::IEExecutableNetworkTestBase,
        public BehaviorTestsUtils::IEClassNetworkTest,
        public ::testing::WithParamInterface<std::string> {
protected:
    std::string deviceName;
    std::string heteroDeviceName;
public:
    void SetUp() override {
        target_device = GetParam();
        heteroDeviceName = CommonTestUtils::DEVICE_HETERO + std::string(":") + GetParam() + std::string(",") + CommonTestUtils::DEVICE_CPU;
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
        ov::test::behavior::APIBaseTest::SetUp();
        IEClassNetworkTest::SetUp();
        deviceName = GetParam();
        heteroDeviceName = CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + std::string(",") + CommonTestUtils::DEVICE_CPU;
    }
};

@ -70,13 +73,14 @@ public:
// ImportExportNetwork
//

using IEClassImportExportTestP = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassGetMetricP = BehaviorTestsUtils::IEExecNetClassBaseTestP;
using IEClassImportExportTestP = IEClassGetMetricP;

TEST_P(IEClassImportExportTestP, smoke_ImportNetworkThrowsIfNoDeviceName) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    std::stringstream strm;
    InferenceEngine::ExecutableNetwork executableNetwork;
    ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName));
    ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, target_device));
    ASSERT_NO_THROW(executableNetwork.Export(strm));

    IE_SUPPRESS_DEPRECATED_START
@ -88,9 +92,9 @@ TEST_P(IEClassImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    std::stringstream strm;
    InferenceEngine::ExecutableNetwork executableNetwork;
    ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName));
    ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(actualCnnNetwork, target_device));
    ASSERT_NO_THROW(executableNetwork.Export(strm));
    ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, deviceName));
    ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, target_device));
    ASSERT_NO_THROW(executableNetwork.CreateInferRequest());
}

@ -99,27 +103,28 @@ TEST_P(IEClassImportExportTestP, smoke_ExportUsingFileNameImportFromStreamNoThro
    InferenceEngine::ExecutableNetwork executableNetwork;
    std::string fileName{"ExportedNetwork"};
    {
        ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName));
        ASSERT_NO_THROW(executableNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device));
        ASSERT_NO_THROW(executableNetwork.Export(fileName));
    }
    {
    {
        std::ifstream strm(fileName, std::ifstream::binary | std::ifstream::in);
        ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, deviceName));
        ASSERT_NO_THROW(executableNetwork = ie.ImportNetwork(strm, target_device));
    }
    ASSERT_EQ(0, remove(fileName.c_str()));
    }
    ASSERT_NO_THROW(executableNetwork.CreateInferRequest());
    CommonTestUtils::removeFile(fileName);
}

using IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassExecutableNetworkGetMetricTest_NETWORK_NAME = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassExecutableNetworkGetConfigTest = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassExecutableNetworkSetConfigTest = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassExecutableNetworkGetConfigTest = BehaviorTestsUtils::IEClassBaseTestP;
using IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = IEClassGetMetricP;
using IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = IEClassGetMetricP;
using IEClassExecutableNetworkGetMetricTest_NETWORK_NAME = IEClassGetMetricP;
using IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = IEClassGetMetricP;
using IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported = IEClassGetMetricP;
using IEClassExecutableNetworkGetConfigTest = IEClassGetMetricP;
using IEClassExecutableNetworkSetConfigTest = IEClassGetMetricP;
using IEClassExecutableNetworkGetConfigTest = IEClassGetMetricP;

//
// ExecutableNetwork GetMetric / GetConfig
@ -131,7 +136,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoT
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
    std::vector<std::string> configValues = p;
@ -149,7 +154,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS)));
    std::vector<std::string> metricValues = p;
@ -167,7 +172,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_NO_THROW(p = exeNetwork.GetMetric(EXEC_NETWORK_METRIC_KEY(NETWORK_NAME)));
    std::string networkname = p;
@ -181,7 +186,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, G
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_NO_THROW(p = exeNetwork.GetMetric(EXEC_NETWORK_METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)));
    unsigned int value = p;
@ -195,7 +200,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow)
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_THROW(p = exeNetwork.GetMetric("unsupported_metric"), InferenceEngine::Exception);
}
@ -204,14 +209,14 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
    std::vector<std::string> configValues = p;

    for (auto &&confKey : configValues) {
        InferenceEngine::Parameter defaultValue;
        ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey));
        ASSERT_NO_THROW(defaultValue = ie.GetConfig(target_device, confKey));
        ASSERT_FALSE(defaultValue.empty());
    }
}
@ -220,7 +225,7 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_THROW(p = exeNetwork.GetConfig("unsupported_config"), InferenceEngine::Exception);
}
@ -229,7 +234,7 @@ TEST_P(IEClassExecutableNetworkSetConfigTest, SetConfigThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
    InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);

    ASSERT_THROW(exeNetwork.SetConfig({{"unsupported_config", "some_value"}}), InferenceEngine::Exception);
}
@ -238,7 +243,7 @@ TEST_P(IEClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
|
||||
InferenceEngine::Parameter p;
|
||||
|
||||
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
|
||||
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
|
||||
|
||||
ASSERT_NO_THROW(exeNetwork.SetConfig({{configKey, configValue}}));
|
||||
ASSERT_NO_THROW(p = exeNetwork.GetConfig(configKey));
|
||||
@ -249,7 +254,7 @@ TEST_P(IEClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) {
|
||||
TEST_P(IEClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) {
|
||||
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
|
||||
|
||||
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
|
||||
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
|
||||
|
||||
ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngine::Exception);
|
||||
}
|
||||
@ -258,10 +263,10 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigNoEmptyNoThrow) {
|
||||
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
|
||||
InferenceEngine::Parameter p;
|
||||
|
||||
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
|
||||
ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
|
||||
std::vector<std::string> devConfigValues = p;
|
||||
|
||||
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, deviceName);
|
||||
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleCnnNetwork, target_device);
|
||||
|
||||
ASSERT_NO_THROW(p = exeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
|
||||
std::vector<std::string> execConfigValues = p;
|
||||
@ -280,14 +285,14 @@ using IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = IEClas
|
||||
using IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS = IEClassHeteroExecutableNetworkGetMetricTest;
|
||||
using IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME = IEClassHeteroExecutableNetworkGetMetricTest;
|
||||
using IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK = IEClassHeteroExecutableNetworkGetMetricTest;
|
||||
using IEClassExecutableNetworkGetMetricTest = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassExecutableNetworkGetMetricTest = IEClassGetMetricP;
|
||||
|
||||
TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoThrow) {
|
||||
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
|
||||
InferenceEngine::Parameter pHetero, pDevice;
|
||||
|
||||
InferenceEngine::ExecutableNetwork heteroExeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName);
|
||||
InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName);
|
||||
InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, target_device);
|
||||
|
||||
ASSERT_NO_THROW(pHetero = heteroExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
|
||||
ASSERT_NO_THROW(pDevice = deviceExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
|
||||
@ -320,7 +325,7 @@ TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricN
|
||||
InferenceEngine::Parameter pHetero, pDevice;
|
||||
|
||||
InferenceEngine::ExecutableNetwork heteroExeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName);
|
||||
InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName);
|
||||
InferenceEngine::ExecutableNetwork deviceExeNetwork = ie.LoadNetwork(actualCnnNetwork, target_device);
|
||||
|
||||
ASSERT_NO_THROW(pHetero = heteroExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS)));
|
||||
ASSERT_NO_THROW(pDevice = deviceExeNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS)));
|
||||
@ -369,13 +374,13 @@ TEST_P(IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK, GetMetricNoT
|
||||
InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
|
||||
InferenceEngine::Parameter p;
|
||||
|
||||
setHeteroNetworkAffinity(deviceName);
|
||||
setHeteroNetworkAffinity(target_device);
|
||||
|
||||
InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(actualCnnNetwork, heteroDeviceName);
|
||||
|
||||
ASSERT_NO_THROW(p = exeNetwork.GetConfig("TARGET_FALLBACK"));
|
||||
std::string targets = p;
|
||||
auto expectedTargets = deviceName + "," + CommonTestUtils::DEVICE_CPU;
|
||||
auto expectedTargets = target_device + "," + CommonTestUtils::DEVICE_CPU;
|
||||
|
||||
std::cout << "Exe network fallback targets: " << targets << std::endl;
|
||||
ASSERT_EQ(expectedTargets, targets);
|
||||
|
@ -5,9 +5,11 @@
#pragma once

#include "common_test_utils/test_common.hpp"
#include "common_test_utils/file_utils.hpp"
#include "functional_test_utils/plugin_cache.hpp"
#include "ngraph_functions/subgraph_builders.hpp"
#include "common_test_utils/file_utils.hpp"

#include "base/behavior_test_utils.hpp"

namespace BehaviorTestsDefinitions {

@ -16,13 +18,12 @@ typedef std::tuple<
std::string> // Target device name
LocaleParams;

class CustomLocaleTest : public CommonTestUtils::TestsCommon,
class CustomLocaleTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase,
public ::testing::WithParamInterface<LocaleParams> {
protected:
std::shared_ptr<ngraph::Function> function;
std::string localeName;
std::string testName;
std::string deviceName;

void SetUp() override;
public:

@ -118,7 +118,7 @@ TEST_P(InferRequestCallbackTests, ReturnResultNotReadyFromWaitInAsyncModeForTooS
function = SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38);
cnnNet = InferenceEngine::CNNNetwork(function);
// Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
InferenceEngine::InferRequest req;
ASSERT_NO_THROW(req = execNet.CreateInferRequest());
@ -145,7 +145,7 @@ TEST_P(InferRequestCallbackTests, ImplDoseNotCopyCallback) {
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
InferenceEngine::CNNNetwork cnnNet(function);
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
auto req = execNet.CreateInferRequest();
{
auto somePtr = std::make_shared<int>(42);

@ -10,18 +10,7 @@

namespace BehaviorTestsDefinitions {

class InferRequestCancellationTests : public BehaviorTestsUtils::InferRequestTests {
public:
void SetUp() override {
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(targetDevice, configuration) = this->GetParam();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice, {1, 3, 640, 640});
cnnNet = InferenceEngine::CNNNetwork(function);
// Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
}
};
using InferRequestCancellationTests = BehaviorTestsUtils::InferRequestTests;

TEST_P(InferRequestCancellationTests, canCancelAsyncRequest) {
// Create InferRequest

@ -18,35 +18,37 @@ typedef std::tuple<
> InferRequestParams;

class InferRequestConfigTest : public testing::WithParamInterface<InferRequestParams>,
public CommonTestUtils::TestsCommon {
public BehaviorTestsUtils::IEInferRequestTestBase {
public:
void SetUp() override {
std::tie(streamExecutorNumber, target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(streamExecutorNumber, targetDevice, configuration) = this->GetParam();
APIBaseTest::SetUp();
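// NB: the parameters above are parsed before APIBaseTest::SetUp() so that target_device is already set when the base class runs.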
// Create CNNNetwork from ngraph::Function
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice);
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
cnnNet = InferenceEngine::CNNNetwork(function);
}

static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
std::string targetDevice;
size_t streamExecutorNumber;
std::map<std::string, std::string> configuration;
std::tie(streamExecutorNumber, targetDevice, configuration) = obj.param;
std::ostringstream result;
result << "targetDevice=" << targetDevice << "_";
result << "streamExecutorNumber=" << targetDevice << "_";
if (!configuration.empty()) {
result << "config=" << configuration;
}
return result.str();
}

void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
APIBaseTest::TearDown();
}

static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
std::string target_device;
size_t streamExecutorNumber;
std::map<std::string, std::string> configuration;
std::tie(streamExecutorNumber, target_device, configuration) = obj.param;
std::ostringstream result;
result << "target_device=" << target_device << "_";
result << "streamExecutorNumber=" << streamExecutorNumber << "_";
if (!configuration.empty()) {
result << "config=" << configuration;
}
return result.str();
}

protected:
@ -54,20 +56,22 @@ protected:
InferenceEngine::ExecutableNetwork execNet;
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
std::map<std::string, std::string> configuration;
size_t streamExecutorNumber;

void set_api_entity() override { api_entity = ov::test::utils::ov_entity::ie_infer_request; }
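// Presumably used by the API conformance report to attribute this suite to the IE InferRequest entity.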

inline InferenceEngine::InferRequest createInferRequestWithConfig() {
// Load config
configuration.insert({CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS), CONFIG_VALUE(YES)});
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
ie->SetConfig(configuration, targetDevice);
if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) {
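// AUTO, MULTI, HETERO and BATCH are virtual devices, so the config is applied directly only to real plugins.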
ie->SetConfig(configuration, target_device);
}
// Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
auto req = execNet.CreateInferRequest();
return req;
}
@ -76,9 +80,10 @@ protected:
TEST_P(InferRequestConfigTest, canSetExclusiveAsyncRequests) {
ASSERT_EQ(0ul, InferenceEngine::executorManager()->getExecutorsNumber());
ASSERT_NO_THROW(createInferRequestWithConfig());
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) {
ASSERT_EQ(streamExecutorNumber, InferenceEngine::executorManager()->getExecutorsNumber());
}
}
@ -86,9 +91,10 @@ TEST_P(InferRequestConfigTest, canSetExclusiveAsyncRequests) {
TEST_P(InferRequestConfigTest, withoutExclusiveAsyncRequests) {
ASSERT_EQ(0u, InferenceEngine::executorManager()->getExecutorsNumber());
ASSERT_NO_THROW(createInferRequestWithConfig());
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) {
ASSERT_EQ(streamExecutorNumber, InferenceEngine::executorManager()->getExecutorsNumber());
}
}
@ -101,20 +107,21 @@ TEST_P(InferRequestConfigTest, ReusableCPUStreamsExecutor) {
// Load config
std::map<std::string, std::string> config = {{CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS), CONFIG_VALUE(NO)}};
config.insert(configuration.begin(), configuration.end());
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
ASSERT_NO_THROW(ie->SetConfig(config, targetDevice));
if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_BATCH) == std::string::npos) {
ASSERT_NO_THROW(ie->SetConfig(config, target_device));
}
// Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, config);
execNet = ie->LoadNetwork(cnnNet, target_device, config);
execNet.CreateInferRequest();
if ((targetDevice == CommonTestUtils::DEVICE_MYRIAD) ||
(targetDevice == CommonTestUtils::DEVICE_KEEMBAY)) {
if ((target_device == CommonTestUtils::DEVICE_MYRIAD) ||
(target_device == CommonTestUtils::DEVICE_KEEMBAY)) {
ASSERT_EQ(1u, InferenceEngine::executorManager()->getExecutorsNumber());
ASSERT_EQ(0u, InferenceEngine::executorManager()->getIdleCPUStreamsExecutorsNumber());
} else if ((targetDevice == CommonTestUtils::DEVICE_AUTO) ||
(targetDevice == CommonTestUtils::DEVICE_MULTI)) {
} else if ((target_device == CommonTestUtils::DEVICE_AUTO) ||
(target_device == CommonTestUtils::DEVICE_MULTI)) {
} else {
ASSERT_EQ(0u, InferenceEngine::executorManager()->getExecutorsNumber());
ASSERT_GE(2u, InferenceEngine::executorManager()->getIdleCPUStreamsExecutorsNumber());

@ -11,8 +11,8 @@
#include "shared_test_classes/subgraph/basic_lstm.hpp"

namespace BehaviorTestsDefinitions {
using InferRequestIOBBlobTest = BehaviorTestsUtils::InferRequestTests;
using namespace CommonTestUtils;
using InferRequestIOBBlobTest = BehaviorTestsUtils::InferRequestTests;

TEST_P(InferRequestIOBBlobTest, CanCreateInferRequest) {
// Create InferRequest
@ -331,16 +331,25 @@ TEST_P(InferRequestIOBBlobTest, canInferWithGetOut) {
ASSERT_NO_THROW(InferenceEngine::Blob::Ptr outputBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first));
}

class InferRequestIOBBlobSetPrecisionTest : public BehaviorTestsUtils::BehaviorTestsBasic {
public:
void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(netPrecision, targetDevice, configuration) = this->GetParam();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice);
cnnNet = InferenceEngine::CNNNetwork(function);
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
}
class InferRequestIOBBlobSetPrecisionTest : public BehaviorTestsUtils::BehaviorTestsBasicBase,
public BehaviorTestsUtils::IEInferRequestTestBase {
protected:
void SetUp() override {
std::tie(netPrecision, target_device, configuration) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
cnnNet = InferenceEngine::CNNNetwork(function);
execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
}

void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
APIBaseTest::TearDown();
}

InferenceEngine::ExecutableNetwork execNet;
InferenceEngine::CNNNetwork cnnNet;
};
@ -386,16 +395,16 @@ typedef std::tuple<
> InferRequestIOBBlobSetLayoutParams;

class InferRequestIOBBlobSetLayoutTest : public testing::WithParamInterface<InferRequestIOBBlobSetLayoutParams>,
public CommonTestUtils::TestsCommon {
public ov::test::behavior::APIBaseTest {
public:
static std::string getTestCaseName(testing::TestParamInfo<InferRequestIOBBlobSetLayoutParams> obj) {
InferenceEngine::Layout layout;
std::string targetDevice;
std::string target_device;
std::map<std::string, std::string> configuration;
std::tie(layout, targetDevice, configuration) = obj.param;
std::tie(layout, target_device, configuration) = obj.param;
std::ostringstream result;
result << "layout=" << layout << "_";
result << "targetDevice=" << targetDevice << "_";
result << "target_device=" << target_device << "_";
if (!configuration.empty()) {
result << "config=" << configuration;
}
@ -403,17 +412,18 @@ public:
}

void SetUp() override {
std::tie(layout, target_device, configuration) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(layout, targetDevice, configuration) = this->GetParam();
function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function);
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
}

void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
APIBaseTest::TearDown();
}

std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
@ -421,7 +431,6 @@ public:
InferenceEngine::Layout layout;
InferenceEngine::CNNNetwork cnnNet;
InferenceEngine::ExecutableNetwork execNet;
std::string targetDevice;
std::map<std::string, std::string> configuration;
};

@ -6,7 +6,7 @@


#include "common_test_utils/test_common.hpp"
#include <ie_core.hpp>
#include "base/behavior_test_utils.hpp"

namespace BehaviorTestsDefinitions {
typedef std::tuple<
@ -16,7 +16,7 @@ typedef std::tuple<
std::map<std::string, std::string>> // device configuration
memoryStateParams;

class InferRequestVariableStateTest : public CommonTestUtils::TestsCommon,
class InferRequestVariableStateTest : public BehaviorTestsUtils::IEInferRequestTestBase,
public testing::WithParamInterface<memoryStateParams> {
protected:
InferenceEngine::CNNNetwork net;

@ -10,15 +10,15 @@ namespace BehaviorTestsDefinitions {
class InferRequestPerfCountersTest : public BehaviorTestsUtils::InferRequestTests {
public:
void SetUp() override {
std::tie(target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(targetDevice, configuration) = this->GetParam();
ie = PluginCache::get().ie(targetDevice);
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice);
APIBaseTest::SetUp();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
cnnNet = InferenceEngine::CNNNetwork(function);
configuration.insert({ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES });
// Load CNNNetwork to target plugins
execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
}
};

@ -19,13 +19,14 @@ using InferRequestSetBlobByTypeParams = std::tuple<
>;

class InferRequestSetBlobByType : public testing::WithParamInterface<InferRequestSetBlobByTypeParams>,
public CommonTestUtils::TestsCommon {
public BehaviorTestsUtils::IEInferRequestTestBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<InferRequestSetBlobByTypeParams> obj) {
FuncTestUtils::BlobType BlobType;
std::string targetDevice;
std::map<std::string, std::string> configuration;
std::tie(BlobType, targetDevice, configuration) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');

std::ostringstream result;
result << "BlobType=" << BlobType << "_";
@ -35,14 +36,15 @@ public:
}

void SetUp() override {
std::map<std::string, std::string> config;
std::tie(blobType, target_device, config) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::map<std::string, std::string> config;
std::tie(blobType, targetDevice, config) = this->GetParam();
APIBaseTest::SetUp();
std::shared_ptr<ngraph::Function> function = ngraph::builder::subgraph::makeConvPoolRelu(
{4, 3, 6, 8}, ngraph::element::Type_t::u8);
InferenceEngine::CNNNetwork cnnNetwork(function);
executableNetwork = ie->LoadNetwork(cnnNetwork, targetDevice, config);
executableNetwork = ie->LoadNetwork(cnnNetwork, target_device, config);
}

protected:
@ -52,18 +54,18 @@ protected:
return true;
case FuncTestUtils::BlobType::Compound:
case FuncTestUtils::BlobType::I420:
// case FuncTestUtils::BlobType::Remote:
case FuncTestUtils::BlobType::Remote:
case FuncTestUtils::BlobType::NV12:
return false;
case FuncTestUtils::BlobType::Batched: {
std::vector<std::string> supported_metrics = ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_METRICS));
std::vector<std::string> supported_metrics = ie->GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS));
if (std::find(supported_metrics.begin(), supported_metrics.end(),
METRIC_KEY(OPTIMIZATION_CAPABILITIES)) == supported_metrics.end()) {
return false;
}

std::vector<std::string> optimization_caps =
ie->GetMetric(targetDevice, METRIC_KEY(OPTIMIZATION_CAPABILITIES));
ie->GetMetric(target_device, METRIC_KEY(OPTIMIZATION_CAPABILITIES));
return std::find(optimization_caps.begin(), optimization_caps.end(),
METRIC_VALUE(BATCHED_BLOB)) != optimization_caps.end();
}
@ -72,7 +74,6 @@ protected:
}
}

std::string targetDevice;
FuncTestUtils::BlobType blobType;
InferenceEngine::ExecutableNetwork executableNetwork;
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();

@ -23,7 +23,8 @@ using SetBlobParams = std::tuple<InferenceEngine::Precision, // precision in C
setType, // type for which blob is set
std::string>; // Device name

class SetBlobTest : public testing::WithParamInterface<SetBlobParams>, virtual public LayerTestsUtils::LayerTestsCommon {
class SetBlobTest : public testing::WithParamInterface<SetBlobParams>,
virtual public LayerTestsUtils::LayerTestsCommon {
public:
static std::string getTestCaseName(testing::TestParamInfo<SetBlobParams> obj);
void Infer() override;

@ -25,13 +25,14 @@ typedef std::tuple<
> OVExecGraphImportExportTestParams;

class OVExecGraphImportExportTest : public testing::WithParamInterface<OVExecGraphImportExportTestParams>,
public CommonTestUtils::TestsCommon {
public OVCompiledNetworkTestBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<OVExecGraphImportExportTestParams> obj) {
ov::element::Type_t elementType;
std::string targetDevice;
ov::AnyMap configuration;
std::tie(elementType, targetDevice, configuration) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
std::ostringstream result;
result << "targetDevice=" << targetDevice << "_";
result << "elementType=" << elementType << "_";
@ -48,27 +49,28 @@ class OVExecGraphImportExportTest : public testing::WithParamInterface<OVExecGra
}

void SetUp() override {
std::tie(elementType, target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(elementType, targetDevice, configuration) = this->GetParam();
APIBaseTest::SetUp();
}

void TearDown() override {
if (!configuration.empty()) {
utils::PluginCache::get().reset();
}
APIBaseTest::TearDown();
}

protected:
std::shared_ptr<ov::Core> core = utils::PluginCache::get().core();
std::string targetDevice;
ov::AnyMap configuration;
ov::element::Type_t elementType;
std::shared_ptr<ov::Model> function;
};

TEST_P(OVExecGraphImportExportTest, importExportedFunction) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") {
if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
}

@ -96,12 +98,12 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunction) {
ngraph::ParameterVector{param1, param2});
function->set_friendly_name("SingleRuLU");
}
execNet = core->compile_model(function, targetDevice, configuration);
execNet = core->compile_model(function, target_device, configuration);

std::stringstream strm;
execNet.export_model(strm);

ov::CompiledModel importedExecNet = core->import_model(strm, targetDevice, configuration);
ov::CompiledModel importedExecNet = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 2);
EXPECT_EQ(function->inputs().size(), importedExecNet.inputs().size());
EXPECT_THROW(importedExecNet.input(), ov::Exception);
@ -151,7 +153,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunction) {
}

TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") {
if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
}

@ -167,11 +169,11 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) {
function->set_friendly_name("ParamResult");
}

auto execNet = core->compile_model(function, targetDevice, configuration);
auto execNet = core->compile_model(function, target_device, configuration);
std::stringstream strm;
execNet.export_model(strm);

ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration);
ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 1);
EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size());
EXPECT_NO_THROW(importedCompiledModel.input());
@ -191,7 +193,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunctionParameterResultOnly) {
}

TEST_P(OVExecGraphImportExportTest, importExportedFunctionConstantResultOnly) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") {
if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
}

@ -207,11 +209,11 @@ TEST_P(OVExecGraphImportExportTest, importExportedFunctionConstantResultOnly) {
function->set_friendly_name("ConstResult");
}

auto execNet = core->compile_model(function, targetDevice, configuration);
auto execNet = core->compile_model(function, target_device, configuration);
std::stringstream strm;
execNet.export_model(strm);

ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration);
ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 0);
EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size());
EXPECT_THROW(importedCompiledModel.input(), ov::Exception);
@ -286,20 +288,20 @@ TEST_P(OVExecGraphImportExportTest, readFromV10IR) {
EXPECT_NO_THROW(function->input("in1")); // remove if read_model does not change function names
EXPECT_NO_THROW(function->output("round")); // remove if read_model does not change function names

ov::CompiledModel execNet = core->compile_model(function, targetDevice, configuration);
ov::CompiledModel execNet = core->compile_model(function, target_device, configuration);
EXPECT_EQ(execNet.inputs().size(), 1);
EXPECT_EQ(execNet.outputs().size(), 1);
EXPECT_NO_THROW(execNet.input("in1"));
EXPECT_NO_THROW(execNet.output("round"));

if (targetDevice == "MULTI" || targetDevice == "AUTO") {
if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
}

std::stringstream strm;
execNet.export_model(strm);

ov::CompiledModel importedExecNet = core->import_model(strm, targetDevice, configuration);
ov::CompiledModel importedExecNet = core->import_model(strm, target_device, configuration);
EXPECT_EQ(importedExecNet.inputs().size(), 1);
EXPECT_EQ(importedExecNet.outputs().size(), 1);
EXPECT_NO_THROW(importedExecNet.input("in1"));
@ -327,7 +329,7 @@ static std::map<std::string, std::string> any_copy(const ov::AnyMap& params) {
}

TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") {
if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
}

@ -356,12 +358,12 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) {
ngraph::ParameterVector{param1, param2});
function->set_friendly_name("SingleReLU");
}
execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), targetDevice, any_copy(configuration));
execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), target_device, any_copy(configuration));

std::stringstream strm;
execNet.Export(strm);

ov::CompiledModel importedExecNet = core->import_model(strm, targetDevice, configuration);
ov::CompiledModel importedExecNet = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 2);
EXPECT_EQ(function->inputs().size(), importedExecNet.inputs().size());
EXPECT_THROW(importedExecNet.input(), ov::Exception);
@ -392,7 +394,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetwork) {
}

TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") {
if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
}

@ -410,7 +412,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly)
ngraph::ParameterVector{param});
function->set_friendly_name("ParamResult");
}
execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), targetDevice, any_copy(configuration));
execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), target_device, any_copy(configuration));

auto inputPrecision = InferenceEngine::details::convertPrecision(execNet.GetInputsInfo().at("param")->getPrecision());
auto outputPrecision = InferenceEngine::details::convertPrecision(execNet.GetOutputsInfo().at("param")->getPrecision());
@ -418,7 +420,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly)
std::stringstream strm;
execNet.Export(strm);

ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration);
ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 1);
EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size());
EXPECT_NO_THROW(importedCompiledModel.input());
@ -438,7 +440,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkParameterResultOnly)
}

TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") {
if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
}

@ -456,14 +458,14 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) {
ngraph::ParameterVector{});
function->set_friendly_name("ConstResult");
}
execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), targetDevice, any_copy(configuration));
execNet = ie->LoadNetwork(InferenceEngine::CNNNetwork(function), target_device, any_copy(configuration));

auto outputPrecision = InferenceEngine::details::convertPrecision(execNet.GetOutputsInfo().at("constant")->getPrecision());

std::stringstream strm;
execNet.Export(strm);

ov::CompiledModel importedCompiledModel = core->import_model(strm, targetDevice, configuration);
ov::CompiledModel importedCompiledModel = core->import_model(strm, target_device, configuration);
EXPECT_EQ(function->inputs().size(), 0);
EXPECT_EQ(function->inputs().size(), importedCompiledModel.inputs().size());
EXPECT_THROW(importedCompiledModel.input(), ov::Exception);
@ -483,7 +485,7 @@ TEST_P(OVExecGraphImportExportTest, importExportedIENetworkConstantResultOnly) {
}

TEST_P(OVExecGraphImportExportTest, ieImportExportedFunction) {
if (targetDevice == "MULTI" || targetDevice == "AUTO") {
if (target_device == CommonTestUtils::DEVICE_MULTI || target_device == CommonTestUtils::DEVICE_AUTO) {
GTEST_SKIP() << "MULTI / AUTO does not support import / export" << std::endl;
}

@ -512,12 +514,12 @@ TEST_P(OVExecGraphImportExportTest, ieImportExportedFunction) {
ngraph::ParameterVector{param1, param2});
function->set_friendly_name("SingleReLU");
}
execNet = core->compile_model(function, targetDevice, configuration);
execNet = core->compile_model(function, target_device, configuration);

std::stringstream strm;
execNet.export_model(strm);

InferenceEngine::ExecutableNetwork importedExecNet = ie->ImportNetwork(strm, targetDevice, any_copy(configuration));
InferenceEngine::ExecutableNetwork importedExecNet = ie->ImportNetwork(strm, target_device, any_copy(configuration));
EXPECT_EQ(function->inputs().size(), 2);
EXPECT_EQ(function->inputs().size(), importedExecNet.GetInputsInfo().size());
EXPECT_NO_THROW(importedExecNet.GetInputsInfo()["param1"]);

@ -17,12 +17,14 @@ namespace test {
namespace behavior {

class OVExecutableNetworkBaseTest : public testing::WithParamInterface<InferRequestParams>,
public CommonTestUtils::TestsCommon {
public OVCompiledNetworkTestBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<InferRequestParams> obj) {
std::string targetDevice;
ov::AnyMap configuration;
std::tie(targetDevice, configuration) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');

std::ostringstream result;
result << "targetDevice=" << targetDevice << "_";
if (!configuration.empty()) {
@ -36,16 +38,18 @@ public:
}

void SetUp() override {
std::tie(target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(targetDevice, configuration) = this->GetParam();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(targetDevice);
APIBaseTest::SetUp();
function = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
}

void TearDown() override {
if (!configuration.empty()) {
utils::PluginCache::get().reset();
}
APIBaseTest::TearDown();
}

bool compareTensors(const ov::Tensor& t1, const ov::Tensor& t2) {
@ -70,13 +74,14 @@ public:

protected:
std::shared_ptr<ov::Core> core = utils::PluginCache::get().core();
std::string targetDevice;
ov::AnyMap configuration;
std::shared_ptr<ov::Model> function;

void set_api_entity() override { api_entity = ov::test::utils::ov_entity::ov_compiled_model; }
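// Presumably tags this suite as covering the ov::CompiledModel API in the conformance report.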
|
||||
};
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutable) {
|
||||
EXPECT_NO_THROW(auto execNet = core->compile_model(function, targetDevice, configuration));
|
||||
EXPECT_NO_THROW(auto execNet = core->compile_model(function, target_device, configuration));
|
||||
}
|
||||
|
||||
TEST(OVExecutableNetworkBaseTest, smoke_LoadNetworkToDefaultDeviceNoThrow) {
|
||||
@ -88,27 +93,27 @@ TEST(OVExecutableNetworkBaseTest, smoke_LoadNetworkToDefaultDeviceNoThrow) {
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableWithIncorrectConfig) {
|
||||
ov::AnyMap incorrectConfig = {{"abc", "def"}};
|
||||
EXPECT_ANY_THROW(auto execNet = core->compile_model(function, targetDevice, incorrectConfig));
|
||||
EXPECT_ANY_THROW(auto execNet = core->compile_model(function, target_device, incorrectConfig));
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCreateInferRequest) {
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
EXPECT_NO_THROW(auto req = execNet.create_infer_request());
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, checkGetExecGraphInfoIsNotNullptr) {
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
auto execGraph = execNet.get_runtime_model();
|
||||
EXPECT_NE(execGraph, nullptr);
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, checkGetMetric) {
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
EXPECT_NO_THROW(execNet.get_property(ov::supported_properties));
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheckConfig) {
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
for (const auto& configItem : configuration) {
|
||||
ov::Any param;
|
||||
EXPECT_NO_THROW(param = execNet.get_property(configItem.first));
|
||||
@ -118,7 +123,7 @@ TEST_P(OVExecutableNetworkBaseTest, canLoadCorrectNetworkToGetExecutableAndCheck
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNet) {
|
||||
auto execNet = core->compile_model(function, targetDevice);
|
||||
auto execNet = core->compile_model(function, target_device);
|
||||
std::map<std::string, ov::Any> config;
|
||||
for (const auto& confItem : configuration) {
|
||||
config.emplace(confItem.first, confItem.second);
|
||||
@ -127,7 +132,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNet) {
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetWithIncorrectConfig) {
|
||||
auto execNet = core->compile_model(function, targetDevice);
|
||||
auto execNet = core->compile_model(function, target_device);
|
||||
std::map<std::string, std::string> incorrectConfig = {{"abc", "def"}};
|
||||
std::map<std::string, ov::Any> config;
|
||||
for (const auto& confItem : incorrectConfig) {
|
||||
@ -137,7 +142,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetWithIncorrectConfig) {
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetAndCheckConfigAndCheck) {
|
||||
auto execNet = core->compile_model(function, targetDevice);
|
||||
auto execNet = core->compile_model(function, target_device);
|
||||
std::map<std::string, ov::Any> config;
|
||||
for (const auto& confItem : configuration) {
|
||||
config.emplace(confItem.first, confItem.second);
|
||||
@ -154,7 +159,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanSetConfigToExecNetAndCheckConfigAndCheck)
|
||||
TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworks) {
|
||||
std::vector<ov::CompiledModel> vec;
|
||||
for (auto i = 0; i < 2; i++) {
|
||||
EXPECT_NO_THROW(vec.push_back(core->compile_model(function, targetDevice, configuration)));
|
||||
EXPECT_NO_THROW(vec.push_back(core->compile_model(function, target_device, configuration)));
|
||||
EXPECT_NE(nullptr, function);
|
||||
}
|
||||
}
|
||||
@ -162,24 +167,24 @@ TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworks) {
|
||||
TEST_P(OVExecutableNetworkBaseTest, CanCreateTwoExeNetworksAndCheckFunction) {
|
||||
std::vector<ov::CompiledModel> vec;
|
||||
for (auto i = 0; i < 2; i++) {
|
||||
EXPECT_NO_THROW(vec.push_back(core->compile_model(function, targetDevice, configuration)));
|
||||
EXPECT_NO_THROW(vec.push_back(core->compile_model(function, target_device, configuration)));
|
||||
EXPECT_NE(nullptr, vec[i].get_runtime_model());
|
||||
EXPECT_NE(vec.begin()->get_runtime_model(), vec[i].get_runtime_model());
|
||||
}
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfo) {
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
EXPECT_NO_THROW(auto inInfo = execNet.inputs());
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfo) {
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
EXPECT_NO_THROW(auto outInfo = execNet.outputs());
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) {
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
auto inputs = execNet.inputs();
|
||||
std::vector<std::string> paramVec;
|
||||
for (const auto& input : inputs) {
|
||||
@ -193,7 +198,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanGetInputsInfoAndCheck) {
|
||||
}
|
||||
|
||||
TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) {
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
auto outputs = execNet.outputs();
|
||||
std::vector<std::string> resVec;
|
||||
for (const auto& out : outputs) {
|
||||
@ -209,7 +214,7 @@ TEST_P(OVExecutableNetworkBaseTest, CanGetOutputsInfoAndCheck) {
|
||||
TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) {
|
||||
std::shared_ptr<const ov::Model> execGraph;
|
||||
// Load CNNNetwork to target plugins
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
EXPECT_NO_THROW(execGraph = execNet.get_runtime_model());
|
||||
std::map<std::string, int> originalLayersMap;
|
||||
for (const auto& layer : function->get_ops()) {
|
||||
@ -259,7 +264,7 @@ TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoBeforeExecution) {
|
||||
TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) {
|
||||
std::shared_ptr<const ov::Model> execGraph;
|
||||
// Load CNNNetwork to target plugins
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
EXPECT_NO_THROW(execGraph = execNet.get_runtime_model());
|
||||
std::map<std::string, int> originalLayersMap;
|
||||
for (const auto& layer : function->get_ops()) {
|
||||
@ -319,7 +324,7 @@ TEST_P(OVExecutableNetworkBaseTest, CheckExecGraphInfoAfterExecution) {
|
||||
TEST_P(OVExecutableNetworkBaseTest, canExport) {
|
||||
auto ts = CommonTestUtils::GetTimestamp();
|
||||
std::string modelName = GetTestName().substr(0, CommonTestUtils::maxFileNameLength) + "_" + ts;
|
||||
auto execNet = core->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = core->compile_model(function, target_device, configuration);
|
||||
std::ofstream out(modelName, std::ios::out);
|
||||
EXPECT_NO_THROW(execNet.export_model(out));
|
||||
out.close();
|
||||
@ -339,7 +344,7 @@ TEST_P(OVExecutableNetworkBaseTest, getInputFromFunctionWithSingleInput) {
|
||||
SKIP_IF_CURRENT_TEST_IS_DISABLED()
|
||||
ov::CompiledModel execNet;
|
||||
|
||||
execNet = core->compile_model(function, targetDevice, configuration);
|
||||
execNet = core->compile_model(function, target_device, configuration);
|
||||
EXPECT_EQ(function->inputs().size(), 1);
|
||||
EXPECT_EQ(function->inputs().size(), execNet.inputs().size());
|
||||
EXPECT_NO_THROW(execNet.input());
|
||||
@ -366,7 +371,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputFromFunctionWithSingleInput) {
|
||||
SKIP_IF_CURRENT_TEST_IS_DISABLED()
|
||||
ov::CompiledModel execNet;
|
||||
|
||||
execNet = core->compile_model(function, targetDevice, configuration);
|
||||
execNet = core->compile_model(function, target_device, configuration);
|
||||
EXPECT_EQ(function->outputs().size(), 1);
|
||||
EXPECT_EQ(function->outputs().size(), execNet.outputs().size());
|
||||
EXPECT_NO_THROW(execNet.output());
|
||||
@ -414,7 +419,7 @@ TEST_P(OVExecutableNetworkBaseTest, getInputsFromFunctionWithSeveralInputs) {
|
||||
ngraph::ParameterVector{param1, param2});
|
||||
function->set_friendly_name("SimpleReLU");
|
||||
}
|
||||
execNet = core->compile_model(function, targetDevice, configuration);
|
||||
execNet = core->compile_model(function, target_device, configuration);
|
||||
EXPECT_EQ(function->inputs().size(), 2);
|
||||
EXPECT_EQ(function->inputs().size(), execNet.inputs().size());
|
||||
EXPECT_THROW(execNet.input(), ov::Exception);
|
||||
@ -485,7 +490,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputsFromFunctionWithSeveralOutputs) {
|
||||
ngraph::ParameterVector{param1, param2});
|
||||
function->set_friendly_name("SimpleReLU");
}
execNet = core->compile_model(function, targetDevice, configuration);
execNet = core->compile_model(function, target_device, configuration);
EXPECT_EQ(function->outputs().size(), 2);
EXPECT_EQ(function->outputs().size(), execNet.outputs().size());
EXPECT_THROW(execNet.output(), ov::Exception);
@ -552,7 +557,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputsFromSplitFunctionWithSeveralOutput
std::make_shared<ngraph::Function>(ngraph::ResultVector{result1, result2}, ngraph::ParameterVector{param1});
function->set_friendly_name("SingleSplit");
}
execNet = core->compile_model(function, targetDevice, configuration);
execNet = core->compile_model(function, target_device, configuration);
EXPECT_EQ(function->outputs().size(), 2);
EXPECT_EQ(function->outputs().size(), execNet.outputs().size());
EXPECT_THROW(execNet.output(), ov::Exception);
@ -599,7 +604,7 @@ TEST_P(OVExecutableNetworkBaseTest, getOutputsFromSplitFunctionWithSeveralOutput
// Load correct network to Plugin to get executable network
TEST_P(OVExecutableNetworkBaseTest, precisionsAsInOriginalFunction) {
ov::CompiledModel execNet;
EXPECT_NO_THROW(execNet = core->compile_model(function, targetDevice, configuration));
EXPECT_NO_THROW(execNet = core->compile_model(function, target_device, configuration));

EXPECT_EQ(function->get_parameters().size(), execNet.inputs().size());
auto ref_parameter = function->get_parameters().back();
@ -623,7 +628,7 @@ TEST_P(OVExecutableNetworkBaseTest, precisionsAsInOriginalIR) {
ov::pass::Serialize(m_out_xml_path_1, m_out_bin_path_1).run_on_function(function);

ov::CompiledModel execNet;
EXPECT_NO_THROW(execNet = core->compile_model(m_out_xml_path_1, targetDevice, configuration));
EXPECT_NO_THROW(execNet = core->compile_model(m_out_xml_path_1, target_device, configuration));
CommonTestUtils::removeIRFiles(m_out_xml_path_1, m_out_bin_path_1);

EXPECT_EQ(function->get_parameters().size(), execNet.inputs().size());
@ -645,7 +650,7 @@ TEST_P(OVExecutableNetworkBaseTest, getCompiledModelFromInferRequest) {
ov::InferRequest req;
{
ov::CompiledModel compiled_model;
ASSERT_NO_THROW(compiled_model = core->compile_model(function, targetDevice, configuration));
ASSERT_NO_THROW(compiled_model = core->compile_model(function, target_device, configuration));
ASSERT_NO_THROW(req = compiled_model.create_infer_request());
ASSERT_NO_THROW(req.infer());
}
@ -677,7 +682,7 @@ TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV10Model) {
function->get_rt_info()["version"] = int64_t(10);
function->set_friendly_name("SimpleReLU");
}
EXPECT_THROW(core->compile_model(function, targetDevice, configuration), ov::Exception);
EXPECT_THROW(core->compile_model(function, target_device, configuration), ov::Exception);
}

TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV11Model) {
@ -699,7 +704,7 @@ TEST_P(OVExecutableNetworkBaseTest, loadIncorrectV11Model) {
function->get_rt_info()["version"] = int64_t(11);
function->set_friendly_name("SimpleReLU");
}
EXPECT_NO_THROW(core->compile_model(function, targetDevice, configuration));
EXPECT_NO_THROW(core->compile_model(function, target_device, configuration));
}

} // namespace behavior
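// Editor's note: the recurring change in this diff replaces each fixture's
// private `targetDevice`/`deviceName` member with a `target_device` member
// inherited from a shared API base class, so the conformance report can
// attribute every test to its device uniformly. A minimal sketch of the
// pattern (all names other than `target_device` are illustrative, not the
// exact library types):
class ApiReportBaseSketch : public ::testing::Test {
protected:
    std::string target_device;  // single source of truth for the report
    void SetUp() override { /* record {test name, target_device, status} */ }
};
class DeviceBehaviorTestSketch : public ApiReportBaseSketch,
                                 public ::testing::WithParamInterface<std::string> {
    void SetUp() override {
        target_device = GetParam();    // set the device first...
        ApiReportBaseSketch::SetUp();  // ...so the base can record it
    }
};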
@ -27,30 +27,32 @@ namespace behavior {
ASSERT_NE(properties.end(), it); \
}

using OVClassImportExportTestP = OVClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = OVClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_NETWORK_NAME = OVClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = OVClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported = OVClassBaseTestP;
using OVClassExecutableNetworkGetConfigTest = OVClassBaseTestP;
using OVClassExecutableNetworkSetConfigTest = OVClassBaseTestP;
using OVClassExecutableNetworkGetConfigTest = OVClassBaseTestP;
using OVCompiledModelClassBaseTest = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkImportExportTestP = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_NETWORK_NAME = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetConfigTest = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkSetConfigTest = OVCompiledModelClassBaseTestP;
using OVClassExecutableNetworkGetConfigTest = OVCompiledModelClassBaseTestP;

class OVClassExecutableNetworkGetMetricTestForSpecificConfig :
public OVClassNetworkTest,
public ::testing::WithParamInterface<std::tuple<std::string, std::pair<std::string, std::string>>> {
public ::testing::WithParamInterface<std::tuple<std::string, std::pair<std::string, std::string>>>,
public OVCompiledNetworkTestBase {
protected:
std::string deviceName;
std::string configKey;
ov::Any configValue;

public:
void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
OVClassNetworkTest::SetUp();
deviceName = std::get<0>(GetParam());
target_device = std::get<0>(GetParam());
std::tie(configKey, configValue) = std::get<1>(GetParam());
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
OVClassNetworkTest::SetUp();
}
};

@ -62,18 +64,17 @@ using OVClassExecutableNetworkUnsupportedConfigTest = OVClassExecutableNetworkGe
//
class OVClassHeteroExecutableNetworkGetMetricTest :
public OVClassNetworkTest,
public ::testing::WithParamInterface<std::string> {
public ::testing::WithParamInterface<std::string>,
public OVCompiledNetworkTestBase {
protected:
std::string deviceName;
std::string heteroDeviceName;

public:
void SetUp() override {
target_device = CommonTestUtils::DEVICE_HETERO + std::string(":") + GetParam() + std::string(",") + CommonTestUtils::DEVICE_CPU;
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
OVClassNetworkTest::SetUp();
deviceName = GetParam();
heteroDeviceName = CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + std::string(",") +
CommonTestUtils::DEVICE_CPU;
}
};
using OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS = OVClassHeteroExecutableNetworkGetMetricTest;
@ -85,13 +86,13 @@ using OVClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK = OVClassHeter
// ImportExportNetwork
//

TEST_P(OVClassImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) {
TEST_P(OVClassExecutableNetworkImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) {
ov::Core ie = createCoreWithTemplate();
std::stringstream strm;
ov::CompiledModel executableNetwork;
OV_ASSERT_NO_THROW(executableNetwork = ie.compile_model(actualNetwork, deviceName));
OV_ASSERT_NO_THROW(executableNetwork = ie.compile_model(actualNetwork, target_device));
OV_ASSERT_NO_THROW(executableNetwork.export_model(strm));
OV_ASSERT_NO_THROW(executableNetwork = ie.import_model(strm, deviceName));
OV_ASSERT_NO_THROW(executableNetwork = ie.import_model(strm, target_device));
OV_ASSERT_NO_THROW(executableNetwork.create_infer_request());
}

@ -101,7 +102,7 @@ TEST_P(OVClassImportExportTestP, smoke_ImportNetworkNoThrowWithDeviceName) {
TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate();

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

std::vector<ov::PropertyName> supported_properties;
OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties));
@ -118,7 +119,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricNoT
TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate();

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

std::vector<ov::PropertyName> supported_properties;
OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties));
@ -135,7 +136,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricNoThrow
TEST_P(OVClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate();

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

std::string model_name;
OV_ASSERT_NO_THROW(model_name = compiled_model.get_property(ov::model_name));
@ -148,7 +149,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThrow) {
TEST_P(OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate();

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

unsigned int value = 0;
OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::optimal_number_of_infer_requests));
@ -159,7 +160,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS, G
}
TEST_P(OVClassExecutableNetworkGetMetricTest_MODEL_PRIORITY, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName, configuration);
auto compiled_model = ie.compile_model(simpleNetwork, target_device, configuration);

ov::hint::Priority value;
OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::hint::model_priority));
@ -168,7 +169,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_MODEL_PRIORITY, GetMetricNoThrow) {

TEST_P(OVClassExecutableNetworkGetMetricTest_DEVICE_PRIORITY, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate();
auto compiled_model = ie.compile_model(simpleNetwork, deviceName, configuration);
auto compiled_model = ie.compile_model(simpleNetwork, target_device, configuration);

std::string value;
OV_ASSERT_NO_THROW(value = compiled_model.get_property(ov::device::priorities));
@ -178,7 +179,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_DEVICE_PRIORITY, GetMetricNoThrow)
TEST_P(OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow) {
ov::Core ie = createCoreWithTemplate();

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

ASSERT_THROW(compiled_model.get_property("unsupported_property"), ov::Exception);
}
@ -186,7 +187,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow)
TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigNoThrow) {
ov::Core ie = createCoreWithTemplate();

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

std::vector<ov::PropertyName> property_names;
OV_ASSERT_NO_THROW(property_names = compiled_model.get_property(ov::supported_properties));
@ -202,7 +203,7 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigThrows) {
ov::Core ie = createCoreWithTemplate();
ov::Any p;

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

ASSERT_THROW(compiled_model.get_property("unsupported_property"), ov::Exception);
}
@ -210,7 +211,7 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigThrows) {
TEST_P(OVClassExecutableNetworkSetConfigTest, SetConfigThrows) {
ov::Core ie = createCoreWithTemplate();

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

ASSERT_THROW(compiled_model.set_property({{"unsupported_config", "some_value"}}), ov::Exception);
}
@ -219,7 +220,7 @@ TEST_P(OVClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) {
ov::Core ie = createCoreWithTemplate();
ov::Any p;

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);
OV_ASSERT_NO_THROW(compiled_model.set_property({{configKey, configValue}}));
OV_ASSERT_NO_THROW(p = compiled_model.get_property(configKey));
ASSERT_EQ(p, configValue);
@ -228,7 +229,7 @@ TEST_P(OVClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) {
TEST_P(OVClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) {
ov::Core ie = createCoreWithTemplate();

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

ASSERT_THROW(compiled_model.set_property({{configKey, configValue}}), ov::Exception);
}
@ -237,9 +238,9 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigNoEmptyNoThrow) {
ov::Core ie = createCoreWithTemplate();

std::vector<ov::PropertyName> dev_property_names;
OV_ASSERT_NO_THROW(dev_property_names = ie.get_property(deviceName, ov::supported_properties));
OV_ASSERT_NO_THROW(dev_property_names = ie.get_property(target_device, ov::supported_properties));

auto compiled_model = ie.compile_model(simpleNetwork, deviceName);
auto compiled_model = ie.compile_model(simpleNetwork, target_device);

std::vector<ov::PropertyName> model_property_names;
OV_ASSERT_NO_THROW(model_property_names = compiled_model.get_property(ov::supported_properties));
@ -249,7 +250,7 @@ TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMet
ov::Core ie = createCoreWithTemplate();

auto heteroExeNetwork = ie.compile_model(actualNetwork, heteroDeviceName);
auto deviceExeNetwork = ie.compile_model(actualNetwork, deviceName);
auto deviceExeNetwork = ie.compile_model(actualNetwork, target_device);

std::vector<ov::PropertyName> heteroConfigValues, deviceConfigValues;
OV_ASSERT_NO_THROW(heteroConfigValues = heteroExeNetwork.get_property(ov::supported_properties));
@ -285,7 +286,7 @@ TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS, GetMetricN
ov::Core ie = createCoreWithTemplate();

auto heteroExeNetwork = ie.compile_model(actualNetwork, heteroDeviceName);
auto deviceExeNetwork = ie.compile_model(actualNetwork, deviceName);
auto deviceExeNetwork = ie.compile_model(actualNetwork, target_device);

std::vector<ov::PropertyName> heteroConfigValues, deviceConfigValues;
OV_ASSERT_NO_THROW(heteroConfigValues = heteroExeNetwork.get_property(ov::supported_properties));
@ -331,13 +332,13 @@ TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME, GetMetricNoThro
TEST_P(OVClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK, GetMetricNoThrow) {
ov::Core ie = createCoreWithTemplate();

setHeteroNetworkAffinity(deviceName);
setHeteroNetworkAffinity(target_device);

auto compiled_model = ie.compile_model(actualNetwork, heteroDeviceName);

std::string targets;
OV_ASSERT_NO_THROW(targets = compiled_model.get_property(ov::device::priorities));
auto expectedTargets = deviceName + "," + CommonTestUtils::DEVICE_CPU;
auto expectedTargets = target_device + "," + CommonTestUtils::DEVICE_CPU;

std::cout << "Compiled model fallback targets: " << targets << std::endl;
ASSERT_EQ(expectedTargets, targets);
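// Editor's note: the GetMetric/GetConfig tests above all exercise the same
// ov::CompiledModel property-query pattern. A standalone sketch of that flow
// (model path and device name are illustrative):
ov::Core core;
auto model = core.read_model("model.xml");
auto compiled = core.compile_model(model, "CPU");
std::vector<ov::PropertyName> props = compiled.get_property(ov::supported_properties);
for (const auto& p : props)
    std::cout << p << (p.is_mutable() ? " (rw)" : " (ro)") << std::endl;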
@ -16,11 +16,10 @@ namespace ov {
namespace test {
namespace behavior {

class OVCompiledModelPropertiesBase : public CommonTestUtils::TestsCommon {
class OVCompiledModelPropertiesBase : public OVCompiledNetworkTestBase {
public:
std::shared_ptr<Core> core = utils::PluginCache::get().core();
std::shared_ptr<Model> model;
std::string device_name;
AnyMap properties;
};

@ -28,7 +27,6 @@ class OVCompiledModelEmptyPropertiesTests : public testing::WithParamInterface<s
public OVCompiledModelPropertiesBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);

void SetUp() override;
};

@ -38,9 +36,7 @@ class OVCompiledModelPropertiesTests : public testing::WithParamInterface<Proper
public OVCompiledModelPropertiesBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<PropertiesParams> obj);

void SetUp() override;

void TearDown() override;
};
@ -14,13 +14,12 @@ namespace test {
namespace behavior {

class OVInferRequestBatchedTests : public testing::WithParamInterface<std::string>,
public CommonTestUtils::TestsCommon {
public OVInferRequestTestBase {
public:
static std::string getTestCaseName(const testing::TestParamInfo<std::string>& device_name);

protected:
void SetUp() override;

void TearDown() override;

static std::string generateCacheDirName(const std::string& test_name);
@ -28,7 +27,6 @@ protected:
const PartialShape& shape, const ov::Layout& layout);

std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core();
std::string targetDevice;
std::string m_cache_dir; // internal member
bool m_need_reset_core = false;
};
@ -4,14 +4,127 @@

#pragma once

#include <future>
#include "base/ov_behavior_test_utils.hpp"
#include "shared_test_classes/subgraph/basic_lstm.hpp"

namespace ov {
namespace test {
namespace behavior {
struct OVInferRequestCallbackTests : public OVInferRequestTests {
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
};
using OVInferRequestCallbackTests = OVInferRequestTests;

TEST_P(OVInferRequestCallbackTests, canCallAsyncWithCompletionCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
bool is_called = false;
OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) {
// HSD_1805940120: Wait on starting callback return HDDL_ERROR_INVAL_TASK_HANDLE
ASSERT_EQ(exception_ptr, nullptr);
is_called = true;
}));
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
ASSERT_TRUE(is_called);
}

TEST_P(OVInferRequestCallbackTests, syncInferDoesNotCallCompletionCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
bool is_called = false;
req.set_callback([&] (std::exception_ptr exception_ptr) {
ASSERT_EQ(nullptr, exception_ptr);
is_called = true;
});
req.infer();
ASSERT_FALSE(is_called);
}

// test that can wait all callbacks on dtor
TEST_P(OVInferRequestCallbackTests, canStartSeveralAsyncInsideCompletionCallbackWithSafeDtor) {
const int NUM_ITER = 10;
struct TestUserData {
std::atomic<int> numIter = {0};
std::promise<bool> promise;
};
TestUserData data;

ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) {
if (exception_ptr) {
data.promise.set_exception(exception_ptr);
} else {
if (data.numIter.fetch_add(1) != NUM_ITER) {
req.start_async();
} else {
data.promise.set_value(true);
}
}
}));
auto future = data.promise.get_future();
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
future.wait();
auto callbackStatus = future.get();
ASSERT_TRUE(callbackStatus);
auto dataNumIter = data.numIter - 1;
ASSERT_EQ(NUM_ITER, dataNumIter);
}

TEST_P(OVInferRequestCallbackTests, returnGeneralErrorIfCallbackThrowException) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.set_callback([] (std::exception_ptr) {
OPENVINO_UNREACHABLE("Throw");
}));
OV_ASSERT_NO_THROW(req.start_async());
ASSERT_THROW(req.wait(), ov::Exception);
}

TEST_P(OVInferRequestCallbackTests, ReturnResultNotReadyFromWaitInAsyncModeForTooSmallTimeout) {
// GetNetwork(3000, 380) make inference around 20ms on GNA SW
// so increases chances for getting RESULT_NOT_READY
OV_ASSERT_NO_THROW(execNet = core->compile_model(
SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38), target_device, configuration));
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
std::promise<std::chrono::system_clock::time_point> callbackTimeStamp;
auto callbackTimeStampFuture = callbackTimeStamp.get_future();
// add a callback to the request and capture the timestamp
OV_ASSERT_NO_THROW(req.set_callback([&](std::exception_ptr exception_ptr) {
if (exception_ptr) {
callbackTimeStamp.set_exception(exception_ptr);
} else {
callbackTimeStamp.set_value(std::chrono::system_clock::now());
}
}));
OV_ASSERT_NO_THROW(req.start_async());
bool ready = false;
OV_ASSERT_NO_THROW(ready = req.wait_for({}));
// get timestamp taken AFTER return from the wait(STATUS_ONLY)
const auto afterWaitTimeStamp = std::chrono::system_clock::now();
// IF the callback timestamp is larger than the afterWaitTimeStamp
// then we should observe false ready result
if (afterWaitTimeStamp < callbackTimeStampFuture.get()) {
ASSERT_FALSE(ready);
}
OV_ASSERT_NO_THROW(req.wait());
}

TEST_P(OVInferRequestCallbackTests, ImplDoesNotCopyCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
{
auto somePtr = std::make_shared<int>(42);
OV_ASSERT_NO_THROW(req.set_callback([somePtr] (std::exception_ptr exception_ptr) {
ASSERT_EQ(nullptr, exception_ptr);
ASSERT_EQ(1, somePtr.use_count());
}));
}
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
}

} // namespace behavior
} // namespace test
} // namespace ov
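// Editor's note: the callbacks above receive a std::exception_ptr; the only
// portable way to inspect one is to rethrow it. A hedged sketch of what a
// non-test callback would typically do (`req` is an assumed ov::InferRequest):
req.set_callback([](std::exception_ptr ex) {
    if (!ex) return;  // success path
    try {
        std::rethrow_exception(ex);
    } catch (const ov::Cancelled&) {
        // the request was cancelled; usually not treated as an error
    } catch (const std::exception& e) {
        std::cerr << "async inference failed: " << e.what() << std::endl;
    }
});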
@ -6,14 +6,60 @@

#include <future>

#include "openvino/runtime/exception.hpp"

#include "base/ov_behavior_test_utils.hpp"

namespace ov {
namespace test {
namespace behavior {
struct OVInferRequestCancellationTests : public OVInferRequestTests {
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
};
using OVInferRequestCancellationTests = OVInferRequestTests;

TEST_P(OVInferRequestCancellationTests, canCancelAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.cancel());
try {
req.wait();
} catch (const ov::Cancelled&) {
SUCCEED();
}
}

TEST_P(OVInferRequestCancellationTests, CanResetAfterCancelAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.cancel());
try {
req.wait();
} catch (const ov::Cancelled&) {
SUCCEED();
}
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
}

TEST_P(OVInferRequestCancellationTests, canCancelBeforeAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.cancel());
}

TEST_P(OVInferRequestCancellationTests, canCancelInferRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
auto infer = std::async(std::launch::async, [&req]{req.infer();});
while (!req.wait_for({})) {
}
OV_ASSERT_NO_THROW(req.cancel());
try {
infer.get();
} catch (const ov::Cancelled&) {
SUCCEED();
}
}
} // namespace behavior
} // namespace test
} // namespace ov
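// Editor's note: canCancelInferRequest polls req.wait_for({}) — a zero
// timeout that only reports status — until the request is observably busy
// before cancelling it. The same call with a non-zero timeout gives a bounded
// wait; a sketch (`compiled` is an assumed ov::CompiledModel):
ov::InferRequest sketch_req = compiled.create_infer_request();
sketch_req.start_async();
if (!sketch_req.wait_for(std::chrono::milliseconds(5))) {
    sketch_req.cancel();  // still not finished after 5 ms: stop the request
}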
@ -43,19 +43,16 @@ using OVInferRequestDynamicParams = std::tuple<
>;

class OVInferRequestDynamicTests : public testing::WithParamInterface<OVInferRequestDynamicParams>,
virtual public ov::test::SubgraphBaseTest {
public OVInferRequestTestBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<OVInferRequestDynamicParams> obj);

protected:
void SetUp() override;

void TearDown() override;
bool checkOutput(const ov::runtime::Tensor& in, const ov::runtime::Tensor& actual);

std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core();
std::shared_ptr<Model> function;
std::string targetDevice;
ov::AnyMap configuration;
std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>> inOutShapes;
};
@ -71,21 +71,18 @@ inline OVInferReqInferParam roi_1d() {
} // namespace tensor_roi

class OVInferRequestInferenceTests : public testing::WithParamInterface<OVInferRequestInferenceTestsParams>,
public CommonTestUtils::TestsCommon {
public OVInferRequestTestBase {
public:
static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestInferenceTestsParams>& device_name);

protected:
void SetUp() override;

void TearDown() override;

static std::shared_ptr<Model> create_n_inputs(size_t num, element::Type type,
const PartialShape& shape);

std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core();
OVInferReqInferParam m_param;
std::string m_device_name;
};

} // namespace behavior
@ -15,7 +15,6 @@ namespace test {
namespace behavior {

struct OVInferRequestIOTensorTest : public OVInferRequestTests {
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
void SetUp() override;
void TearDown() override;
ov::InferRequest req;
@ -29,7 +28,7 @@ using OVInferRequestSetPrecisionParams = std::tuple<
ov::AnyMap // Config
>;
struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterface<OVInferRequestSetPrecisionParams>,
public CommonTestUtils::TestsCommon {
public OVInferRequestTestBase {
static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestSetPrecisionParams>& obj);
void SetUp() override;
void TearDown() override;
@ -37,7 +36,6 @@ struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterfa
std::shared_ptr<ov::Model> function;
ov::CompiledModel execNet;
ov::InferRequest req;
std::string target_device;
ov::AnyMap config;
element::Type element_type;
};
@ -45,7 +43,7 @@ struct OVInferRequestIOTensorSetPrecisionTest : public testing::WithParamInterfa
using OVInferRequestCheckTensorPrecisionParams = OVInferRequestSetPrecisionParams;

struct OVInferRequestCheckTensorPrecision : public testing::WithParamInterface<OVInferRequestCheckTensorPrecisionParams>,
public CommonTestUtils::TestsCommon {
public OVInferRequestTestBase {
static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestCheckTensorPrecisionParams>& obj);
void SetUp() override;
void TearDown() override;
@ -56,7 +54,6 @@ struct OVInferRequestCheckTensorPrecision : public testing::WithParamInterface<O
CompiledModel compModel;
InferRequest req;
AnyMap config;
std::string target_device;
element::Type element_type;
};
@ -4,14 +4,87 @@

#pragma once

#include <future>

#include "base/ov_behavior_test_utils.hpp"

namespace ov {
namespace test {
namespace behavior {
struct OVInferRequestMultithreadingTests : public OVInferRequestTests {
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
};
using OVInferRequestMultithreadingTests = OVInferRequestTests;

TEST_P(OVInferRequestMultithreadingTests, canRun3SyncRequestsConsistentlyFromThreads) {
ov::InferRequest req1, req2, req3;
OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request());

auto f1 = std::async(std::launch::async, [&] { req1.infer(); });
auto f2 = std::async(std::launch::async, [&] { req2.infer(); });
auto f3 = std::async(std::launch::async, [&] { req3.infer(); });

f1.wait();
f2.wait();
f3.wait();

OV_ASSERT_NO_THROW(f1.get());
OV_ASSERT_NO_THROW(f2.get());
OV_ASSERT_NO_THROW(f3.get());
}

TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsConsistentlyFromThreadsWithoutWait) {
ov::InferRequest req1, req2, req3;
OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request());

OV_ASSERT_NO_THROW(req1.infer());
OV_ASSERT_NO_THROW(req2.infer());
OV_ASSERT_NO_THROW(req3.infer());

auto f1 = std::async(std::launch::async, [&] { req1.start_async(); });
auto f2 = std::async(std::launch::async, [&] { req2.start_async(); });
auto f3 = std::async(std::launch::async, [&] { req3.start_async(); });

f1.wait();
f2.wait();
f3.wait();

OV_ASSERT_NO_THROW(f1.get());
OV_ASSERT_NO_THROW(f2.get());
OV_ASSERT_NO_THROW(f3.get());
}

TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsConsistentlyWithWait) {
ov::InferRequest req1, req2, req3;
OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request());

req1.start_async();
OV_ASSERT_NO_THROW(req1.wait());

req2.start_async();
OV_ASSERT_NO_THROW(req2.wait());

req3.start_async();
OV_ASSERT_NO_THROW(req3.wait());
}

TEST_P(OVInferRequestMultithreadingTests, canRun3AsyncRequestsParallelWithWait) {
ov::InferRequest req1, req2, req3;
OV_ASSERT_NO_THROW(req1 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req2 = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req3 = execNet.create_infer_request());

req1.start_async();
req2.start_async();
req3.start_async();

OV_ASSERT_NO_THROW(req2.wait());
OV_ASSERT_NO_THROW(req1.wait());
OV_ASSERT_NO_THROW(req3.wait());
}
} // namespace behavior
} // namespace test
} // namespace ov
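// Editor's note: these tests pair f.wait() with an assertion on f.get()
// because std::future::wait() never rethrows — an exception thrown by
// req.infer() on the worker thread is stored in the shared state and only
// rethrown by get(). Minimal illustration (`req` is an assumed ov::InferRequest):
auto f = std::async(std::launch::async, [&] { req.infer(); });
f.wait();  // blocks until done; a stored exception stays stored
f.get();   // rethrows the stored exception, if any — what the asserts check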
@ -9,8 +9,7 @@
namespace ov {
namespace test {
namespace behavior {
struct OVInferRequestPerfCountersTest : public OVInferRequestTests {
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
struct OVInferRequestPerfCountersTest : public virtual OVInferRequestTests {
void SetUp() override;
ov::InferRequest req;
};
@ -10,7 +10,6 @@ namespace ov {
namespace test {
namespace behavior {
struct OVInferRequestWaitTests : public OVInferRequestTests {
static std::string getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj);
void SetUp() override;
void TearDown() override;
ov::InferRequest req;
@ -13,6 +13,7 @@
#include "functional_test_utils/plugin_cache.hpp"
#include "common_test_utils/unicode_utils.hpp"
#include "openvino/util/common_util.hpp"
#include "base/ov_behavior_test_utils.hpp"

#include <ie_core.hpp>
#include <ie_common.h>
@ -33,7 +34,8 @@ using compileModelCacheParams = std::tuple<
>;

class CompileModelCacheTestBase : public testing::WithParamInterface<compileModelCacheParams>,
virtual public SubgraphBaseTest {
virtual public SubgraphBaseTest,
virtual public OVPluginTestBase {
std::string m_cacheFolderName;
std::string m_functionName;
ov::element::Type m_precision;
@ -56,31 +58,17 @@ using compileKernelsCacheParams = std::tuple<
std::pair<ov::AnyMap, std::string> // device and cache configuration
>;
class CompiledKernelsCacheTest : virtual public SubgraphBaseTest,
virtual public OVPluginTestBase,
public testing::WithParamInterface<compileKernelsCacheParams> {
public:
static std::string getTestCaseName(testing::TestParamInfo<compileKernelsCacheParams> obj);
protected:
std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name();
std::shared_ptr<ngraph::Function> function;
std::string cache_path;
std::vector<std::string> m_extList;
void SetUp() override {
function = ngraph::builder::subgraph::makeConvPoolRelu();
std::pair<ov::AnyMap, std::string> userConfig;
std::tie(targetDevice, userConfig) = GetParam();
configuration = userConfig.first;
std::string ext = userConfig.second;
std::string::size_type pos = 0;
if ((pos = ext.find(",", pos)) != std::string::npos) {
m_extList.push_back(ext.substr(0, pos));
m_extList.push_back(ext.substr(pos + 1));
} else {
m_extList.push_back(ext);
}
std::replace(test_name.begin(), test_name.end(), '/', '_');
std::replace(test_name.begin(), test_name.end(), '\\', '_');
cache_path = "compiledModel" + test_name + "_cache";
}

void SetUp() override;
void TearDown() override;
};
} // namespace behavior
} // namespace test
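// Editor's note: the caching fixtures above manage per-test cache folders by
// hand; the runtime switch they ultimately exercise is the ov::cache_dir
// property. A hedged sketch of the user-facing flow (paths and device are
// illustrative):
ov::Core core;
core.set_property(ov::cache_dir("compiled_blob_cache"));
auto model = core.read_model("model.xml");
auto compiled = core.compile_model(model, "CPU");  // first call fills the cache;
                                                   // later calls load from it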
File diff suppressed because it is too large
@ -10,30 +10,27 @@ namespace ov {
namespace test {
namespace behavior {

class OVHoldersTest : public CommonTestUtils::TestsCommon,
class OVHoldersTest : public OVPluginTestBase,
public ::testing::WithParamInterface<std::string> {
public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);

void SetUp() override;

void TearDown() override;

protected:
std::string deathTestStyle;
std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
};

class OVHoldersTestOnImportedNetwork : public CommonTestUtils::TestsCommon,
class OVHoldersTestOnImportedNetwork : public OVPluginTestBase,
public ::testing::WithParamInterface<std::string> {
public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);

void SetUp() override;
void TearDown() override;

protected:
std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
std::string deathTestStyle;
};
} // namespace behavior
@ -16,11 +16,10 @@ namespace ov {
namespace test {
namespace behavior {

class OVPropertiesBase : public CommonTestUtils::TestsCommon {
class OVPropertiesBase : public OVPluginTestBase {
public:
std::shared_ptr<Core> core = utils::PluginCache::get().core();
std::shared_ptr<Model> model;
std::string device_name;
AnyMap properties;
};
@ -10,6 +10,7 @@
#include "openvino/runtime/compiled_model.hpp"
#include "openvino/op/parameter.hpp"
#include "functional_test_utils/ov_plugin_cache.hpp"
#include "base/ov_behavior_test_utils.hpp"

namespace ov {
namespace test {
@ -20,7 +21,7 @@ using RemoteTensorParams = std::tuple<element::Type, // element type
std::pair<ov::AnyMap, ov::AnyMap>>; // remote context and tensor parameters

class OVRemoteTest : public testing::WithParamInterface<RemoteTensorParams>,
public CommonTestUtils::TestsCommon {
public ov::test::behavior::OVPluginTestBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<RemoteTensorParams> obj);
protected:
@ -28,7 +29,6 @@ protected:
void TearDown() override;

element::Type element_type;
std::string target_device;
ov::AnyMap config;
ov::AnyMap context_parameters;
ov::AnyMap tensor_parameters;
@ -13,6 +13,7 @@

#include "ngraph_functions/subgraph_builders.hpp"
#include "functional_test_utils/blob_utils.hpp"
#include "base/behavior_test_utils.hpp"

using namespace ::testing;
using namespace InferenceEngine;
@ -25,10 +26,10 @@ using AutoBatchTwoNetsParams = std::tuple<
size_t, // number of requests
size_t>; // batch size>

class AutoBatching_Test : public CommonTestUtils::TestsCommon,
class AutoBatching_Test : public BehaviorTestsUtils::IEPluginTestBase,
public testing::WithParamInterface<AutoBatchTwoNetsParams> {
void SetUp() override {
std::tie(device_name, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
std::tie(target_device, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
fn_ptrs = {ngraph::builder::subgraph::makeSingleConv(),
ngraph::builder::subgraph::makeMultiSingleConv()};
};
@ -36,15 +37,14 @@ public:
static std::string getTestCaseName(const testing::TestParamInfo<AutoBatchTwoNetsParams> &obj) {
size_t streams, requests, batch;
bool use_get_blob;
std::string device_name;
std::tie(device_name, use_get_blob, streams, requests, batch) = obj.param;
return device_name + std::string(use_get_blob ? "_get_blob" : "_set_blob") + "_batch_size_" +
std::string target_device;
std::tie(target_device, use_get_blob, streams, requests, batch) = obj.param;
return target_device + std::string(use_get_blob ? "_get_blob" : "_set_blob") + "_batch_size_" +
std::to_string(batch) +
"_num_streams_" + std::to_string(streams) + "_num_req_" + std::to_string(requests);
}

protected:
std::string device_name;
bool use_get_blob;
size_t num_streams;
size_t num_requests;
@ -70,16 +70,16 @@ protected:
n.second->setPrecision(Precision::FP32);
}
std::map<std::string, std::string> config;
if (device_name.find("GPU") != std::string::npos)
if (target_device.find("GPU") != std::string::npos)
config[CONFIG_KEY(GPU_THROUGHPUT_STREAMS)] = std::to_string(num_streams);
if (device_name.find("CPU") != std::string::npos) {
if (target_device.find("CPU") != std::string::npos) {
config[CONFIG_KEY(CPU_THROUGHPUT_STREAMS)] = std::to_string(num_streams);
config[CONFIG_KEY(ENFORCE_BF16)] = CONFIG_VALUE(NO);
}
// minimize timeout to reduce test time
config[CONFIG_KEY(AUTO_BATCH_TIMEOUT)] = std::to_string(1);
auto exec_net_ref = ie.LoadNetwork(net, std::string(CommonTestUtils::DEVICE_BATCH) + ":" +
device_name + "(" + std::to_string(num_batch) + ")",
target_device + "(" + std::to_string(num_batch) + ")",
config);

auto network_outputs = net.getOutputsInfo();
@ -144,7 +144,7 @@ protected:
class AutoBatching_Test_DetectionOutput : public AutoBatching_Test {
public:
void SetUp() override {
std::tie(device_name, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
std::tie(target_device, use_get_blob, num_streams, num_requests, num_batch) = this->GetParam();
fn_ptrs = {ngraph::builder::subgraph::makeDetectionOutput(),
ngraph::builder::subgraph::makeDetectionOutput()};
};
@ -152,9 +152,9 @@ public:
static std::string getTestCaseName(const testing::TestParamInfo<AutoBatchTwoNetsParams> &obj) {
size_t streams, requests, batch;
bool use_get_blob;
std::string device_name;
std::tie(device_name, use_get_blob, streams, requests, batch) = obj.param;
return "DetectionOutput_HETERO_" + device_name + std::string(use_get_blob ? "_get_blob" : "_set_blob") +
std::string target_device;
std::tie(target_device, use_get_blob, streams, requests, batch) = obj.param;
return "DetectionOutput_HETERO_" + target_device + std::string(use_get_blob ? "_get_blob" : "_set_blob") +
"_batch_size_" + std::to_string(batch) +
"_num_streams_" + std::to_string(streams) + "_num_req_" + std::to_string(requests);
}
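// Editor's note: AutoBatching_Test composes the device string as
// "BATCH:<device>(<batch size>)" for the legacy LoadNetwork call above; the
// same naming should work through the 2.0 API. Sketch (`model`, device and
// batch size are illustrative assumptions):
ov::Core core;
auto compiled = core.compile_model(model, "BATCH:CPU(4)");  // auto-batch of 4 on CPU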
@ -13,6 +13,7 @@
#include "functional_test_utils/plugin_cache.hpp"
#include "common_test_utils/unicode_utils.hpp"
#include "openvino/util/common_util.hpp"
#include "base/behavior_test_utils.hpp"

#include <ie_core.hpp>
#include <ie_common.h>
@ -30,6 +31,7 @@ using loadNetworkCacheParams = std::tuple<
namespace LayerTestsDefinitions {

class LoadNetworkCacheTestBase : public testing::WithParamInterface<loadNetworkCacheParams>,
virtual public BehaviorTestsUtils::IEPluginTestBase,
virtual public LayerTestsUtils::LayerTestsCommon {
std::string m_cacheFolderName;
std::string m_functionName;
@ -52,18 +54,21 @@ using compileKernelsCacheParams = std::tuple<
std::pair<std::map<std::string, std::string>, std::string> // device and cache configuration
>;
class LoadNetworkCompiledKernelsCacheTest : virtual public LayerTestsUtils::LayerTestsCommon,
virtual public BehaviorTestsUtils::IEPluginTestBase,
public testing::WithParamInterface<compileKernelsCacheParams> {
public:
static std::string getTestCaseName(testing::TestParamInfo<compileKernelsCacheParams> obj);
protected:
std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name();
std::shared_ptr<ngraph::Function> function;
std::string cache_path;
std::vector<std::string> m_extList;

void SetUp() override {
function = ngraph::builder::subgraph::makeConvPoolRelu();
std::pair<std::map<std::string, std::string>, std::string> userConfig;
std::tie(targetDevice, userConfig) = GetParam();
target_device = targetDevice;
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu();
configuration = userConfig.first;
std::string ext = userConfig.second;
std::string::size_type pos = 0;
@ -7,6 +7,7 @@
#include <tuple>
#include <string>
#include <vector>
#include <algorithm>

#include <ie_core.hpp>
#include <ie_parameter.hpp>
@ -17,6 +18,7 @@
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/file_utils.hpp"
#include "functional_test_utils/plugin_cache.hpp"
#include "base/behavior_test_utils.hpp"

namespace BehaviorTestsDefinitions {

@ -34,7 +36,8 @@ using DefaultConfigurationParameters = std::tuple<
DefaultParameter // default parameter key value comparator
>;

struct DefaultConfigurationTest : public CommonTestUtils::TestsCommon, public ::testing::WithParamInterface<DefaultConfigurationParameters> {
struct DefaultConfigurationTest : public BehaviorTestsUtils::IEPluginTestBase,
public ::testing::WithParamInterface<DefaultConfigurationParameters> {
enum {
DeviceName, DefaultParamterId
};
@ -43,16 +46,14 @@ struct DefaultConfigurationTest : public CommonTestUtils::TestsCommon, public ::

protected:
std::shared_ptr<InferenceEngine::Core> _core = PluginCache::get().ie();
std::string targetDevice;
DefaultParameter defaultParameter;
};

class ConfigBase : public CommonTestUtils::TestsCommon {
class ConfigBase : public BehaviorTestsUtils::IEPluginTestBase {
public:
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
std::shared_ptr<ngraph::Function> function;
InferenceEngine::CNNNetwork cnnNet;
std::string targetDevice;
std::map<std::string, std::string> configuration;
};

@ -60,17 +61,19 @@ class BehaviorTestsEmptyConfig : public testing::WithParamInterface<std::string>
public ConfigBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj) {
std::string targetDevice;
targetDevice = obj.param;
std::string target_device;
target_device = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
std::ostringstream result;
result << "targetDevice=" << targetDevice;
result << "target_device=" << target_device;
return result.str();
}

void SetUp() override { // Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
// Create CNNNetwork from ngraph::Function
targetDevice = this->GetParam();
target_device = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function);
}
@ -85,20 +88,24 @@ class BehaviorTestsSingleOptionDefault : public testing::WithParamInterface<Beha
public ConfigBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<BehaviorParamsSingleOptionDefault> obj) {
std::string targetDevice;
std::string target_device;
std::pair<std::string, InferenceEngine::Parameter> configuration;
std::tie(targetDevice, configuration) = obj.param;
std::tie(target_device, configuration) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
std::ostringstream result;
result << "targetDevice=" << targetDevice << "_";
result << "config=" << "(" << configuration.first << "_" << configuration.second.as<std::string>() << ")";
result << "target_device=" << target_device << "_";
std::string config_value = configuration.second.as<std::string>();
std::replace(config_value.begin(), config_value.end(), '-', '_');
result << "config=" << "(" << configuration.first << "_" << config_value << ")";
return result.str();
}

void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::pair<std::string, InferenceEngine::Parameter> entry;
std::tie(targetDevice, entry) = this->GetParam();
std::tie(target_device, entry) = this->GetParam();
std::tie(key, value) = entry;
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
}

std::string key;
@ -114,11 +121,12 @@ class CorrectConfigTests : public testing::WithParamInterface<CorrectConfigParam
public ConfigBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<CorrectConfigParams> obj) {
std::string targetDevice;
std::string target_device;
std::map<std::string, std::string> configuration;
std::tie(targetDevice, configuration) = obj.param;
std::tie(target_device, configuration) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
std::ostringstream result;
result << "targetDevice=" << targetDevice << "_";
result << "target_device=" << target_device << "_";
if (!configuration.empty()) {
using namespace CommonTestUtils;
result << "config=" << (configuration);
@ -127,9 +135,10 @@ public:
}

void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::map<std::string, std::string> entry;
std::tie(targetDevice, configuration) = this->GetParam();
std::tie(target_device, configuration) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function);
}
@ -138,6 +147,7 @@ public:
if (!configuration.empty()) {
PluginCache::get().reset();
}
APIBaseTest::TearDown();
}
};

@ -152,7 +162,7 @@ public:
void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tuple<std::string, std::string, InferenceEngine::Parameter> entry;
std::tie(targetDevice, entry) = this->GetParam();
std::tie(target_device, entry) = this->GetParam();
std::tie(key, value, reference) = entry;
function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function);
@ -172,8 +182,9 @@ class BehaviorTestsSingleOption : public testing::WithParamInterface<BehaviorPar
public ConfigBase {
public:
void SetUp() override {
std::tie(target_device, key) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(targetDevice, key) = this->GetParam();
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function);
}
@ -191,12 +202,13 @@ class SetPropLoadNetWorkGetPropTests : public testing::WithParamInterface<LoadNe
public ConfigBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<LoadNetWorkPropertiesParams> obj) {
std::string targetDevice;
std::string target_device;
std::map<std::string, std::string> configuration;
std::map<std::string, std::string> loadNetWorkConfig;
std::tie(targetDevice, configuration, loadNetWorkConfig) = obj.param;
std::tie(target_device, configuration, loadNetWorkConfig) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
std::ostringstream result;
result << "targetDevice=" << targetDevice << "_";
result << "target_device=" << target_device << "_";
if (!configuration.empty()) {
result << "configItem=";
for (auto& configItem : configuration) {
@ -215,9 +227,10 @@ public:
}

void SetUp() override {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::map<std::string, std::string> entry;
std::tie(targetDevice, configuration, loadNetWorkConfig) = this->GetParam();
std::tie(target_device, configuration, loadNetWorkConfig) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
APIBaseTest::SetUp();
function = ngraph::builder::subgraph::makeConvPoolRelu();
cnnNet = InferenceEngine::CNNNetwork(function);
}
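// Editor's note: several getTestCaseName overloads above rewrite ':' to '.'
// and '-' to '_' in device and config strings, presumably to keep the
// generated parameterized test names friendly to --gtest_filter. A
// hypothetical helper capturing that convention (not part of the test utils):
inline std::string sanitizeTestName(std::string name) {
    std::replace(name.begin(), name.end(), ':', '.');
    std::replace(name.begin(), name.end(), '-', '_');
    return name;
}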
@ -25,19 +25,22 @@ namespace BehaviorTestsDefinitions {
|
||||
#define ASSERT_METRIC_SUPPORTED_IE(metricName) \
|
||||
{ \
|
||||
std::vector<std::string> metrics = \
|
||||
ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)); \
|
||||
ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)); \
|
||||
auto it = std::find(metrics.begin(), metrics.end(), metricName); \
|
||||
ASSERT_NE(metrics.end(), it); \
|
||||
}
|
||||
|
||||
class IEClassBasicTestP : public ::testing::Test, public ::testing::WithParamInterface<std::pair<std::string, std::string> > {
|
||||
class IEClassBasicTestP : public BehaviorTestsUtils::IEPluginTestBase,
|
||||
public ::testing::WithParamInterface<std::pair<std::string, std::string> > {
|
||||
protected:
|
||||
std::string deviceName;
|
||||
std::string pluginName;
|
||||
|
||||
public:
|
||||
void SetUp() override {
|
||||
std::tie(pluginName, target_device) = GetParam();
|
||||
SKIP_IF_CURRENT_TEST_IS_DISABLED();
|
||||
std::tie(pluginName, deviceName) = GetParam();
|
||||
ov::test::behavior::APIBaseTest::SetUp();
|
||||
pluginName += IE_BUILD_POSTFIX;
|
||||
if (pluginName == (std::string("openvino_template_plugin") + IE_BUILD_POSTFIX)) {
|
||||
pluginName = ov::util::make_plugin_library_name(CommonTestUtils::getExecutableDirectory(), pluginName);
|
||||
@ -45,14 +48,14 @@ public:
|
||||
}
|
||||
};
|
||||
|
||||
class IEClassSetDefaultDeviceIDTest : public ::testing::Test,
|
||||
class IEClassSetDefaultDeviceIDTest : public BehaviorTestsUtils::IEPluginTestBase,
|
||||
public ::testing::WithParamInterface<std::pair<std::string, std::string>> {
|
||||
protected:
|
||||
std::string deviceName;
|
||||
std::string deviceID;
|
||||
|
||||
public:
|
||||
void SetUp() override {
|
||||
std::tie(deviceName, deviceID) = GetParam();
|
||||
std::tie(target_device, deviceID) = GetParam();
|
||||
}
|
||||
};
|
||||
|
||||
@ -78,31 +81,35 @@ using IEClassGetMetricTest_RANGE_FOR_STREAMS = BehaviorTestsUtils::IEClassBaseTe
|
||||
using IEClassSetGlobalConfigTest = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassSpecificDeviceTestSetConfig = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
using IEClassSpecificDeviceTestGetConfig = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
|
||||
using IEClassLoadNetworkAfterCoreRecreateTest = BehaviorTestsUtils::IEClassBaseTestP;
|
||||
|
||||
class IEClassSeveralDevicesTest : public BehaviorTestsUtils::IEClassNetworkTest,
|
||||
class IEClassSeveralDevicesTest : public BehaviorTestsUtils::IEPluginTestBase,
|
||||
public BehaviorTestsUtils::IEClassNetworkTest,
|
||||
public ::testing::WithParamInterface<std::vector<std::string>> {
|
||||
public:
|
||||
std::vector<std::string> deviceNames;
|
||||
std::vector<std::string> target_devices;
|
||||
void SetUp() override {
|
||||
target_device = CommonTestUtils::DEVICE_MULTI;
|
||||
SKIP_IF_CURRENT_TEST_IS_DISABLED()
|
||||
ov::test::behavior::APIBaseTest::SetUp();
|
||||
IEClassNetworkTest::SetUp();
|
||||
deviceNames = GetParam();
|
||||
target_devices = GetParam();
|
||||
}
|
||||
};
|
||||
|
||||
using IEClassSeveralDevicesTestLoadNetwork = IEClassSeveralDevicesTest;
|
||||
using IEClassSeveralDevicesTestQueryNetwork = IEClassSeveralDevicesTest;
|
||||
using IEClassSeveralDevicesTestDefaultCore = IEClassSeveralDevicesTest;
|
||||
|
||||
bool supportsAvaliableDevices(InferenceEngine::Core &ie, const std::string &deviceName) {
|
||||
auto supportedMetricKeys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
|
||||
bool supportsAvaliableDevices(InferenceEngine::Core &ie, const std::string &target_device) {
|
||||
auto supportedMetricKeys = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)).as<std::vector<std::string>>();
|
||||
return supportedMetricKeys.end() != std::find(std::begin(supportedMetricKeys),
|
||||
std::end(supportedMetricKeys),
|
||||
METRIC_KEY(AVAILABLE_DEVICES));
|
||||
}
|
||||
|
||||
-bool supportsDeviceID(InferenceEngine::Core &ie, const std::string &deviceName) {
-    auto supportedConfigKeys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
+bool supportsDeviceID(InferenceEngine::Core &ie, const std::string &target_device) {
+    auto supportedConfigKeys = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)).as<std::vector<std::string>>();
    return supportedConfigKeys.end() != std::find(std::begin(supportedConfigKeys),
                                                  std::end(supportedConfigKeys),
                                                  CONFIG_KEY(DEVICE_ID));
@ -117,7 +124,7 @@ TEST(IEClassBasicTest, smoke_createDefault) {

TEST_P(IEClassBasicTestP, registerExistingPluginThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_THROW(ie.RegisterPlugin(pluginName, deviceName), InferenceEngine::Exception);
+   ASSERT_THROW(ie.RegisterPlugin(pluginName, target_device), InferenceEngine::Exception);
}

TEST_P(IEClassBasicTestP, registerNewPluginNoThrows) {
@ -159,7 +166,6 @@ TEST(IEClassBasicTest, smoke_createMockEngineConfigThrows) {
    ASSERT_THROW(InferenceEngine::Core ie(filename), InferenceEngine::Exception);
    CommonTestUtils::removeFile(filename.c_str());
}

#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT

TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {
@ -184,7 +190,7 @@ TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {
        ASSERT_NO_THROW(ie.RegisterPlugins(ov::util::wstring_to_string(pluginsXmlW)));
        CommonTestUtils::removeFile(pluginsXmlW);
        ASSERT_NO_THROW(ie.GetVersions("mock")); // from pluginXML
-       ASSERT_NO_THROW(ie.GetVersions(deviceName));
+       ASSERT_NO_THROW(ie.GetVersions(target_device));
        GTEST_COUT << "Plugin created " << testIndex << std::endl;

        ASSERT_NO_THROW(ie.RegisterPlugin(pluginName, "TEST_DEVICE"));
@ -211,17 +217,17 @@ TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {

TEST_P(IEClassBasicTestP, getVersionsByExactDeviceNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_NO_THROW(ie.GetVersions(deviceName + ".0"));
+   ASSERT_NO_THROW(ie.GetVersions(target_device + ".0"));
}

TEST_P(IEClassBasicTestP, getVersionsByDeviceClassNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_NO_THROW(ie.GetVersions(deviceName));
+   ASSERT_NO_THROW(ie.GetVersions(target_device));
}

TEST_P(IEClassBasicTestP, getVersionsNonEmpty) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_EQ(2, ie.GetVersions(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName).size());
+   ASSERT_EQ(2, ie.GetVersions(CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device).size());
}

//
@ -231,22 +237,22 @@ TEST_P(IEClassBasicTestP, getVersionsNonEmpty) {
TEST_P(IEClassBasicTestP, unregisterExistingPluginNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    // device instance is not created yet
-   ASSERT_THROW(ie.UnregisterPlugin(deviceName), InferenceEngine::Exception);
+   ASSERT_THROW(ie.UnregisterPlugin(target_device), InferenceEngine::Exception);

    // make the first call to IE, which creates the device instance
-   ie.GetVersions(deviceName);
+   ie.GetVersions(target_device);
    // now we can unregister the device
-   ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName));
+   ASSERT_NO_THROW(ie.UnregisterPlugin(target_device));
}

TEST_P(IEClassBasicTestP, accessToUnregisteredPluginThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_THROW(ie.UnregisterPlugin(deviceName), InferenceEngine::Exception);
-   ASSERT_NO_THROW(ie.GetVersions(deviceName));
-   ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName));
-   ASSERT_NO_THROW(ie.SetConfig({}, deviceName));
-   ASSERT_NO_THROW(ie.GetVersions(deviceName));
-   ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName));
+   ASSERT_THROW(ie.UnregisterPlugin(target_device), InferenceEngine::Exception);
+   ASSERT_NO_THROW(ie.GetVersions(target_device));
+   ASSERT_NO_THROW(ie.UnregisterPlugin(target_device));
+   ASSERT_NO_THROW(ie.SetConfig({}, target_device));
+   ASSERT_NO_THROW(ie.GetVersions(target_device));
+   ASSERT_NO_THROW(ie.UnregisterPlugin(target_device));
}

TEST(IEClassBasicTest, smoke_unregisterNonExistingPluginThrows) {
@ -261,7 +267,7 @@ TEST(IEClassBasicTest, smoke_unregisterNonExistingPluginThrows) {
TEST_P(IEClassBasicTestP, SetConfigAllThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    ASSERT_NO_THROW(ie.SetConfig({{"unsupported_key", "4"}}));
-   ASSERT_ANY_THROW(ie.GetVersions(deviceName));
+   ASSERT_ANY_THROW(ie.GetVersions(target_device));
}

TEST_P(IEClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) {
@ -272,13 +278,13 @@ TEST_P(IEClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) {
TEST_P(IEClassBasicTestP, SetConfigNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}},
-                                deviceName));
+                                target_device));
}

TEST_P(IEClassBasicTestP, SetConfigAllNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}));
-   ASSERT_NO_THROW(ie.GetVersions(deviceName));
+   ASSERT_NO_THROW(ie.GetVersions(target_device));
}

TEST(IEClassBasicTest, smoke_SetConfigHeteroThrows) {
@ -291,17 +297,17 @@ TEST_P(IEClassBasicTestP, SetGetConfigForTbbTerminateThrows) {
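// Round-trip check: FORCE_TBB_TERMINATE is set globally (no device argument) and is
// then read back through the device as a boolean, first expecting YES, then NO.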
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    bool value = false;
    ASSERT_NO_THROW(ie.SetConfig({{CONFIG_KEY(FORCE_TBB_TERMINATE), CONFIG_VALUE(YES)}}));
-   ASSERT_NO_THROW(value = ie.GetConfig(deviceName, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
+   ASSERT_NO_THROW(value = ie.GetConfig(target_device, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
    ASSERT_TRUE(value);

    ASSERT_NO_THROW(ie.SetConfig({{CONFIG_KEY(FORCE_TBB_TERMINATE), CONFIG_VALUE(NO)}}));
-   ASSERT_NO_THROW(value = ie.GetConfig(deviceName, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
+   ASSERT_NO_THROW(value = ie.GetConfig(target_device, CONFIG_KEY(FORCE_TBB_TERMINATE)).as<bool>());
    ASSERT_FALSE(value);
}

TEST_P(IEClassBasicTestP, SetConfigHeteroTargetFallbackThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_NO_THROW(ie.SetConfig({{"TARGET_FALLBACK", deviceName}}, CommonTestUtils::DEVICE_HETERO));
+   ASSERT_NO_THROW(ie.SetConfig({{"TARGET_FALLBACK", target_device}}, CommonTestUtils::DEVICE_HETERO));
}

TEST(IEClassBasicTest, smoke_SetConfigHeteroNoThrow) {
@ -322,23 +328,23 @@ TEST(IEClassBasicTest, smoke_SetConfigHeteroNoThrow) {
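// The specific-device tests split a parameter such as "GPU.1" at the dot into a
// device class ("GPU") and a device ID ("1"), skipping when the plugin cannot
// enumerate IDs or the requested ID is not in AVAILABLE_DEVICES.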
TEST_P(IEClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   std::string deviceID, clearDeviceName;
-   auto pos = deviceName.find('.');
+   std::string deviceID, cleartarget_device;
+   auto pos = target_device.find('.');
    if (pos != std::string::npos) {
-       clearDeviceName = deviceName.substr(0, pos);
-       deviceID = deviceName.substr(pos + 1, deviceName.size());
+       cleartarget_device = target_device.substr(0, pos);
+       deviceID = target_device.substr(pos + 1, target_device.size());
    }
-   if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
+   if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
        GTEST_SKIP();
    }
-   std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
+   std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
    if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
        GTEST_SKIP();
    }

-   ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}, deviceName));
+   ASSERT_NO_THROW(ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}}, target_device));
    std::string value;
-   ASSERT_NO_THROW(value = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
+   ASSERT_NO_THROW(value = ie.GetConfig(target_device, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
    ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES);
}

@ -349,8 +355,8 @@ TEST_P(IEClassSpecificDeviceTestSetConfig, SetConfigSpecificDeviceNoThrow) {
TEST_P(IEClassBasicTestP, ImportNetworkThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (deviceName == CommonTestUtils::DEVICE_GPU) {
-       ASSERT_THROW(ie.ImportNetwork("model", deviceName), InferenceEngine::NetworkNotRead);
+   if (target_device == CommonTestUtils::DEVICE_GPU) {
+       ASSERT_THROW(ie.ImportNetwork("model", target_device), InferenceEngine::NetworkNotRead);

        const std::string modelName = "compiled_blob.blob";
        {
@ -358,7 +364,7 @@ TEST_P(IEClassBasicTestP, ImportNetworkThrows) {
            file << "content";
        }

-       EXPECT_THROW(ie.ImportNetwork(modelName, deviceName), InferenceEngine::NotImplemented);
+       EXPECT_THROW(ie.ImportNetwork(modelName, target_device), InferenceEngine::NotImplemented);
        ASSERT_EQ(0, std::remove(modelName.c_str()));
    }
}
@ -387,14 +393,14 @@ TEST_P(IEClassBasicTestP, ImportNetworkWithNullContextThrows) {

TEST_P(IEClassNetworkTestP, QueryNetworkActualThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName));
+   ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device));
}

TEST_P(IEClassNetworkTestP, QueryNetworkActualNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

    try {
-       ie.QueryNetwork(actualCnnNetwork, deviceName);
+       ie.QueryNetwork(actualCnnNetwork, target_device);
    } catch (const InferenceEngine::Exception& ex) {
        std::string message = ex.what();
        ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@ -405,12 +411,12 @@ TEST_P(IEClassNetworkTestP, QueryNetworkWithKSO) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

    try {
-       auto rres = ie.QueryNetwork(ksoCnnNetwork, deviceName);
+       auto rres = ie.QueryNetwork(ksoCnnNetwork, target_device);
        auto rl_map = rres.supportedLayersMap;
        auto func = ksoCnnNetwork.getFunction();
        for (const auto & op : func->get_ops()) {
            if (!rl_map.count(op->get_friendly_name())) {
-               FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName;
+               FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device;
            }
        }
    } catch (const InferenceEngine::Exception& ex) {
@ -422,26 +428,26 @@ TEST_P(IEClassNetworkTestP, QueryNetworkWithKSO) {
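// The several-devices tests assemble a compound name of the form
// "MULTI:DEV.0,DEV.1,..." so that one QueryNetwork/LoadNetwork call exercises
// several device instances at once.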
TEST_P(IEClassSeveralDevicesTestQueryNetwork, QueryNetworkActualSeveralDevicesNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   std::string clearDeviceName;
-   auto pos = deviceNames.begin()->find('.');
+   std::string cleartarget_device;
+   auto pos = target_devices.begin()->find('.');
    if (pos != std::string::npos) {
-       clearDeviceName = deviceNames.begin()->substr(0, pos);
+       cleartarget_device = target_devices.begin()->substr(0, pos);
    }
-   if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
+   if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
        GTEST_SKIP();
    }
-   std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
-   if (deviceIDs.size() < deviceNames.size())
+   std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
+   if (deviceIDs.size() < target_devices.size())
        GTEST_SKIP();

-   std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":");
-   for (auto& dev_name : deviceNames) {
-       multiDeviceName += dev_name;
-       if (&dev_name != &(deviceNames.back())) {
-           multiDeviceName += ",";
+   std::string multitarget_device = CommonTestUtils::DEVICE_MULTI + std::string(":");
+   for (auto& dev_name : target_devices) {
+       multitarget_device += dev_name;
+       if (&dev_name != &(target_devices.back())) {
+           multitarget_device += ",";
        }
    }
-   ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, multiDeviceName));
+   ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, multitarget_device));
}

TEST_P(IEClassNetworkTestP, SetAffinityWithConstantBranches) {
@ -477,18 +483,18 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithConstantBranches) {
        }
        InferenceEngine::CNNNetwork net(func);

-       auto rres = ie.QueryNetwork(net, deviceName);
+       auto rres = ie.QueryNetwork(net, target_device);
        auto rl_map = rres.supportedLayersMap;
        for (const auto & op : func->get_ops()) {
            if (!rl_map.count(op->get_friendly_name())) {
-               FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName;
+               FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device;
            }
        }
        for (const auto & op : net.getFunction()->get_ops()) {
            std::string affinity = rl_map[op->get_friendly_name()];
            op->get_rt_info()["affinity"] = affinity;
        }
-       InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, deviceName);
+       InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, target_device);
    } catch (const InferenceEngine::NotImplemented & ex) {
        std::string message = ex.what();
        ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@ -499,19 +505,19 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithKSO) {
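// Affinity flow: QueryNetwork yields a layer-to-device map, each op's assigned
// device is stored in its rt_info["affinity"], and only then is the network
// compiled with LoadNetwork.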
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

    try {
-       auto rres = ie.QueryNetwork(ksoCnnNetwork, deviceName);
+       auto rres = ie.QueryNetwork(ksoCnnNetwork, target_device);
        auto rl_map = rres.supportedLayersMap;
        auto func = ksoCnnNetwork.getFunction();
        for (const auto & op : func->get_ops()) {
            if (!rl_map.count(op->get_friendly_name())) {
-               FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName;
+               FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << target_device;
            }
        }
        for (const auto & op : ksoCnnNetwork.getFunction()->get_ops()) {
            std::string affinity = rl_map[op->get_friendly_name()];
            op->get_rt_info()["affinity"] = affinity;
        }
-       InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, deviceName);
+       InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoCnnNetwork, target_device);
    } catch (const InferenceEngine::Exception& ex) {
        std::string message = ex.what();
        ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@ -521,7 +527,7 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithKSO) {
TEST_P(IEClassNetworkTestP, QueryNetworkHeteroActualNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::QueryNetworkResult res;
-   ASSERT_NO_THROW(res = ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", deviceName}}));
+   ASSERT_NO_THROW(res = ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", target_device}}));
    ASSERT_LT(0, res.supportedLayersMap.size());
}

@ -533,9 +539,9 @@ TEST_P(IEClassNetworkTestP, QueryNetworkMultiThrows) {
TEST(IEClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;
-   std::string deviceName = CommonTestUtils::DEVICE_HETERO;
+   std::string target_device = CommonTestUtils::DEVICE_HETERO;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)));
    std::vector<std::string> t = p;

    std::cout << "Supported HETERO metrics: " << std::endl;
@ -549,9 +555,9 @@ TEST(IEClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) {
TEST(IEClassBasicTest, smoke_GetMetricSupportedConfigKeysHeteroNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;
-   std::string deviceName = CommonTestUtils::DEVICE_HETERO;
+   std::string target_device = CommonTestUtils::DEVICE_HETERO;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
    std::vector<std::string> t = p;

    std::cout << "Supported HETERO config keys: " << std::endl;
@ -573,7 +579,7 @@ TEST_P(IEClassGetMetricTest_SUPPORTED_METRICS, GetMetricAndPrintNoThrow) {
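// The GetMetricAndPrint tests below lean on InferenceEngine::Parameter's implicit
// conversion: assigning `p` to a typed variable (vector, string, tuple, map) both
// validates the metric's runtime type and makes the value printable.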
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_METRICS)));
    std::vector<std::string> t = p;

    std::cout << "Supported metrics: " << std::endl;
@ -588,7 +594,7 @@ TEST_P(IEClassGetMetricTest_SUPPORTED_CONFIG_KEYS, GetMetricAndPrintNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
    std::vector<std::string> t = p;

    std::cout << "Supported config values: " << std::endl;
@ -603,7 +609,7 @@ TEST_P(IEClassGetMetricTest_AVAILABLE_DEVICES, GetMetricAndPrintNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)));
    std::vector<std::string> t = p;

    std::cout << "Available devices: " << std::endl;
@ -618,7 +624,7 @@ TEST_P(IEClassGetMetricTest_FULL_DEVICE_NAME, GetMetricAndPrintNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(FULL_DEVICE_NAME)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(FULL_DEVICE_NAME)));
    std::string t = p;
    std::cout << "Full device name: " << std::endl << t << std::endl;

@ -629,7 +635,7 @@ TEST_P(IEClassGetMetricTest_OPTIMIZATION_CAPABILITIES, GetMetricAndPrintNoThrow)
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(OPTIMIZATION_CAPABILITIES)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(OPTIMIZATION_CAPABILITIES)));
    std::vector<std::string> t = p;

    std::cout << "Optimization capabilities: " << std::endl;
@ -644,7 +650,7 @@ TEST_P(IEClassGetMetricTest_DEVICE_GOPS, GetMetricAndPrintNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(DEVICE_GOPS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(DEVICE_GOPS)));
    std::map<InferenceEngine::Precision, float> t = p;

    std::cout << "Device GOPS: " << std::endl;
@ -659,7 +665,7 @@ TEST_P(IEClassGetMetricTest_DEVICE_TYPE, GetMetricAndPrintNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(DEVICE_TYPE)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(DEVICE_TYPE)));
    InferenceEngine::Metrics::DeviceType t = p;

    std::cout << "Device Type: " << t << std::endl;
@ -671,7 +677,7 @@ TEST_P(IEClassGetMetricTest_NUMBER_OF_WAITING_INFER_REQUESTS, GetMetricAndPrintN
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(NUMBER_OF_WAITING_INFER_REQUESTS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(NUMBER_OF_WAITING_INFER_REQUESTS)));
    unsigned int t = p;

    std::cout << "Number of waiting infer requests: " << std::endl << t << std::endl;
@ -683,7 +689,7 @@ TEST_P(IEClassGetMetricTest_NUMBER_OF_EXEC_INFER_REQUESTS, GetMetricAndPrintNoTh
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(NUMBER_OF_EXEC_INFER_REQUESTS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(NUMBER_OF_EXEC_INFER_REQUESTS)));
    unsigned int t = p;

    std::cout << "Number of executing infer requests: " << std::endl << t << std::endl;
@ -695,7 +701,7 @@ TEST_P(IEClassGetMetricTest_RANGE_FOR_ASYNC_INFER_REQUESTS, GetMetricAndPrintNoT
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS)));
    std::tuple<unsigned int, unsigned int, unsigned int> t = p;

    unsigned int start = std::get<0>(t);
@ -717,7 +723,7 @@ TEST_P(IEClassGetMetricTest_RANGE_FOR_STREAMS, GetMetricAndPrintNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(RANGE_FOR_STREAMS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(RANGE_FOR_STREAMS)));
    std::tuple<unsigned int, unsigned int> t = p;

    unsigned int start = std::get<0>(t);
@ -736,19 +742,19 @@ TEST_P(IEClassGetMetricTest_ThrowUnsupported, GetMetricThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_THROW(p = ie.GetMetric(deviceName, "unsupported_metric"), InferenceEngine::Exception);
+   ASSERT_THROW(p = ie.GetMetric(target_device, "unsupported_metric"), InferenceEngine::Exception);
}

TEST_P(IEClassGetConfigTest, GetConfigNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
    std::vector<std::string> configValues = p;

    for (auto &&confKey : configValues) {
        InferenceEngine::Parameter defaultValue;
-       ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey));
+       ASSERT_NO_THROW(defaultValue = ie.GetConfig(target_device, confKey));
        ASSERT_FALSE(defaultValue.empty());
    }
}
@ -757,11 +763,11 @@ TEST_P(IEClassGetConfigTest, GetConfigHeteroNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
    std::vector<std::string> configValues = p;

    for (auto &&confKey : configValues) {
-       ASSERT_NO_THROW(ie.GetConfig(deviceName, confKey));
+       ASSERT_NO_THROW(ie.GetConfig(target_device, confKey));
    }
}

@ -776,7 +782,7 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigHeteroWithDeviceThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_THROW(p = ie.GetConfig(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName, HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)),
+   ASSERT_THROW(p = ie.GetConfig(CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device, HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)),
                 InferenceEngine::Exception);
}

@ -784,33 +790,33 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   ASSERT_THROW(p = ie.GetConfig(deviceName, "unsupported_config"), InferenceEngine::Exception);
+   ASSERT_THROW(p = ie.GetConfig(target_device, "unsupported_config"), InferenceEngine::Exception);
}

TEST_P(IEClassSpecificDeviceTestGetConfig, GetConfigSpecificDeviceNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    InferenceEngine::Parameter p;

-   std::string deviceID, clearDeviceName;
-   auto pos = deviceName.find('.');
+   std::string deviceID, cleartarget_device;
+   auto pos = target_device.find('.');
    if (pos != std::string::npos) {
-       clearDeviceName = deviceName.substr(0, pos);
-       deviceID = deviceName.substr(pos + 1, deviceName.size());
+       cleartarget_device = target_device.substr(0, pos);
+       deviceID = target_device.substr(pos + 1, target_device.size());
    }
-   if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
+   if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
        GTEST_SKIP();
    }
-   std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
+   std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
    if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
        GTEST_SKIP();
    }

-   ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
+   ASSERT_NO_THROW(p = ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
    std::vector<std::string> configValues = p;

    for (auto &&confKey : configValues) {
        InferenceEngine::Parameter defaultValue;
-       ASSERT_NO_THROW(defaultValue = ie.GetConfig(deviceName, confKey));
+       ASSERT_NO_THROW(defaultValue = ie.GetConfig(target_device, confKey));
        ASSERT_FALSE(defaultValue.empty());
    }
}
@ -824,7 +830,7 @@ TEST_P(IEClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
    bool deviceFound = false;
    std::cout << "Available devices: " << std::endl;
    for (auto &&device : devices) {
-       if (device.find(deviceName) != std::string::npos) {
+       if (device.find(target_device) != std::string::npos) {
            deviceFound = true;
        }

@ -842,12 +848,12 @@ TEST_P(IEClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
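// Device-ID addressing cases: a valid suffix ("DEV.0") must be accepted, while an
// out-of-range ID (".110") and a malformed one (".l0", letter 'l' instead of a
// digit) must throw.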
TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
-       auto deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
+   if (supportsDeviceID(ie, target_device)) {
+       auto deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
        if (deviceIDs.empty())
            GTEST_SKIP();
        ASSERT_NO_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO,
-                                       {{"TARGET_FALLBACK", deviceName + "." + deviceIDs[0] + "," + deviceName}}));
+                                       {{"TARGET_FALLBACK", target_device + "." + deviceIDs[0] + "," + target_device}}));
    } else {
        GTEST_SKIP();
    }
@ -856,9 +862,9 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithDeviceIDNoThrow) {
TEST_P(IEClassQueryNetworkTest, QueryNetworkWithDeviceID) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
+   if (supportsDeviceID(ie, target_device)) {
        try {
-           ie.QueryNetwork(simpleCnnNetwork, deviceName + ".0");
+           ie.QueryNetwork(simpleCnnNetwork, target_device + ".0");
        } catch (const InferenceEngine::Exception& ex) {
            std::string message = ex.what();
            ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
@ -871,8 +877,8 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithDeviceID) {
TEST_P(IEClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
-       ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, deviceName + ".110"), InferenceEngine::Exception);
+   if (supportsDeviceID(ie, target_device)) {
+       ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, target_device + ".110"), InferenceEngine::Exception);
    } else {
        GTEST_SKIP();
    }
@ -881,8 +887,8 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) {
TEST_P(IEClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
-       ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, deviceName + ".l0"), InferenceEngine::Exception);
+   if (supportsDeviceID(ie, target_device)) {
+       ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, target_device + ".l0"), InferenceEngine::Exception);
    } else {
        GTEST_SKIP();
    }
@ -891,9 +897,9 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) {
TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithBigDeviceIDThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
+   if (supportsDeviceID(ie, target_device)) {
        ASSERT_THROW(ie.QueryNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO,
-                                    {{"TARGET_FALLBACK", deviceName + ".100," + deviceName}}), InferenceEngine::Exception);
+                                    {{"TARGET_FALLBACK", target_device + ".100," + target_device}}), InferenceEngine::Exception);
    } else {
        GTEST_SKIP();
    }
@ -915,22 +921,22 @@ TEST(IEClassBasicTest, smoke_LoadNetworkToDefaultDeviceNoThrow) {
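// The LoadNetwork cases below mirror the QueryNetwork ones above: plain device,
// "HETERO:<device>", and HETERO with an explicit TARGET_FALLBACK config.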
TEST_P(IEClassNetworkTestP, LoadNetworkActualNoThrow) {
    SKIP_IF_CURRENT_TEST_IS_DISABLED()
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName));
+   ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, target_device));
}

TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDeviceNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName));
+   ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device));
}

TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDevice2NoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", deviceName}}));
+   ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", target_device}}));
}

TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   auto net = ie.LoadNetwork(actualCnnNetwork, deviceName);
+   auto net = ie.LoadNetwork(actualCnnNetwork, target_device);
    auto exec_function = net.GetExecGraphInfo().getFunction();
    ASSERT_NE(nullptr, exec_function);
    auto actual_parameters = exec_function->get_parameters();
@ -960,32 +966,32 @@ TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) {
TEST_P(IEClassLoadNetworkTestWithThrow, LoadNetworkActualWithThrow) {
    SKIP_IF_CURRENT_TEST_IS_DISABLED()
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName), InferenceEngine::Exception);
+   ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, target_device), InferenceEngine::Exception);
}

TEST_P(IEClassSeveralDevicesTestLoadNetwork, LoadNetworkActualSeveralDevicesNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   std::string clearDeviceName;
-   auto pos = deviceNames.begin()->find('.');
+   std::string cleartarget_device;
+   auto pos = target_devices.begin()->find('.');
    if (pos != std::string::npos) {
-       clearDeviceName = deviceNames.begin()->substr(0, pos);
+       cleartarget_device = target_devices.begin()->substr(0, pos);
    }
-   if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
+   if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
        GTEST_SKIP();
    }
-   std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
-   if (deviceIDs.size() < deviceNames.size())
+   std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
+   if (deviceIDs.size() < target_devices.size())
        GTEST_SKIP();

-   std::string multiDeviceName = CommonTestUtils::DEVICE_MULTI + std::string(":");
-   for (auto& dev_name : deviceNames) {
-       multiDeviceName += dev_name;
-       if (&dev_name != &(deviceNames.back())) {
-           multiDeviceName += ",";
+   std::string multitarget_device = CommonTestUtils::DEVICE_MULTI + std::string(":");
+   for (auto& dev_name : target_devices) {
+       multitarget_device += dev_name;
+       if (&dev_name != &(target_devices.back())) {
+           multitarget_device += ",";
        }
    }
-   ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, multiDeviceName));
+   ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, multitarget_device));
}

using IEClassLoadNetworkTest = IEClassQueryNetworkTest;
@ -995,11 +1001,11 @@ using IEClassLoadNetworkTest = IEClassQueryNetworkTest;
TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithDeviceIDNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
-       auto deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
+   if (supportsDeviceID(ie, target_device)) {
+       auto deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
        if (deviceIDs.empty())
            GTEST_SKIP();
-       std::string heteroDevice = CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName + "." + deviceIDs[0] + "," + deviceName;
+       std::string heteroDevice = CommonTestUtils::DEVICE_HETERO + std::string(":") + target_device + "." + deviceIDs[0] + "," + target_device;
        ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, heteroDevice));
    } else {
        GTEST_SKIP();
@ -1009,11 +1015,11 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithDeviceIDNoThrow) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkWithDeviceIDNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
-       auto deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
+   if (supportsDeviceID(ie, target_device)) {
+       auto deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
        if (deviceIDs.empty())
            GTEST_SKIP();
-       ASSERT_NO_THROW(ie.LoadNetwork(simpleCnnNetwork, deviceName + "." + deviceIDs[0]));
+       ASSERT_NO_THROW(ie.LoadNetwork(simpleCnnNetwork, target_device + "." + deviceIDs[0]));
    } else {
        GTEST_SKIP();
    }
@ -1022,8 +1028,8 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithDeviceIDNoThrow) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
-       ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName + ".10"), InferenceEngine::Exception);
+   if (supportsDeviceID(ie, target_device)) {
+       ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, target_device + ".10"), InferenceEngine::Exception);
    } else {
        GTEST_SKIP();
    }
@ -1032,8 +1038,8 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
-       ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, deviceName + ".l0"), InferenceEngine::Exception);
+   if (supportsDeviceID(ie, target_device)) {
+       ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, target_device + ".l0"), InferenceEngine::Exception);
    } else {
        GTEST_SKIP();
    }
@ -1042,9 +1048,9 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
+   if (supportsDeviceID(ie, target_device)) {
        ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, "HETERO",
-                                   {{"TARGET_FALLBACK", deviceName + ".100," + CommonTestUtils::DEVICE_CPU}}), InferenceEngine::Exception);
+                                   {{"TARGET_FALLBACK", target_device + ".100," + CommonTestUtils::DEVICE_CPU}}), InferenceEngine::Exception);
    } else {
        GTEST_SKIP();
    }
@ -1053,9 +1059,9 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName)) {
+   if (supportsDeviceID(ie, target_device)) {
        ASSERT_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO,
-                                   {{"TARGET_FALLBACK", deviceName + "," + CommonTestUtils::DEVICE_CPU},
+                                   {{"TARGET_FALLBACK", target_device + "," + CommonTestUtils::DEVICE_CPU},
                                     {CONFIG_KEY(DEVICE_ID), "110"}}), InferenceEngine::Exception);
    } else {
        GTEST_SKIP();
@ -1068,16 +1074,16 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) {

TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
-   if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+   if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
        std::string devices;
-       auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
+       auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
        for (auto &&device : availableDevices) {
-           devices += deviceName + '.' + device;
+           devices += target_device + '.' + device;
            if (&device != &(availableDevices.back())) {
                devices += ',';
            }
        }
-       std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + deviceName);
+       std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + target_device);
        ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_HETERO, {
                {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
                {"TARGET_FALLBACK", targetFallback}}));
@ -1089,9 +1095,9 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROwithMULTINoThrow) {
TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+   if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
        std::string devices;
-       auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
+       auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
        for (auto &&device : availableDevices) {
            devices += CommonTestUtils::DEVICE_HETERO + std::string(".") + device;
            if (&device != &(availableDevices.back())) {
@ -1100,7 +1106,7 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) {
        }
        ASSERT_NO_THROW(ie.LoadNetwork(actualCnnNetwork, CommonTestUtils::DEVICE_MULTI, {
                {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
-               {"TARGET_FALLBACK", deviceName + "," + deviceName}}));
+               {"TARGET_FALLBACK", target_device + "," + target_device}}));
    } else {
        GTEST_SKIP();
    }
@ -1113,11 +1119,11 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkMULTIwithHETERONoThrow) {
TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+   if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
        std::string devices;
-       auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
+       auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
        for (auto &&device : availableDevices) {
-           devices += deviceName + '.' + device;
+           devices += target_device + '.' + device;
            if (&device != &(availableDevices.back())) {
                devices += ',';
            }
@ -1129,7 +1135,7 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
            expectedLayers.emplace(node->get_friendly_name());
        }
        InferenceEngine::QueryNetworkResult result;
-       std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + deviceName);
+       std::string targetFallback(CommonTestUtils::DEVICE_MULTI + std::string(",") + target_device);
        ASSERT_NO_THROW(result = ie.QueryNetwork(multinputCnnNetwork, CommonTestUtils::DEVICE_HETERO, {
                {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
                {"TARGET_FALLBACK", targetFallback}}));
@ -1147,9 +1153,9 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROWithMULTINoThrow_V10) {
TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   if (supportsDeviceID(ie, deviceName) && supportsAvaliableDevices(ie, deviceName)) {
+   if (supportsDeviceID(ie, target_device) && supportsAvaliableDevices(ie, target_device)) {
        std::string devices;
-       auto availableDevices = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
+       auto availableDevices = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES)).as<std::vector<std::string>>();
        for (auto &&device : availableDevices) {
            devices += "HETERO." + device;
            if (&device != &(availableDevices.back())) {
@ -1165,7 +1171,7 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) {
        InferenceEngine::QueryNetworkResult result;
        ASSERT_NO_THROW(result = ie.QueryNetwork(multinputCnnNetwork, CommonTestUtils::DEVICE_MULTI, {
                {MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
-               {"TARGET_FALLBACK", deviceName + "," + deviceName}}));
+               {"TARGET_FALLBACK", target_device + "," + target_device}}));

        std::unordered_set<std::string> actualLayers;
        for (auto &&layer : result.supportedLayersMap) {
@ -1180,50 +1186,50 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIWithHETERONoThrow_V10) {
TEST_P(IEClassLoadNetworkAfterCoreRecreateTest, LoadAfterRecreateCoresAndPlugins) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
    {
-       auto versions = ie.GetVersions(std::string(CommonTestUtils::DEVICE_MULTI) + ":" + deviceName + "," + CommonTestUtils::DEVICE_CPU);
+       auto versions = ie.GetVersions(std::string(CommonTestUtils::DEVICE_MULTI) + ":" + target_device + "," + CommonTestUtils::DEVICE_CPU);
        ASSERT_EQ(3, versions.size());
    }
    std::map<std::string, std::string> config;
-   if (deviceName == CommonTestUtils::DEVICE_CPU) {
+   if (target_device == CommonTestUtils::DEVICE_CPU) {
        config.insert({"CPU_THREADS_NUM", "3"});
    }
    ASSERT_NO_THROW({
        InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();
        std::string name = actualCnnNetwork.getInputsInfo().begin()->first;
        actualCnnNetwork.getInputsInfo().at(name)->setPrecision(InferenceEngine::Precision::U8);
-       auto executableNetwork = ie.LoadNetwork(actualCnnNetwork, deviceName, config);
+       auto executableNetwork = ie.LoadNetwork(actualCnnNetwork, target_device, config);
    });
};

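// Setting KEY_DEVICE_ID on the device class makes that ID the default target, so
// the subsequent GetConfig on the bare name is expected to return the value that
// was configured together with the ID.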
TEST_P(IEClassSetDefaultDeviceIDTest, SetDefaultDeviceIDNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   std::vector<std::string> deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES));
+   std::vector<std::string> deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES));
    if (std::find(deviceIDs.begin(), deviceIDs.end(), deviceID) == deviceIDs.end()) {
        GTEST_SKIP();
    }
    std::string value;
    ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_DEVICE_ID, deviceID },
                                  { InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }},
-                                deviceName));
-   ASSERT_NO_THROW(value = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
+                                target_device));
+   ASSERT_NO_THROW(value = ie.GetConfig(target_device, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT).as<std::string>());
    ASSERT_EQ(value, InferenceEngine::PluginConfigParams::YES);
}

TEST_P(IEClassSetGlobalConfigTest, SetGlobalConfigNoThrow) {
    InferenceEngine::Core ie = BehaviorTestsUtils::createIECoreWithTemplate();

-   std::vector<std::string> deviceIDs = ie.GetMetric(deviceName, METRIC_KEY(AVAILABLE_DEVICES));
+   std::vector<std::string> deviceIDs = ie.GetMetric(target_device, METRIC_KEY(AVAILABLE_DEVICES));
    InferenceEngine::Parameter ref, src;
    for (auto& dev_id : deviceIDs) {
        ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::NO }},
-                                    deviceName + "." + dev_id));
+                                    target_device + "." + dev_id));
    }
-   ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, deviceName));
-   ASSERT_NO_THROW(ref = ie.GetConfig(deviceName, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));
+   ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}, target_device));
+   ASSERT_NO_THROW(ref = ie.GetConfig(target_device, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));

    for (auto& dev_id : deviceIDs) {
-       ASSERT_NO_THROW(src = ie.GetConfig(deviceName + "." + dev_id, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));
+       ASSERT_NO_THROW(src = ie.GetConfig(target_device + "." + dev_id, InferenceEngine::PluginConfigParams::KEY_PERF_COUNT));
        ASSERT_EQ(src, ref);
    }
}
@ -1231,24 +1237,24 @@ TEST_P(IEClassSetGlobalConfigTest, SetGlobalConfigNoThrow) {
TEST_P(IEClassSeveralDevicesTestDefaultCore, DefaultCoreSeveralDevicesNoThrow) {
    InferenceEngine::Core ie;

-   std::string clearDeviceName;
-   auto pos = deviceNames.begin()->find('.');
+   std::string cleartarget_device;
+   auto pos = target_devices.begin()->find('.');
    if (pos != std::string::npos) {
-       clearDeviceName = deviceNames.begin()->substr(0, pos);
+       cleartarget_device = target_devices.begin()->substr(0, pos);
    }
-   if (!supportsDeviceID(ie, clearDeviceName) || !supportsAvaliableDevices(ie, clearDeviceName)) {
+   if (!supportsDeviceID(ie, cleartarget_device) || !supportsAvaliableDevices(ie, cleartarget_device)) {
        GTEST_SKIP();
    }
-   std::vector<std::string> deviceIDs = ie.GetMetric(clearDeviceName, METRIC_KEY(AVAILABLE_DEVICES));
-   if (deviceIDs.size() < deviceNames.size())
+   std::vector<std::string> deviceIDs = ie.GetMetric(cleartarget_device, METRIC_KEY(AVAILABLE_DEVICES));
+   if (deviceIDs.size() < target_devices.size())
        GTEST_SKIP();

-   for (size_t i = 0; i < deviceNames.size(); ++i) {
-       ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, std::to_string(i + 2) }}, deviceNames[i]));
+   for (size_t i = 0; i < target_devices.size(); ++i) {
+       ASSERT_NO_THROW(ie.SetConfig({{ InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS, std::to_string(i + 2) }}, target_devices[i]));
    }
    std::string res;
-   for (size_t i = 0; i < deviceNames.size(); ++i) {
-       ASSERT_NO_THROW(res = ie.GetConfig(deviceNames[i], InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS).as<std::string>());
+   for (size_t i = 0; i < target_devices.size(); ++i) {
+       ASSERT_NO_THROW(res = ie.GetConfig(target_devices[i], InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS).as<std::string>());
        ASSERT_EQ(res, std::to_string(i + 2));
    }
}

@ -15,6 +15,7 @@
#include <common_test_utils/file_utils.hpp>
#include <common_test_utils/test_assertions.hpp>
#include <common_test_utils/test_constants.hpp>
+#include "base/behavior_test_utils.hpp"

#include <gtest/gtest.h>
#include <thread>
@ -23,6 +24,7 @@
#include <chrono>
#include <fstream>
#include <functional_test_utils/skip_tests_config.hpp>
+#include "base/ov_behavior_test_utils.hpp"

using Device = std::string;
using Config = std::map<std::string, std::string>;
@ -49,7 +51,7 @@ public:
        }
    }

-   void safePluginUnregister(InferenceEngine::Core & ie) {
+   void safePluginUnregister(InferenceEngine::Core & ie, const std::string& deviceName) {
        try {
            ie.UnregisterPlugin(deviceName);
        } catch (const InferenceEngine::Exception & ex) {
@ -69,7 +71,6 @@ public:
        }
    }

-   Device deviceName;
    Config config;
};

@ -77,24 +78,27 @@ public:
//
// Common threading plugin tests
//

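// CoreThreadingTests hammers a shared InferenceEngine::Core from multiple threads
// via runParallel(); safePluginUnregister() tolerates the UnregisterPlugin calls
// that lose the race and throw.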
-class CoreThreadingTests : public CoreThreadingTestsBase,
-                           public ::testing::TestWithParam<Params> {
+class CoreThreadingTests : public testing::WithParamInterface<Params>,
+                           public BehaviorTestsUtils::IEPluginTestBase,
+                           public CoreThreadingTestsBase {
public:
    void SetUp() override {
+       std::tie(target_device, config) = GetParam();
+       APIBaseTest::SetUp();
        SKIP_IF_CURRENT_TEST_IS_DISABLED();
-       std::tie(deviceName, config) = GetParam();
    }

    static std::string getTestCaseName(testing::TestParamInfo<Params> obj) {
        std::string deviceName;
        Config config;
        std::tie(deviceName, config) = obj.param;
+       std::replace(deviceName.begin(), deviceName.end(), ':', '.');
        char separator('_');
        std::ostringstream result;
        result << "targetDevice=" << deviceName << separator;
        result << "config=";
        for (auto& confItem : config) {
-           result << confItem.first << ":" << confItem.second << separator;
+           result << confItem.first << "=" << confItem.second << separator;
        }
        return result.str();
    }
@ -104,9 +108,9 @@ public:
TEST_P(CoreThreadingTests, smoke_GetVersions) {
    InferenceEngine::Core ie;
    runParallel([&] () {
-       auto versions = ie.GetVersions(deviceName);
+       auto versions = ie.GetVersions(target_device);
        ASSERT_LE(1u, versions.size());
-       safePluginUnregister(ie);
+       safePluginUnregister(ie, target_device);
    });
}

@ -115,7 +119,7 @@ TEST_P(CoreThreadingTests, smoke_SetConfigPluginExists) {
    InferenceEngine::Core ie;

    ie.SetConfig(config);
-   auto versions = ie.GetVersions(deviceName);
+   auto versions = ie.GetVersions(target_device);

    runParallel([&] () {
        ie.SetConfig(config);
@ -129,8 +133,8 @@ TEST_P(CoreThreadingTests, smoke_GetConfig) {

    ie.SetConfig(config);
    runParallel([&] () {
-       ie.GetConfig(deviceName, configKey);
-       safePluginUnregister(ie);
+       ie.GetConfig(target_device, configKey);
+       safePluginUnregister(ie, target_device);
    });
}

@ -138,8 +142,8 @@ TEST_P(CoreThreadingTests, smoke_GetMetric) {
TEST_P(CoreThreadingTests, smoke_GetMetric) {
    InferenceEngine::Core ie;
    runParallel([&] () {
-       ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS));
-       safePluginUnregister(ie);
+       ie.GetMetric(target_device, METRIC_KEY(SUPPORTED_CONFIG_KEYS));
+       safePluginUnregister(ie, target_device);
    });
}

@ -148,12 +152,12 @@ TEST_P(CoreThreadingTests, smoke_QueryNetwork) {
    InferenceEngine::Core ie;
    InferenceEngine::CNNNetwork network(ngraph::builder::subgraph::make2InputSubtract());

-   ie.SetConfig(config, deviceName);
-   InferenceEngine::QueryNetworkResult refResult = ie.QueryNetwork(network, deviceName);
+   ie.SetConfig(config, target_device);
+   InferenceEngine::QueryNetworkResult refResult = ie.QueryNetwork(network, target_device);

    runParallel([&] () {
-       const auto result = ie.QueryNetwork(network, deviceName);
-       safePluginUnregister(ie);
+       const auto result = ie.QueryNetwork(network, target_device);
+       safePluginUnregister(ie, target_device);

        // compare QueryNetworkResult with reference
        for (auto && r : refResult.supportedLayersMap) {
@ -179,12 +183,13 @@ enum struct ModelClass : unsigned {

using CoreThreadingParams = std::tuple<Params, Threads, Iterations, ModelClass>;

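// The same threading harness, further parameterized by thread count, iteration
// count, and the model class used to build the networks under load.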
class CoreThreadingTestsWithIterations : public ::testing::TestWithParam<CoreThreadingParams>,
|
||||
class CoreThreadingTestsWithIterations : public testing::WithParamInterface<CoreThreadingParams>,
|
||||
public BehaviorTestsUtils::IEPluginTestBase,
|
||||
public CoreThreadingTestsBase {
|
||||
public:
|
||||
void SetUp() override {
|
||||
SKIP_IF_CURRENT_TEST_IS_DISABLED();
|
||||
std::tie(deviceName, config) = std::get<0>(GetParam());
|
||||
std::tie(target_device, config) = std::get<0>(GetParam());
|
||||
numThreads = std::get<1>(GetParam());
|
||||
numIterations = std::get<2>(GetParam());
|
||||
modelClass = std::get<3>(GetParam());
|
||||
@ -195,6 +200,7 @@ public:
|
||||
std::string deviceName;
|
||||
Config config;
|
||||
std::tie(deviceName, config) = std::get<0>(obj.param);
|
||||
std::replace(deviceName.begin(), deviceName.end(), ':', '.');
|
||||
numThreads = std::get<1>(obj.param);
|
||||
numIterations = std::get<2>(obj.param);
|
||||
char separator('_');
|
||||
@ -202,13 +208,15 @@ public:
|
||||
result << "targetDevice=" << deviceName << separator;
|
||||
result << "config=";
|
||||
for (auto& confItem : config) {
|
||||
result << confItem.first << ":" << confItem.second << separator;
|
||||
result << confItem.first << "=" << confItem.second << separator;
|
||||
}
|
||||
result << "numThreads=" << numThreads << separator;
|
||||
result << "numIter=" << numIterations;
|
||||
return result.str();
|
||||
}
|
||||
|
||||
|
||||
protected:
|
||||
ModelClass modelClass;
|
||||
unsigned int numIterations;
|
||||
unsigned int numThreads;
|
||||
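A note on the getTestCaseName() changes above: gtest allows only alphanumerics and underscores in parameterized test names, so device strings such as "HETERO:CPU" must have the ':' rewritten before use. A minimal standalone sketch of the same idiom (the function name and parameters here are illustrative, not the fixture's actual API):

#include <algorithm>
#include <sstream>
#include <string>

// Hypothetical helper mirroring the pattern above: sanitize the device
// string, then join the remaining parameters with '_' separators.
std::string makeTestCaseName(std::string device, unsigned threads, unsigned iters) {
    std::replace(device.begin(), device.end(), ':', '.');  // "HETERO:CPU" -> "HETERO.CPU"
    std::ostringstream result;
    const char separator = '_';
    result << "targetDevice=" << device << separator
           << "numThreads=" << threads << separator
           << "numIter=" << iters;
    return result.str();
}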
@ -236,10 +244,10 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork) {

SetupNetworks();

ie.SetConfig(config, deviceName);
ie.SetConfig(config, target_device);
runParallel([&] () {
auto value = counter++;
(void)ie.LoadNetwork(networks[value % networks.size()], deviceName);
(void)ie.LoadNetwork(networks[value % networks.size()], target_device);
}, numIterations, numThreads);
}

@ -250,7 +258,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy_SingleIECore)

SetupNetworks();

ie.SetConfig(config, deviceName);
ie.SetConfig(config, target_device);

runParallel([&] () {
auto value = counter++;
@ -264,7 +272,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy_SingleIECore)
}

auto getOutputBlob = [&](InferenceEngine::Core & core) {
auto exec = core.LoadNetwork(network, deviceName);
auto exec = core.LoadNetwork(network, target_device);
auto req = exec.CreateInferRequest();
req.SetInput(blobs);

@ -293,7 +301,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy) {

SetupNetworks();

ie.SetConfig(config, deviceName);
ie.SetConfig(config, target_device);
runParallel([&] () {
auto value = counter++;
auto network = networks[value % networks.size()];
@ -306,7 +314,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy) {
}

auto getOutputBlob = [&](InferenceEngine::Core & core) {
auto exec = core.LoadNetwork(network, deviceName);
auto exec = core.LoadNetwork(network, target_device);
auto req = exec.CreateInferRequest();
req.SetInput(blobs);

@ -325,7 +333,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetworkAccuracy) {
// compare actual value using the second Core
{
InferenceEngine::Core ie2;
ie2.SetConfig(config, deviceName);
ie2.SetConfig(config, target_device);
auto outputRef = getOutputBlob(ie2);

FuncTestUtils::compareBlobs(outputActual, outputRef);
@ -342,8 +350,8 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork_SingleIECore) {

runParallel([&] () {
auto value = counter++;
ie.SetConfig(config, deviceName);
(void)ie.LoadNetwork(networks[value % networks.size()], deviceName);
ie.SetConfig(config, target_device);
(void)ie.LoadNetwork(networks[value % networks.size()], target_device);
}, numIterations, numThreads);
}

@ -356,7 +364,7 @@ TEST_P(CoreThreadingTestsWithIterations, smoke_LoadNetwork_MultipleIECores) {
runParallel([&] () {
auto value = counter++;
InferenceEngine::Core ie;
ie.SetConfig(config, deviceName);
(void)ie.LoadNetwork(networks[value % networks.size()], deviceName);
ie.SetConfig(config, target_device);
(void)ie.LoadNetwork(networks[value % networks.size()], target_device);
}, numIterations, numThreads);
}

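All of the smoke_LoadNetwork* tests above funnel through the runParallel() helper inherited from CoreThreadingTestsBase. Its implementation is not part of this diff; a rough sketch of what such a harness might look like, under the assumption that it distributes a fixed iteration budget over a pool of threads:

#include <atomic>
#include <functional>
#include <thread>
#include <vector>

// Assumed shape of a runParallel-style harness (not the real utility):
// workers keep claiming iteration slots until the budget is exhausted.
inline void run_parallel_sketch(const std::function<void()>& body,
                                unsigned iterations, unsigned threads) {
    std::atomic<unsigned> next{0};
    std::vector<std::thread> workers;
    for (unsigned t = 0; t < threads; ++t) {
        workers.emplace_back([&] {
            while (next.fetch_add(1) < iterations)
                body();  // e.g. SetConfig + LoadNetwork on the shared Core
        });
    }
    for (auto& w : workers)
        w.join();
}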
@ -15,10 +15,10 @@
#include <common_test_utils/test_constants.hpp>
#include <cpp/ie_cnn_network.h>
#include "gtest/gtest.h"
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/crash_handler.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "functional_test_utils/precision_utils.hpp"
#include "base/behavior_test_utils.hpp"
#include <ie_core.hpp>

namespace BehaviorTestsDefinitions {
@ -27,29 +27,27 @@ typedef std::tuple<
std::vector<int>> // Order
HoldersParams;

class HoldersTest : public CommonTestUtils::TestsCommon,
class HoldersTest : public BehaviorTestsUtils::IEPluginTestBase,
public ::testing::WithParamInterface<HoldersParams> {
public:
static std::string getTestCaseName(testing::TestParamInfo<HoldersParams> obj);

void SetUp() override;

protected:
std::vector<int> order;
std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
};

using HoldersTestImportNetwork = HoldersTest;

class HoldersTestOnImportedNetwork : public CommonTestUtils::TestsCommon,
class HoldersTestOnImportedNetwork : public BehaviorTestsUtils::IEPluginTestBase,
public ::testing::WithParamInterface<std::string> {
public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);

void SetUp() override;

protected:
std::shared_ptr<ngraph::Function> function;
std::string targetDevice;
};

} // namespace BehaviorTestsDefinitions
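The edit to this header is the recurring theme of the patch: fixtures stop deriving from CommonTestUtils::TestsCommon and stop declaring their own targetDevice, and instead derive from BehaviorTestsUtils::IEPluginTestBase, which is assumed to own the target_device member and feed the API conformance report. A hedged, self-contained sketch of the shape this gives a fixture:

#include <gtest/gtest.h>
#include <string>

// Stand-in for IEPluginTestBase (assumed interface, not the real class):
// it owns the device name so every fixture reports it uniformly.
class PluginTestBaseSketch : public ::testing::Test {
protected:
    std::string target_device;
};

// A fixture then adds only its own state, as HoldersTest does above.
class HoldersTestSketch : public PluginTestBaseSketch,
                          public ::testing::WithParamInterface<std::string> {
protected:
    void SetUp() override { target_device = GetParam(); }
};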
@ -25,7 +25,7 @@ TEST_P(InferRequestPreprocessTest, SetPreProcessToInputInfo) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();
{
@ -44,7 +44,7 @@ TEST_P(InferRequestPreprocessTest, SetPreProcessToInferRequest) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();
InferenceEngine::ConstInputsDataMap inputsMap = execNet.GetInputsInfo();
@ -96,7 +96,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanImagePreProcessGetBlob) {
}
preProcess.setVariant(InferenceEngine::MEAN_IMAGE);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();
auto inBlob = req.GetBlob("param");
@ -163,7 +163,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanImagePreProcessSetBlob) {
}
preProcess.setVariant(InferenceEngine::MEAN_IMAGE);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();

@ -225,7 +225,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanValuePreProcessGetBlob) {
preProcess[2]->stdScale = 1;
preProcess.setVariant(InferenceEngine::MEAN_VALUE);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();
auto inBlob = req.GetBlob("param");
@ -285,7 +285,7 @@ TEST_P(InferRequestPreprocessTest, SetMeanValuePreProcessSetBlob) {
preProcess[2]->stdScale = 1;
preProcess.setVariant(InferenceEngine::MEAN_VALUE);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();

@ -340,7 +340,7 @@ TEST_P(InferRequestPreprocessTest, ReverseInputChannelsPreProcessGetBlob) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();
auto inBlob = req.GetBlob("param");
@ -401,7 +401,7 @@ TEST_P(InferRequestPreprocessTest, ReverseInputChannelsPreProcessSetBlob) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();

@ -472,7 +472,7 @@ TEST_P(InferRequestPreprocessTest, SetScalePreProcessGetBlob) {
preProcess[2]->meanValue = 0;
preProcess.setVariant(InferenceEngine::MEAN_VALUE);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();
auto inBlob = req.GetBlob("param");
@ -532,7 +532,7 @@ TEST_P(InferRequestPreprocessTest, SetScalePreProcessSetBlob) {
preProcess[2]->meanValue = 0;
preProcess.setVariant(InferenceEngine::MEAN_VALUE);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();

@ -577,18 +577,19 @@ typedef std::tuple<
> PreprocessConversionParams;

class InferRequestPreprocessConversionTest : public testing::WithParamInterface<PreprocessConversionParams>,
public CommonTestUtils::TestsCommon {
public BehaviorTestsUtils::IEPluginTestBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<PreprocessConversionParams> obj) {
InferenceEngine::Precision netPrecision, iPrecision, oPrecision;
InferenceEngine::Layout netLayout, iLayout, oLayout;
bool setInputBlob, setOutputBlob;
std::string targetDevice;
std::string target_device;
std::map<std::string, std::string> configuration;
std::tie(netPrecision, iPrecision, oPrecision,
netLayout, iLayout, oLayout,
setInputBlob, setOutputBlob,
targetDevice, configuration) = obj.param;
target_device, configuration) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '_');
std::ostringstream result;
result << "netPRC=" << netPrecision.name() << "_";
result << "iPRC=" << iPrecision.name() << "_";
@ -598,7 +599,7 @@ public:
result << "oLT=" << oLayout << "_";
result << "setIBlob=" << setInputBlob << "_";
result << "setOBlob=" << setOutputBlob << "_";
result << "targetDevice=" << targetDevice;
result << "target_device=" << target_device;
if (!configuration.empty()) {
for (auto& configItem : configuration) {
result << "configItem=" << configItem.first << "_" << configItem.second << "_";
@ -626,25 +627,26 @@ public:
}

void SetUp() override {
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(netPrecision, iPrecision, oPrecision,
netLayout, iLayout, oLayout,
setInputBlob, setOutputBlob,
targetDevice, configuration) = this->GetParam();
target_device, configuration) = this->GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
}

void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
APIBaseTest::TearDown();
}

std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
InferenceEngine::Precision netPrecision, iPrecision, oPrecision;
InferenceEngine::Layout netLayout, iLayout, oLayout;
bool setInputBlob, setOutputBlob;
std::string targetDevice;
std::map<std::string, std::string> configuration;
};

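The reordered SetUp() above encodes a sequencing rule the rest of the patch repeats: unpack GetParam() first so target_device is known, then run the skip filter, and only then call APIBaseTest::SetUp(), so a skipped test is presumably never registered in the report. A hedged sketch of that sequence (ApiBaseSketch stands in for APIBaseTest, whose internals are not shown in this diff):

#include <string>

struct ApiBaseSketch {
    std::string target_device;
    virtual void SetUp() { /* assumed: record (device, API) in the summary */ }
    virtual ~ApiBaseSketch() = default;
};

struct ConversionTestSketch : ApiBaseSketch {
    void SetUp() override {
        target_device = "TEMPLATE";   // 1. unpack parameters first (illustrative value)
        // 2. SKIP_IF_CURRENT_TEST_IS_DISABLED() would run here and may bail out
        ApiBaseSketch::SetUp();       // 3. register only if the test really runs
    }
};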
@ -676,7 +678,7 @@ TEST_P(InferRequestPreprocessConversionTest, Infer) {
cnnNet.getOutputsInfo().begin()->second->setLayout(oLayout);

// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();

@ -765,7 +767,7 @@ typedef std::tuple<
> PreprocessSetBlobCheckParams;

class InferRequestPreprocessDynamicallyInSetBlobTest : public testing::WithParamInterface<PreprocessSetBlobCheckParams>,
public CommonTestUtils::TestsCommon {
public BehaviorTestsUtils::IEPluginTestBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<PreprocessSetBlobCheckParams> obj) {
InferenceEngine::Precision netPrecision;
@ -773,12 +775,13 @@ public:
bool changeIPrecision, changeOPrecision;
bool changeILayout, changeOLayout;
bool setInputBlob, setOutputBlob;
std::string targetDevice;
std::string target_device;
std::map<std::string, std::string> configuration;
std::tie(netPrecision, changeIPrecision, changeOPrecision,
netLayout, changeILayout, changeOLayout,
setInputBlob, setOutputBlob,
targetDevice, configuration) = obj.param;
target_device, configuration) = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '_');
std::ostringstream result;
result << "netPRC=" << netPrecision.name() << "_";
result << "iPRC=" << changeIPrecision << "_";
@ -788,7 +791,7 @@ public:
result << "oLT=" << changeOLayout << "_";
result << "setIBlob=" << setInputBlob << "_";
result << "setOBlob=" << setOutputBlob << "_";
result << "targetDevice=" << targetDevice;
result << "target_device=" << target_device;
if (!configuration.empty()) {
for (auto& configItem : configuration) {
result << "configItem=" << configItem.first << "_" << configItem.second << "_";
@ -821,13 +824,15 @@ public:
std::tie(netPrecision, changeIPrecision, changeOPrecision,
netLayout, changeILayout, changeOLayout,
setInputBlob, setOutputBlob,
targetDevice, configuration) = this->GetParam();
target_device, configuration) = this->GetParam();
APIBaseTest::SetUp();
}

void TearDown() override {
if (!configuration.empty()) {
PluginCache::get().reset();
}
APIBaseTest::TearDown();
}

std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
@ -836,7 +841,6 @@ public:
InferenceEngine::Layout netLayout;
bool changeILayout, changeOLayout;
bool setInputBlob, setOutputBlob;
std::string targetDevice;
std::map<std::string, std::string> configuration;
};

@ -863,7 +867,7 @@ TEST_P(InferRequestPreprocessDynamicallyInSetBlobTest, Infer) {
InferenceEngine::CNNNetwork cnnNet(ngraph);

// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
auto req = execNet.CreateInferRequest();
InferenceEngine::Blob::Ptr inBlob = nullptr, outBlob = nullptr;

@ -997,7 +1001,7 @@ TEST_P(InferRequestPreprocessTest, InferWithRGB2BGRConversion) {
auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
preProcess.setColorFormat(InferenceEngine::ColorFormat::BGR);
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
// Create InferRequest
auto req = execNet.CreateInferRequest();


@ -15,34 +15,35 @@

namespace BehaviorTestsDefinitions {
class VersionTest : public testing::WithParamInterface<std::string>,
public CommonTestUtils::TestsCommon {
public BehaviorTestsUtils::IEPluginTestBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj) {
std::string targetDevice;
std::map<std::string, std::string> config;
targetDevice = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '_');
std::ostringstream result;
result << "targetDevice=" << targetDevice;
return result.str();
}

void SetUp() override {
target_device = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
targetDevice = this->GetParam();
APIBaseTest::SetUp();
}

std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
std::string targetDevice;
};

// Load unsupported network type to the Plugin
TEST_P(VersionTest, pluginCurrentVersionIsCorrect) {
if (targetDevice.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
std::map<std::string, InferenceEngine::Version> versions = ie->GetVersions(targetDevice);
if (target_device.find(CommonTestUtils::DEVICE_AUTO) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
target_device.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
std::map<std::string, InferenceEngine::Version> versions = ie->GetVersions(target_device);
ASSERT_EQ(versions.size(), 1);
ASSERT_EQ(versions.begin()->first, targetDevice);
ASSERT_EQ(versions.begin()->first, target_device);
auto version = versions.begin()->second;
IE_SUPPRESS_DEPRECATED_START
ASSERT_EQ(version.apiVersion.major, 2);
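For orientation, the contract pluginCurrentVersionIsCorrect checks is that Core::GetVersions() returns a map keyed by the resolved device name, with exactly one entry for a plain (non AUTO/MULTI/HETERO) device. A small hedged usage example (device string illustrative):

#include <ie_core.hpp>
#include <iostream>

int main() {
    InferenceEngine::Core ie;
    // One plugin answers for a plain device, so one map entry is expected.
    auto versions = ie.GetVersions("CPU");  // illustrative device name
    for (const auto& item : versions)
        std::cout << item.first << " plugin, API "
                  << item.second.apiVersion.major << "."
                  << item.second.apiVersion.minor << std::endl;  // deprecated field, as above
    return 0;
}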
@ -9,7 +9,7 @@
#include "common_test_utils/test_common.hpp"
#include "common_test_utils/common_utils.hpp"

#include "functional_test_utils/layer_test_utils/summary.hpp"
#include "functional_test_utils/summary/op_summary.hpp"
#include "functional_test_utils/ov_plugin_cache.hpp"

namespace ov {
@ -24,7 +24,7 @@ using OpImplParams = std::tuple<
class OpImplCheckTest : public testing::WithParamInterface<OpImplParams>,
public CommonTestUtils::TestsCommon {
protected:
LayerTestsUtils::Summary& summary = LayerTestsUtils::Summary::getInstance();
ov::test::utils::OpSummary& summary = ov::test::utils::OpSummary::getInstance();
std::shared_ptr<ov::Core> core = ov::test::utils::PluginCache::get().core();
std::shared_ptr<ov::Model> function;
std::string targetDevice;

@ -4,7 +4,7 @@

#pragma once

#include <functional_test_utils/layer_test_utils/summary.hpp>
#include <functional_test_utils/summary/op_summary.hpp>
#include <ngraph_functions/subgraph_builders.hpp>

namespace ov {
@ -16,7 +16,7 @@ OpGenerator getOpGeneratorMap();

static const std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Model>>> createFunctions() {
std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Model>>> res;
auto opsets = LayerTestsUtils::Summary::getInstance().getOpSets();
auto opsets = ov::test::utils::OpSummary::getInstance().getOpSets();
auto opGenerator = getOpGeneratorMap();
std::set<ngraph::NodeTypeInfo> opsInfo;
for (const auto& opset : opsets) {

@ -244,12 +244,17 @@ const char expected_serialized_model[] = R"V0G0N(

std::string ExecGraphSerializationTest::getTestCaseName(testing::TestParamInfo<std::string> obj) {
std::ostringstream result;
std::string targetDevice = obj.param;
result << "TargetDevice=" << targetDevice;
std::string target_device = obj.param;
std::replace(target_device.begin(), target_device.end(), ':', '.');
result << "TargetDevice=" << target_device;
return result.str();
}

void ExecGraphSerializationTest::SetUp() {
target_device = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();

const std::string XML_EXT = ".xml";
const std::string BIN_EXT = ".bin";

@ -257,11 +262,10 @@ void ExecGraphSerializationTest::SetUp() {

m_out_xml_path = model_name + XML_EXT;
m_out_bin_path = model_name + BIN_EXT;

deviceName = this->GetParam();
}

void ExecGraphSerializationTest::TearDown() {
APIBaseTest::TearDown();
CommonTestUtils::removeIRFiles(m_out_xml_path, m_out_bin_path);
}

@ -340,10 +344,10 @@ std::pair<bool, std::string> ExecGraphSerializationTest::compare_docs(const pugi
}

TEST_P(ExecGraphSerializationTest, ExecutionGraph) {
auto ie = PluginCache::get().ie(deviceName);
auto ie = PluginCache::get().ie(target_device);
InferenceEngine::Blob::Ptr a;
auto cnnNet = ie->ReadNetwork(serialize_test_model, a);
auto execNet = ie->LoadNetwork(cnnNet, deviceName);
auto execNet = ie->LoadNetwork(cnnNet, target_device);
auto execGraph = execNet.GetExecGraphInfo();
InferenceEngine::InferRequest req = execNet.CreateInferRequest();
execGraph.serialize(m_out_xml_path, m_out_bin_path);
@ -365,6 +369,7 @@ std::string ExecGraphUniqueNodeNames::getTestCaseName(testing::TestParamInfo<Lay
InferenceEngine::SizeVector inputShapes, newInputShapes;
std::string targetDevice;
std::tie(netPrecision, inputShapes, targetDevice) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '_');

std::ostringstream result;
result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_";
@ -375,11 +380,12 @@ std::string ExecGraphUniqueNodeNames::getTestCaseName(testing::TestParamInfo<Lay
}

void ExecGraphUniqueNodeNames::SetUp() {
SKIP_IF_CURRENT_TEST_IS_DISABLED();

std::vector<size_t> inputShape;
InferenceEngine::Precision netPrecision;
std::tie(netPrecision, inputShape, targetDevice) = this->GetParam();
std::tie(netPrecision, inputShape, target_device) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();

APIBaseTest::SetUp();

auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {inputShape});
@ -390,15 +396,11 @@ void ExecGraphUniqueNodeNames::SetUp() {
fnPtr = std::make_shared<ngraph::Function>(results, params, "SplitConvConcat");
}

void ExecGraphUniqueNodeNames::TearDown() {
fnPtr.reset();
}

TEST_P(ExecGraphUniqueNodeNames, CheckUniqueNodeNames) {
InferenceEngine::CNNNetwork cnnNet(fnPtr);

auto ie = PluginCache::get().ie(targetDevice);
auto execNet = ie->LoadNetwork(cnnNet, targetDevice);
auto ie = PluginCache::get().ie(target_device);
auto execNet = ie->LoadNetwork(cnnNet, target_device);

InferenceEngine::CNNNetwork execGraphInfo = execNet.GetExecGraphInfo();


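As a hedged recap of the round trip the ExecutionGraph test performs: compile the network, pull the runtime (execution) graph, serialize it to an IR pair, and let TearDown() delete the files. File paths below are illustrative:

#include <ie_core.hpp>
#include <string>

void serialize_exec_graph_sketch(const std::string& device) {
    InferenceEngine::Core ie;
    auto cnnNet = ie.ReadNetwork("model.xml");          // illustrative model path
    auto execNet = ie.LoadNetwork(cnnNet, device);
    auto execGraph = execNet.GetExecGraphInfo();        // graph as actually executed
    execGraph.serialize("exec_graph.xml", "exec_graph.bin");
    // The fixture then removes both files in TearDown() via removeIRFiles().
}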
@ -3,6 +3,7 @@
//

#include "behavior/executable_network/locale.hpp"
#include "functional_test_utils/summary/api_summary.hpp"

namespace BehaviorTestsDefinitions {

@ -24,15 +25,19 @@ inline std::shared_ptr<ngraph::Function> makeTestModel(std::vector<size_t> input

std::string CustomLocaleTest::getTestCaseName(const testing::TestParamInfo<LocaleParams> &obj) {
std::ostringstream results;
std::string deviceName, localeName;
std::tie(localeName, deviceName) = obj.param;
std::string targetDevice, localeName;
std::tie(localeName, targetDevice) = obj.param;
std::replace(localeName.begin(), localeName.end(), '-', '.');
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
results << "locale=" << localeName << "_"
<< "targetDevice=" << deviceName;
<< "targetDevice=" << targetDevice;
return results.str();
}

void CustomLocaleTest::SetUp() {
std::tie(localeName, deviceName) = GetParam();
std::tie(localeName, target_device) = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
testName = ::testing::UnitTest::GetInstance()->current_test_info()->name();
function = makeTestModel();
}
@ -45,9 +50,9 @@ TEST_P(CustomLocaleTest, CanLoadNetworkWithCustomLocale) {
GTEST_SKIP();
}

std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(deviceName);
std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(target_device);
InferenceEngine::CNNNetwork cnnNet(function);
ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, deviceName));
ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device));

std::locale::global(prev);
}

@ -25,9 +25,10 @@ std::string InferRequestVariableStateTest::getTestCaseName(const testing::TestPa
}

void InferRequestVariableStateTest::SetUp() {
std::tie(net, statesToQuery, deviceName, configuration) = GetParam();
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
std::tie(net, statesToQuery, deviceName, configuration) = GetParam();
IEInferRequestTestBase::SetUp();
}

InferenceEngine::ExecutableNetwork InferRequestVariableStateTest::PrepareNetwork() {

@ -11,21 +11,23 @@ namespace test {
namespace behavior {

std::string OVCompiledModelEmptyPropertiesTests::getTestCaseName(testing::TestParamInfo<std::string> obj) {
return "device_name=" + obj.param;
return "target_device=" + obj.param;
}

void OVCompiledModelEmptyPropertiesTests::SetUp() {
target_device = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
device_name = this->GetParam();
model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(device_name);
APIBaseTest::SetUp();
model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
}

std::string OVCompiledModelPropertiesTests::getTestCaseName(testing::TestParamInfo<PropertiesParams> obj) {
std::string device_name;
std::string targetDevice;
AnyMap properties;
std::tie(device_name, properties) = obj.param;
std::tie(targetDevice, properties) = obj.param;
std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
std::ostringstream result;
result << "device_name=" << device_name << "_";
result << "targetDevice=" << targetDevice << "_";
if (!properties.empty()) {
result << "properties=" << util::join(util::split(util::to_string(properties), ' '), "_");
}
@ -33,34 +35,36 @@ std::string OVCompiledModelPropertiesTests::getTestCaseName(testing::TestParamIn
}

void OVCompiledModelPropertiesTests::SetUp() {
std::tie(target_device, properties) = this->GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED();
std::tie(device_name, properties) = this->GetParam();
model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(device_name);
APIBaseTest::SetUp();
model = ov::test::behavior::getDefaultNGraphFunctionForTheDevice(target_device);
}

void OVCompiledModelPropertiesTests::TearDown() {
if (!properties.empty()) {
utils::PluginCache::get().reset();
}
APIBaseTest::TearDown();
}

TEST_P(OVCompiledModelEmptyPropertiesTests, CanCompileModelWithEmptyProperties) {
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, AnyMap{}));
OV_ASSERT_NO_THROW(core->compile_model(model, target_device, AnyMap{}));
}

TEST_P(OVCompiledModelPropertiesTests, CanCompileModelWithCorrectProperties) {
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, properties));
OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties));
}

TEST_P(OVCompiledModelPropertiesTests, CanUseCache) {
core->set_property(ov::cache_dir("./test_cache"));
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, properties));
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, properties));
OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties));
OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties));
CommonTestUtils::removeDir("./test_cache");
}

TEST_P(OVCompiledModelPropertiesTests, canCompileModelWithPropertiesAndCheckGetProperty) {
auto compiled_model = core->compile_model(model, device_name, properties);
auto compiled_model = core->compile_model(model, target_device, properties);
auto supported_properties = compiled_model.get_property(ov::supported_properties);
for (const auto& property_item : properties) {
if (util::contains(supported_properties, property_item.first)) {
@ -73,26 +77,26 @@ TEST_P(OVCompiledModelPropertiesTests, canCompileModelWithPropertiesAndCheckGetP
}

TEST_P(OVCompiledModelPropertiesIncorrectTests, CanNotCompileModelWithIncorrectProperties) {
ASSERT_THROW(core->compile_model(model, device_name, properties), ov::Exception);
ASSERT_THROW(core->compile_model(model, target_device, properties), ov::Exception);
}

TEST_P(OVCompiledModelPropertiesDefaultTests, CanCompileWithDefaultValueFromPlugin) {
std::vector<ov::PropertyName> supported_properties;
OV_ASSERT_NO_THROW(supported_properties = core->get_property(device_name, ov::supported_properties));
OV_ASSERT_NO_THROW(supported_properties = core->get_property(target_device, ov::supported_properties));
AnyMap default_rw_properties;
for (auto& supported_property : supported_properties) {
if (supported_property.is_mutable()) {
Any property;
OV_ASSERT_NO_THROW(property = core->get_property(device_name, supported_property));
OV_ASSERT_NO_THROW(property = core->get_property(target_device, supported_property));
default_rw_properties.emplace(supported_property, property);
std::cout << supported_property << ":" << property.as<std::string>() << std::endl;
}
}
OV_ASSERT_NO_THROW(core->compile_model(model, device_name, default_rw_properties));
OV_ASSERT_NO_THROW(core->compile_model(model, target_device, default_rw_properties));
}

TEST_P(OVCompiledModelPropertiesDefaultTests, CheckDefaultValues) {
auto compiled_model = core->compile_model(model, device_name);
auto compiled_model = core->compile_model(model, target_device);
std::vector<ov::PropertyName> supported_properties;
OV_ASSERT_NO_THROW(supported_properties = compiled_model.get_property(ov::supported_properties));
std::cout << "SUPPORTED PROPERTIES: " << std::endl;

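A hedged sketch of what CanUseCache exercises: with ov::cache_dir set, the first compile_model() call populates the blob cache and the second is expected to be served from it. Directory and device strings are illustrative:

#include <openvino/runtime/core.hpp>
#include <memory>
#include <string>

void cache_sketch(const std::shared_ptr<ov::Model>& model, const std::string& device) {
    ov::Core core;
    core.set_property(ov::cache_dir("./test_cache"));   // illustrative directory
    auto first = core.compile_model(model, device);     // compiles and writes the cache
    auto second = core.compile_model(model, device);    // expected to hit the cache
    // The test removes "./test_cache" afterwards to stay hermetic.
}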
@ -14,7 +14,7 @@ namespace test {
namespace behavior {

std::string OVInferRequestBatchedTests::getTestCaseName(const testing::TestParamInfo<std::string>& obj) {
return "targetDevice=" + obj.param;
return "target_device=" + obj.param;
}

std::string OVInferRequestBatchedTests::generateCacheDirName(const std::string& test_name) {
@ -29,8 +29,9 @@ std::string OVInferRequestBatchedTests::generateCacheDirName(const std::string&
}

void OVInferRequestBatchedTests::SetUp() {
target_device = GetParam();
SKIP_IF_CURRENT_TEST_IS_DISABLED()
targetDevice = GetParam();
APIBaseTest::SetUp();
m_cache_dir = generateCacheDirName(GetTestName());
}

@ -42,6 +43,7 @@ void OVInferRequestBatchedTests::TearDown() {
CommonTestUtils::removeFilesWithExt(m_cache_dir, "blob");
CommonTestUtils::removeDir(m_cache_dir);
}
APIBaseTest::TearDown();
}

std::shared_ptr<Model> OVInferRequestBatchedTests::create_n_inputs(size_t n, element::Type type,
@ -74,7 +76,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensorsBase) {
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N...");
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0);
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
// Create InferRequest
ov::InferRequest req;
req = execNet.create_infer_request();
@ -108,7 +110,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensorsAsync) {
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N...");
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0);
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
// Create InferRequest
ov::InferRequest req;
req = execNet.create_infer_request();
@ -143,7 +145,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_override_with_set) {
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N...");
std::vector<float> buffer(one_shape_size * batch, 4);
std::vector<float> buffer2(one_shape_size * batch, 5);
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
// Create InferRequest
ov::InferRequest req;
req = execNet.create_infer_request();
@ -179,8 +181,8 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensorsBase_Caching) {
auto one_shape_size = ov::shape_size(one_shape);
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "N...");
ie->set_property({{CONFIG_KEY(CACHE_DIR), m_cache_dir}});
auto execNet_no_cache = ie->compile_model(model, targetDevice);
auto execNet_cache = ie->compile_model(model, targetDevice);
auto execNet_no_cache = ie->compile_model(model, target_device);
auto execNet_cache = ie->compile_model(model, target_device);
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0);

@ -219,7 +221,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Multiple_Infer) {
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "N...");
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0);
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
// Create InferRequest
ov::InferRequest req;
req = execNet.create_infer_request();
@ -256,7 +258,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Can_Infer_Dynamic) {
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, PartialShape({-1, 2, 2, 2}), "N...");
// Allocate 8 chunks, set 'user tensors' to 0, 2, 4, 6 chunks
std::vector<float> buffer(one_shape_size * batch * 2, 0);
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
// Create InferRequest
ov::InferRequest req;
req = execNet.create_infer_request();
@ -292,7 +294,7 @@ TEST_P(OVInferRequestBatchedTests, SetTensors_Batch1) {
auto one_shape = Shape{1, 3, 10, 10};
auto one_shape_size = ov::shape_size(one_shape);
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, one_shape, "N...");
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
// Create InferRequest
ov::InferRequest req;
req = execNet.create_infer_request();
@ -323,7 +325,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Get_Tensor_Not_Allowed) {
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -337,7 +339,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Batch_No_Batch) {
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "DCHW");
const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -350,7 +352,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_No_Name) {
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "undefined";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -363,7 +365,7 @@ TEST_P(OVInferRequestBatchedTests, SetTensors_No_Name) {
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "undefined";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -376,7 +378,7 @@ TEST_P(OVInferRequestBatchedTests, SetTensors_Friendly_Name) {
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "input0";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -388,7 +390,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_No_index) {
auto one_shape = Shape{1, 3, 3, 3};
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -400,7 +402,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_no_name_multiple_inputs) {
auto one_shape = Shape{1, 3, 3, 3};
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW");
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch, ov::Tensor(element::f32, one_shape));
@ -413,7 +415,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Incorrect_count) {
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch + 1, ov::Tensor(element::f32, one_shape));
@ -425,7 +427,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Empty_Array) {
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors;
@ -436,7 +438,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_diff_batches) {
auto batch_shape = Shape{3, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors;
@ -451,7 +453,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Correct_all) {
auto batch_shape = Shape{2, 3, 3, 3};
std::vector<float> buffer(ov::shape_size(batch_shape), 1);
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors;
@ -468,8 +470,8 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Cache_CheckDeepCopy) {
std::vector<float> buffer_out(ov::shape_size(batch_shape), 1);
auto model = OVInferRequestBatchedTests::create_n_inputs(2, element::f32, batch_shape, "NCHW");
ie->set_property({{CONFIG_KEY(CACHE_DIR), m_cache_dir}});
auto execNet_no_cache = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, targetDevice);
auto execNet_no_cache = ie->compile_model(model, target_device);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
model->input(0).set_names({"updated_input0"}); // Change param name of original model
@ -490,7 +492,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Incorrect_tensor_element_type
auto batch_shape = Shape{batch, 3, 3, 3};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape));
@ -504,7 +506,7 @@ TEST_P(OVInferRequestBatchedTests, SetInputTensors_Incorrect_tensor_shape) {
auto batch_shape = Shape{batch, 4, 4, 4};
auto model = OVInferRequestBatchedTests::create_n_inputs(1, element::f32, batch_shape, "NCHW");
const std::string tensor_name = "tensor_input0";
auto execNet = ie->compile_model(model, targetDevice);
auto execNet = ie->compile_model(model, target_device);
ov::InferRequest req;
req = execNet.create_infer_request();
std::vector<ov::Tensor> tensors(batch - 1, ov::Tensor(element::f32, one_shape));

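For orientation across the batched tests above: instead of one pre-batched input, the caller hands set_tensors() a vector of single-sample tensors, one per batch element, and the negative cases check name, count, type and shape mismatches. A hedged usage sketch (the tensor name and shapes are illustrative):

#include <openvino/runtime/core.hpp>
#include <memory>
#include <vector>

void set_tensors_sketch(const std::shared_ptr<ov::Model>& model) {
    ov::Core core;
    auto compiled = core.compile_model(model, "TEMPLATE");  // illustrative device
    auto req = compiled.create_infer_request();
    const size_t batch = 4;                                 // must match the model's N
    std::vector<ov::Tensor> chunks(
        batch, ov::Tensor(ov::element::f32, ov::Shape{1, 3, 3, 3}));
    req.set_tensors("tensor_input0", chunks);               // assumed input name
    req.infer();                                            // plugin stitches or copies
}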
@ -1,132 +0,0 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <future>

#include "shared_test_classes/subgraph/basic_lstm.hpp"
#include "behavior/ov_infer_request/callback.hpp"

namespace ov {
namespace test {
namespace behavior {

std::string OVInferRequestCallbackTests::getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj) {
return OVInferRequestTests::getTestCaseName(obj);
}

TEST_P(OVInferRequestCallbackTests, canCallAsyncWithCompletionCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
bool is_called = false;
OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) {
// HSD_1805940120: Wait on starting callback return HDDL_ERROR_INVAL_TASK_HANDLE
ASSERT_EQ(exception_ptr, nullptr);
is_called = true;
}));
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
ASSERT_TRUE(is_called);
}

TEST_P(OVInferRequestCallbackTests, syncInferDoesNotCallCompletionCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
bool is_called = false;
req.set_callback([&] (std::exception_ptr exception_ptr) {
ASSERT_EQ(nullptr, exception_ptr);
is_called = true;
});
req.infer();
ASSERT_FALSE(is_called);
}

// test that can wait all callbacks on dtor
TEST_P(OVInferRequestCallbackTests, canStartSeveralAsyncInsideCompletionCallbackWithSafeDtor) {
const int NUM_ITER = 10;
struct TestUserData {
std::atomic<int> numIter = {0};
std::promise<bool> promise;
};
TestUserData data;

ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.set_callback([&] (std::exception_ptr exception_ptr) {
if (exception_ptr) {
data.promise.set_exception(exception_ptr);
} else {
if (data.numIter.fetch_add(1) != NUM_ITER) {
req.start_async();
} else {
data.promise.set_value(true);
}
}
}));
auto future = data.promise.get_future();
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
future.wait();
auto callbackStatus = future.get();
ASSERT_TRUE(callbackStatus);
auto dataNumIter = data.numIter - 1;
ASSERT_EQ(NUM_ITER, dataNumIter);
}

TEST_P(OVInferRequestCallbackTests, returnGeneralErrorIfCallbackThrowException) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.set_callback([] (std::exception_ptr) {
OPENVINO_UNREACHABLE("Throw");
}));
OV_ASSERT_NO_THROW(req.start_async());
ASSERT_THROW(req.wait(), ov::Exception);
}

TEST_P(OVInferRequestCallbackTests, ReturnResultNotReadyFromWaitInAsyncModeForTooSmallTimeout) {
// GetNetwork(3000, 380) make inference around 20ms on GNA SW
// so increases chances for getting RESULT_NOT_READY
OV_ASSERT_NO_THROW(execNet = core->compile_model(
SubgraphTestsDefinitions::Basic_LSTM_S::GetNetwork(300, 38), targetDevice, configuration));
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
std::promise<std::chrono::system_clock::time_point> callbackTimeStamp;
auto callbackTimeStampFuture = callbackTimeStamp.get_future();
// add a callback to the request and capture the timestamp
OV_ASSERT_NO_THROW(req.set_callback([&](std::exception_ptr exception_ptr) {
if (exception_ptr) {
callbackTimeStamp.set_exception(exception_ptr);
} else {
callbackTimeStamp.set_value(std::chrono::system_clock::now());
}
}));
OV_ASSERT_NO_THROW(req.start_async());
bool ready = false;
OV_ASSERT_NO_THROW(ready = req.wait_for({}));
// get timestamp taken AFTER return from the wait(STATUS_ONLY)
const auto afterWaitTimeStamp = std::chrono::system_clock::now();
// IF the callback timestamp is larger than the afterWaitTimeStamp
// then we should observe false ready result
if (afterWaitTimeStamp < callbackTimeStampFuture.get()) {
ASSERT_FALSE(ready);
}
OV_ASSERT_NO_THROW(req.wait());
}

TEST_P(OVInferRequestCallbackTests, ImplDoesNotCopyCallback) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
{
auto somePtr = std::make_shared<int>(42);
OV_ASSERT_NO_THROW(req.set_callback([somePtr] (std::exception_ptr exception_ptr) {
ASSERT_EQ(nullptr, exception_ptr);
ASSERT_EQ(1, somePtr.use_count());
}));
}
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
}

} // namespace behavior
} // namespace test
} // namespace ov
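The deleted canStartSeveralAsyncInsideCompletionCallbackWithSafeDtor test above relies on a re-arming callback: each completion either schedules another start_async() or fulfills a promise so the waiter can finish. A hedged condensation of just that control flow:

#include <atomic>
#include <future>

// Condensed re-arming pattern from the removed test: on_complete() is
// what the std::exception_ptr callback would do on success.
struct RearmSketch {
    std::atomic<int> iterations{0};
    std::promise<bool> done;
    int limit = 10;

    template <typename StartAsync>
    void on_complete(StartAsync&& start_async) {
        if (iterations.fetch_add(1) != limit)
            start_async();          // schedule the next run from the callback
        else
            done.set_value(true);   // final iteration: release the waiting test
    }
};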
@ -1,66 +0,0 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <future>

#include "behavior/ov_infer_request/cancellation.hpp"
#include "openvino/runtime/exception.hpp"

namespace ov {
namespace test {
namespace behavior {

std::string OVInferRequestCancellationTests::getTestCaseName(const testing::TestParamInfo<InferRequestParams>& obj) {
return OVInferRequestTests::getTestCaseName(obj);
}

TEST_P(OVInferRequestCancellationTests, canCancelAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.cancel());
try {
req.wait();
} catch (const ov::Cancelled&) {
SUCCEED();
}
}

TEST_P(OVInferRequestCancellationTests, CanResetAfterCancelAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.cancel());
try {
req.wait();
} catch (const ov::Cancelled&) {
SUCCEED();
}
OV_ASSERT_NO_THROW(req.start_async());
OV_ASSERT_NO_THROW(req.wait());
}

TEST_P(OVInferRequestCancellationTests, canCancelBeforeAsyncRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
OV_ASSERT_NO_THROW(req.cancel());
}

TEST_P(OVInferRequestCancellationTests, canCancelInferRequest) {
ov::InferRequest req;
OV_ASSERT_NO_THROW(req = execNet.create_infer_request());
auto infer = std::async(std::launch::async, [&req]{req.infer();});
while (!req.wait_for({})) {
}
OV_ASSERT_NO_THROW(req.cancel());
try {
infer.get();
} catch (const ov::Cancelled&) {
SUCCEED();
}
}

} // namespace behavior
} // namespace test
} // namespace ov

@ -37,9 +37,10 @@ namespace behavior {
|
||||
std::string OVInferRequestDynamicTests::getTestCaseName(testing::TestParamInfo<OVInferRequestDynamicParams> obj) {
|
||||
std::shared_ptr<Model> func;
|
||||
std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>> inOutShapes;
|
||||
std::string targetDevice;
|
||||
std::string target_device;
|
||||
ov::AnyMap configuration;
|
||||
std::tie(func, inOutShapes, targetDevice, configuration) = obj.param;
|
||||
std::tie(func, inOutShapes, target_device, configuration) = obj.param;
|
||||
std::replace(target_device.begin(), target_device.end(), ':', '.');
|
||||
std::ostringstream result;
|
||||
result << "function=" << func->get_friendly_name() << "_";
|
||||
result << "inOutShape=(";
|
||||
@ -47,7 +48,7 @@ std::string OVInferRequestDynamicTests::getTestCaseName(testing::TestParamInfo<O
|
||||
result << "(" << CommonTestUtils::vec2str(inOutShape.first) << "_" << CommonTestUtils::vec2str(inOutShape.second) << ")";
|
||||
}
|
||||
result << ")_";
|
||||
result << "targetDevice=" << targetDevice << "_";
|
||||
result << "targetDevice=" << target_device << "_";
|
||||
if (!configuration.empty()) {
|
||||
for (auto& configItem : configuration) {
|
||||
result << "configItem=" << configItem.first << "_";
|
||||
@ -59,8 +60,9 @@ std::string OVInferRequestDynamicTests::getTestCaseName(testing::TestParamInfo<O
|
||||
}
|
||||
|
||||
void OVInferRequestDynamicTests::SetUp() {
|
||||
std::tie(function, inOutShapes, target_device, configuration) = this->GetParam();
|
||||
SKIP_IF_CURRENT_TEST_IS_DISABLED()
|
||||
std::tie(function, inOutShapes, targetDevice, configuration) = this->GetParam();
|
||||
APIBaseTest::SetUp();
|
||||
}
|
||||
|
||||
bool OVInferRequestDynamicTests::checkOutput(const ov::runtime::Tensor& in, const ov::runtime::Tensor& actual) {
|
||||
@ -81,13 +83,6 @@ bool OVInferRequestDynamicTests::checkOutput(const ov::runtime::Tensor& in, cons
|
||||
return result;
|
||||
}
|
||||
|
||||
void OVInferRequestDynamicTests::TearDown() {
|
||||
if (!configuration.empty()) {
|
||||
PluginCache::get().reset();
|
||||
}
|
||||
function.reset();
|
||||
}
|
||||
|
||||
/*
|
||||
We have to check that we don't get a segmentation fault during
|
||||
inference if we set the first two times to the same shape and
|
||||
@ -106,7 +101,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetwork) {
|
||||
};
|
||||
OV_ASSERT_NO_THROW(function->reshape(shapes));
|
||||
// Load ov::Model to target plugins
|
||||
auto execNet = ie->compile_model(function, targetDevice, configuration);
|
||||
auto execNet = ie->compile_model(function, target_device, configuration);
|
||||
// Create InferRequest
|
||||
ov::InferRequest req;
|
||||
const std::string outputname = function->outputs().back().get_any_name();
@ -127,7 +122,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkSetUnexpectedOutputTensorB
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::runtime::Tensor tensor, otensor;
@ -152,7 +147,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkSetOutputTensorPreAllocate
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::runtime::Tensor tensor;
@ -177,7 +172,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkSetOutputShapeBeforeInfer)
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::runtime::Tensor tensor, otensor;
@ -199,7 +194,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithoutSetShape) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor;
@ -213,7 +208,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkBoundWithoutSetShape) {
shapes[tensor_name] = {ov::Dimension(0, 5), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor;
@ -230,7 +225,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithGetTensor) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor, otensor;
@ -260,7 +255,7 @@ TEST_P(OVInferRequestDynamicTests, InferUpperBoundNetworkWithGetTensor) {
shapes[tensor_name] = {ov::Dimension(0, 19), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor, otensor;
@ -288,7 +283,7 @@ TEST_P(OVInferRequestDynamicTests, InferFullyDynamicNetworkWithGetTensor) {
shapes[tensor_name] = ov::PartialShape::dynamic();
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor, otensor;
@ -317,7 +312,7 @@ TEST_P(OVInferRequestDynamicTests, InferOutOfRangeShapeNetworkWithGetTensorLower
shapes[tensor_name] = {ov::Dimension(2, 3), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor;
@ -336,7 +331,7 @@ TEST_P(OVInferRequestDynamicTests, InferOutOfRangeShapeNetworkWithGetTensorUpper
shapes[tensor_name] = {ov::Dimension(1, 2), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor;
@ -357,7 +352,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithGetTensor2times) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor;
@ -392,7 +387,7 @@ TEST_P(OVInferRequestDynamicTests, GetSameTensor2times) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor;
@ -412,7 +407,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithSetTensor) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor(ov::element::f32, refShape);
@ -436,7 +431,7 @@ TEST_P(OVInferRequestDynamicTests, InferFullyDynamicNetworkWithSetTensor) {
shapes[tensor_name] = ov::PartialShape::dynamic();
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor(ov::element::f32, refShape), otensor;
@ -469,7 +464,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithSetTensor2times) {
OV_ASSERT_NO_THROW(function->reshape(shapes));
const std::string outputName = function->outputs().back().get_any_name();
// Load ov::Model to target plugins
auto execNet = ie->compile_model(function, targetDevice, configuration);
auto execNet = ie->compile_model(function, target_device, configuration);
// Create InferRequest
ov::InferRequest req;
ov::Tensor tensor(ov::element::f32, refShape);
@ -504,7 +499,7 @@ TEST_P(OVInferRequestDynamicTests, InferDynamicNetworkWithLocalCore) {
shapes[tensor_name] = {ov::Dimension::dynamic(), 4, 20, 20};
OV_ASSERT_NO_THROW(function->reshape(shapes));
// Load ov::Model to target plugins
compiled_model = local_core.compile_model(function, targetDevice, configuration);
compiled_model = local_core.compile_model(function, target_device, configuration);
}
// Create InferRequest
OV_ASSERT_NO_THROW(compiled_model.create_infer_request());
@ -522,7 +517,7 @@ TEST_P(OVNotSupportRequestDynamicTests, InferDynamicNotSupported) {
const std::string outputName = function->outputs().back().get_any_name();
// Load ov::Function to target plugins
ov::CompiledModel execNet;
ASSERT_THROW((execNet = ie->compile_model(function, targetDevice, configuration)), ov::Exception);
ASSERT_THROW((execNet = ie->compile_model(function, target_device, configuration)), ov::Exception);
}
} // namespace behavior
} // namespace test

@ -17,12 +17,10 @@ std::string OVInferRequestInferenceTests::getTestCaseName(
}

void OVInferRequestInferenceTests::SetUp() {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
m_param = std::get<0>(GetParam());
m_device_name = std::get<1>(GetParam());
}

void OVInferRequestInferenceTests::TearDown() {
target_device = std::get<1>(GetParam());
SKIP_IF_CURRENT_TEST_IS_DISABLED()
APIBaseTest::SetUp();
}
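The flattened rendering makes this hunk read as if TearDown acquired SetUp's body; read as old-versus-new, the change is presumably that m_device_name is retired in favor of the shared target_device and the separate TearDown goes away. A sketch of the resulting SetUp (an interpretation of the hunk, not a verbatim copy):

    void OVInferRequestInferenceTests::SetUp() {
        m_param = std::get<0>(GetParam());
        target_device = std::get<1>(GetParam());  // replaces the old m_device_name
        SKIP_IF_CURRENT_TEST_IS_DISABLED()
        APIBaseTest::SetUp();  // registers the run with the conformance report
    }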

std::shared_ptr<Model> OVInferRequestInferenceTests::create_n_inputs(size_t n,
@ -50,7 +48,7 @@ std::shared_ptr<Model> OVInferRequestInferenceTests::create_n_inputs(size_t n,
TEST_P(OVInferRequestInferenceTests, Inference_ROI_Tensor) {
auto shape_size = ov::shape_size(m_param.m_shape);
auto model = OVInferRequestInferenceTests::create_n_inputs(1, element::f32, m_param.m_shape);
auto execNet = ie->compile_model(model, m_device_name);
auto execNet = ie->compile_model(model, target_device);
// Create InferRequest
ov::InferRequest req;
req = execNet.create_infer_request();

@ -83,9 +83,9 @@ std::shared_ptr<ov::Model> OVInferenceChaining::getThirdStaticFunction(const ov:

void OVInferenceChaining::Run() {
ov::CompiledModel execNet0, execNet1, execNet2;
OV_ASSERT_NO_THROW(execNet0 = core->compile_model(function0, targetDevice, configuration));
OV_ASSERT_NO_THROW(execNet1 = core->compile_model(function1, targetDevice, configuration));
OV_ASSERT_NO_THROW(execNet2 = core->compile_model(function2, targetDevice, configuration));
OV_ASSERT_NO_THROW(execNet0 = core->compile_model(function0, target_device, configuration));
OV_ASSERT_NO_THROW(execNet1 = core->compile_model(function1, target_device, configuration));
OV_ASSERT_NO_THROW(execNet2 = core->compile_model(function2, target_device, configuration));

ov::InferRequest r0, r1, r2;
OV_ASSERT_NO_THROW(r0 = execNet0.create_infer_request());
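OVInferenceChaining::Run compiles three functions on the same target_device and then wires their infer requests together. A condensed two-stage sketch of the chaining idea (the model paths and device are placeholders; the suite builds its models programmatically):

    #include <openvino/openvino.hpp>

    int main() {
        ov::Core core;
        auto exec0 = core.compile_model(core.read_model("stage0.xml"), "CPU");
        auto exec1 = core.compile_model(core.read_model("stage1.xml"), "CPU");
        auto r0 = exec0.create_infer_request();
        auto r1 = exec1.create_infer_request();
        r0.infer();
        // Hand stage0's output tensor straight to stage1's input.
        r1.set_input_tensor(r0.get_output_tensor());
        r1.infer();
    }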
Some files were not shown because too many files have changed in this diff.