Tests for dynamic preprocessing in SetBlob (#5798)
* Corrected tests to match CVS-53713
* Fixed tests configs
* Skip tests on GPU
* Commented condition inside main SetBlob because of MYRIAD
* Adopted tests
parent 91fd2574df
commit 26bfa6f0ac
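Most of the hunks below rework GoogleTest value-parameterized instantiations into a one-argument-per-line style and add an instantiation for PreprocessDynamicallyInSetBlobTest. As a reference for the pattern only, here is a minimal self-contained sketch of how INSTANTIATE_TEST_CASE_P and ::testing::Combine fit together; the suite name and parameter values are illustrative and are not taken from this commit.

#include <gtest/gtest.h>

#include <string>
#include <tuple>

// Hypothetical suite: each test instance receives a (precision, device) pair.
class ExampleParamTest : public ::testing::TestWithParam<std::tuple<std::string, std::string>> {};

TEST_P(ExampleParamTest, ParamsAreForwarded) {
    const std::string precision = std::get<0>(GetParam());
    const std::string device = std::get<1>(GetParam());
    EXPECT_FALSE(precision.empty());
    EXPECT_FALSE(device.empty());
}

// Combine() yields the cartesian product of the generators, so this registers
// two instances (FP32/TEMPLATE and FP16/TEMPLATE) under the smoke_Example prefix.
INSTANTIATE_TEST_CASE_P(smoke_Example, ExampleParamTest,
    ::testing::Combine(
        ::testing::Values(std::string("FP32"), std::string("FP16")),
        ::testing::Values(std::string("TEMPLATE"))));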
@@ -14,7 +14,7 @@ addIeTargetTest(
IE::funcSharedTests
INCLUDES
"${IE_MAIN_TEMPLATE_PLUGIN_SOURCE_DIR}/include"
ADD_CLANG_FORMAT
ADD_CPPLINT
LABELS
TEMPLATE
)
@@ -4,4 +4,5 @@
#include "functional_test_utils/core_config.hpp"
void CoreConfiguration(LayerTestsUtils::LayerTestsCommon* test) {}
void CoreConfiguration(LayerTestsUtils::LayerTestsCommon* test) {
}
@@ -7,14 +7,19 @@
using namespace LayerTestsDefinitions;
namespace {
static const std::vector<ngraph::element::Type> precisionsTemplate = {
ngraph::element::f32,
};
static const std::vector<ngraph::element::Type> precisionsTemplate = {
ngraph::element::f32,
};
static const std::vector<std::size_t> batchSizesTemplate = {1, 2};
static const std::vector<std::size_t> batchSizesTemplate = {
1, 2
};
INSTANTIATE_TEST_CASE_P(smoke_CachingSupportCase_Template, LoadNetworkCacheTestBase,
::testing::Combine(::testing::ValuesIn(LoadNetworkCacheTestBase::getStandardFunctions()), ::testing::ValuesIn(precisionsTemplate),
::testing::ValuesIn(batchSizesTemplate), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
LoadNetworkCacheTestBase::getTestCaseName);
} // namespace
INSTANTIATE_TEST_CASE_P(smoke_CachingSupportCase_Template, LoadNetworkCacheTestBase,
::testing::Combine(
::testing::ValuesIn(LoadNetworkCacheTestBase::getStandardFunctions()),
::testing::ValuesIn(precisionsTemplate),
::testing::ValuesIn(batchSizesTemplate),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
LoadNetworkCacheTestBase::getTestCaseName);
} // namespace
@@ -2,17 +2,19 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/config.hpp"
#include <template/template_config.hpp>
#include "multi-device/multi_device_config.hpp"
#include "behavior/config.hpp"
#include <template/template_config.hpp>
using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16};
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<std::map<std::string, std::string>> configs = {
{{TEMPLATE_CONFIG_KEY(THROUGHPUT_STREAMS), InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}},

@@ -25,23 +27,32 @@ const std::vector<std::map<std::string, std::string>> inconfigs = {
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigTests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(inconfigs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(inconfigs)),
IncorrectConfigTests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, IncorrectConfigAPITests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(inconfigs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(inconfigs)),
IncorrectConfigAPITests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, CorrectConfigAPITests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
CorrectConfigAPITests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_Multi_BehaviorTests, CorrectConfigTests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
CorrectConfigAPITests::getTestCaseName);
} // namespace
} // namespace
@@ -2,12 +2,12 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/core_integration.hpp"
#include <string>
#include <utility>
#include <string>
#include <vector>
#include "behavior/core_integration.hpp"
using namespace BehaviorTestsDefinitions;
namespace {
@@ -16,31 +16,54 @@ namespace {
// IE Class Common tests with <pluginName, deviceName params>
//
INSTANTIATE_TEST_CASE_P(smoke_IEClassBasicTestP, IEClassBasicTestP, ::testing::Values(std::make_pair("templatePlugin", CommonTestUtils::DEVICE_TEMPLATE)));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassBasicTestP, IEClassBasicTestP,
::testing::Values(std::make_pair("templatePlugin", CommonTestUtils::DEVICE_TEMPLATE)));
INSTANTIATE_TEST_CASE_P(smoke_IEClassNetworkTestP, IEClassNetworkTestP, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassNetworkTestP, IEClassNetworkTestP,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
//
// IE Class GetMetric
//
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetMetricTest, IEClassGetMetricTest_SUPPORTED_CONFIG_KEYS, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetMetricTest, IEClassGetMetricTest_SUPPORTED_CONFIG_KEYS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetMetricTest, IEClassGetMetricTest_SUPPORTED_METRICS, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetMetricTest, IEClassGetMetricTest_SUPPORTED_METRICS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetMetricTest, IEClassGetMetricTest_AVAILABLE_DEVICES, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetMetricTest, IEClassGetMetricTest_AVAILABLE_DEVICES,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetMetricTest, IEClassGetMetricTest_FULL_DEVICE_NAME, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetMetricTest, IEClassGetMetricTest_FULL_DEVICE_NAME,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetMetricTest, IEClassGetMetricTest_OPTIMIZATION_CAPABILITIES, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetMetricTest, IEClassGetMetricTest_OPTIMIZATION_CAPABILITIES,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetMetricTest, IEClassGetMetricTest_RANGE_FOR_ASYNC_INFER_REQUESTS, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetMetricTest, IEClassGetMetricTest_RANGE_FOR_ASYNC_INFER_REQUESTS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetMetricTest, IEClassGetMetricTest_ThrowUnsupported, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetMetricTest, IEClassGetMetricTest_ThrowUnsupported,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetConfigTest, IEClassGetConfigTest_ThrowUnsupported, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetConfigTest, IEClassGetConfigTest_ThrowUnsupported,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetAvailableDevices, IEClassGetAvailableDevices,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetAvailableDevices, IEClassGetAvailableDevices, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
//
// IE Class SetConfig
@@ -88,7 +111,9 @@ TEST_F(IEClassSetConfigTestHETERO, smoke_SetConfigNoThrow) {
// IE Class GetConfig
//
INSTANTIATE_TEST_CASE_P(smoke_IEClassGetConfigTest, IEClassGetConfigTest, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassGetConfigTest, IEClassGetConfigTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
using IEClassGetConfigTestTEMPLATE = IEClassNetworkTest;

@@ -100,7 +125,7 @@ TEST_F(IEClassGetConfigTestTEMPLATE, smoke_GetConfigNoThrow) {
ASSERT_NO_THROW(p = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS)));
std::vector<std::string> configValues = p;
for (auto&& confKey : configValues) {
for (auto &&confKey : configValues) {
if (CONFIG_KEY(DEVICE_ID) == confKey) {
std::string defaultDeviceID = ie.GetConfig(deviceName, CONFIG_KEY(DEVICE_ID));
std::cout << CONFIG_KEY(DEVICE_ID) << " : " << defaultDeviceID << std::endl;
@@ -118,37 +143,48 @@ TEST_F(IEClassGetConfigTestTEMPLATE, smoke_GetConfigNoThrow) {
// Executable Network GetMetric
//
INSTANTIATE_TEST_CASE_P(smoke_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
INSTANTIATE_TEST_CASE_P(smoke_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_SUPPORTED_METRICS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
INSTANTIATE_TEST_CASE_P(smoke_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_NETWORK_NAME,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_NETWORK_NAME,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
INSTANTIATE_TEST_CASE_P(smoke_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassExecutableNetworkGetMetricTest, IEClassExecutableNetworkGetMetricTest_OPTIMAL_NUMBER_OF_INFER_REQUESTS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
INSTANTIATE_TEST_CASE_P(smoke_IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported, IEClassExecutableNetworkGetMetricTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported, IEClassExecutableNetworkGetMetricTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "MULTI:TEMPLATE", "HETERO:TEMPLATE"));
//
// Executable Network GetConfig / SetConfig
//
INSTANTIATE_TEST_CASE_P(smoke_IEClassExecutableNetworkGetConfigTest, IEClassExecutableNetworkGetConfigTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassExecutableNetworkGetConfigTest, IEClassExecutableNetworkGetConfigTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassExecutableNetworkSetConfigTest, IEClassExecutableNetworkSetConfigTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassExecutableNetworkSetConfigTest, IEClassExecutableNetworkSetConfigTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
// IE Class Query network
INSTANTIATE_TEST_CASE_P(smoke_IEClassQueryNetworkTest, IEClassQueryNetworkTest, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassQueryNetworkTest, IEClassQueryNetworkTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
// IE Class Load network
INSTANTIATE_TEST_CASE_P(smoke_IEClassLoadNetworkTest, IEClassLoadNetworkTest, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassLoadNetworkTest, IEClassLoadNetworkTest,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
//
// Hetero Executable Network GetMetric
@@ -156,17 +192,21 @@ INSTANTIATE_TEST_CASE_P(smoke_IEClassLoadNetworkTest, IEClassLoadNetworkTest, ::
#ifdef ENABLE_MKL_DNN
INSTANTIATE_TEST_CASE_P(smoke_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_CONFIG_KEYS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_SUPPORTED_METRICS,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_NETWORK_NAME,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(smoke_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
INSTANTIATE_TEST_CASE_P(
smoke_IEClassHeteroExecutableNetworlGetMetricTest, IEClassHeteroExecutableNetworkGetMetricTest_TARGET_FALLBACK,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE));
#endif // ENABLE_MKL_DNN
} // namespace
} // namespace
@@ -8,20 +8,32 @@ using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<std::vector<int>> orders = {
const std::vector<std::vector<int >> orders = {
// 0 - plugin
// 1 - executable_network
// 2 - infer_request
{0, 1, 2}, {0, 2, 1}, {1, 0, 2}, {1, 2, 0}, {2, 0, 1}, {2, 1, 0}};
{0, 1, 2},
{0, 2, 1},
{1, 0, 2},
{1, 2, 0},
{2, 0, 1},
{2, 1, 0}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, HoldersTest, ::testing::Combine(::testing::Values(CommonTestUtils::DEVICE_TEMPLATE), ::testing::ValuesIn(orders)),
HoldersTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, HoldersTest,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(orders)),
HoldersTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, HoldersTestImportNetwork,
::testing::Combine(::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "HETERO:TEMPLATE"), ::testing::ValuesIn(orders)),
HoldersTest::getTestCaseName);
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "HETERO:TEMPLATE"),
::testing::ValuesIn(orders)),
HoldersTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, HoldersTestOnImportedNetwork, ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "HETERO:TEMPLATE"),
HoldersTestOnImportedNetwork::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, HoldersTestOnImportedNetwork,
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "HETERO:TEMPLATE"),
HoldersTestOnImportedNetwork::getTestCaseName);
} // namespace
@@ -2,21 +2,28 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/exec_graph_info.hpp"
#include <vector>
#include "behavior/exec_graph_info.hpp"
using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16};
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, ExecGraphTests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
ExecGraphTests::getTestCaseName);
} // namespace
@@ -2,21 +2,28 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/infer_request.hpp"
#include <vector>
#include "behavior/infer_request.hpp"
using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16};
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestTests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
InferRequestTests::getTestCaseName);
} // namespace
@@ -2,20 +2,27 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/infer_request_callback.hpp"
#include <vector>
#include "behavior/infer_request_callback.hpp"
using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16};
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, CallbackTests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
CallbackTests::getTestCaseName);
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
CallbackTests::getTestCaseName);
} // namespace
@@ -2,21 +2,28 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/infer_request_config.hpp"
#include <vector>
#include "behavior/infer_request_config.hpp"
using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16};
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferConfigTests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
InferConfigTests::getTestCaseName);
} // namespace
@@ -2,21 +2,28 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/infer_request_input.hpp"
#include "multi-device/multi_device_config.hpp"
#include "behavior/infer_request_input.hpp"
using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16};
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestInputTests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
InferRequestInputTests::getTestCaseName);
} // namespace
@@ -2,21 +2,28 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/infer_request_output.hpp"
#include "multi-device/multi_device_config.hpp"
#include "behavior/infer_request_output.hpp"
using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16};
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestOutputTests,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
InferRequestOutputTests::getTestCaseName);
} // namespace
@@ -8,16 +8,31 @@ using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
const std::vector<InferenceEngine::Layout> Layout = {InferenceEngine::Layout::NCHW, InferenceEngine::Layout::CHW, InferenceEngine::Layout::NC,
InferenceEngine::Layout::C};
const std::vector<InferenceEngine::Layout> Layout = {
InferenceEngine::Layout::NCHW,
InferenceEngine::Layout::CHW,
InferenceEngine::Layout::NC,
InferenceEngine::Layout::C
};
const std::vector<std::vector<size_t>> inputShapes = {{1, 3, 16, 16}, {3, 32, 16}, {1, 3}, {3}};
const std::vector<std::vector<size_t>> inputShapes = {
{ 1, 3, 16, 16 },
{ 3, 32, 16 },
{ 1, 3 },
{ 3 }
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, LayoutTest,
::testing::Combine(::testing::Values(InferenceEngine::Precision::FP32), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs), ::testing::ValuesIn(Layout), ::testing::ValuesIn(inputShapes)),
::testing::Combine(
::testing::Values(InferenceEngine::Precision::FP32),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs),
::testing::ValuesIn(Layout),
::testing::ValuesIn(inputShapes)),
LayoutTest::getTestCaseName);
} // namespace
@@ -2,30 +2,39 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/preprocessing.hpp"
#include "multi-device/multi_device_config.hpp"
#include "behavior/preprocessing.hpp"
using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> inputPrecisions = {InferenceEngine::Precision::U8, InferenceEngine::Precision::FP32};
const std::vector<InferenceEngine::Precision> inputPrecisions = {
InferenceEngine::Precision::U8,
InferenceEngine::Precision::FP32
};
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
INSTANTIATE_TEST_CASE_P(smoke_PreprocessingPrecisionConvertTestsViaSetInput, PreprocessingPrecisionConvertTest,
::testing::Combine(::testing::ValuesIn(inputPrecisions),
::testing::Values(4), // Number of input tensor channels
::testing::Values(true), // Use SetInput
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE), ::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(inputPrecisions),
::testing::Values(4), // Number of input tensor channels
::testing::Values(true), // Use SetInput
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
PreprocessingPrecisionConvertTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_PreprocessingPrecisionConvertTestsViaGetBlob, PreprocessingPrecisionConvertTest,
::testing::Combine(::testing::ValuesIn(inputPrecisions),
::testing::Values(4), // Number of input tensor channels (blob_copy only supports 4d and 5d tensors)
::testing::Values(false), // use GetBlob
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE), ::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(inputPrecisions),
::testing::Values(4), // Number of input tensor channels (blob_copy only supports 4d and 5d tensors)
::testing::Values(false), // use GetBlob
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
PreprocessingPrecisionConvertTest::getTestCaseName);
} // namespace
@@ -2,50 +2,93 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/set_preprocess.hpp"
#include "multi-device/multi_device_config.hpp"
#include "behavior/set_preprocess.hpp"
using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16};
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
const std::vector<std::map<std::string, std::string>> multiConfigs = {
{{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES, CommonTestUtils::DEVICE_TEMPLATE}}};
{{ InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES,
CommonTestUtils::DEVICE_TEMPLATE }}
};
const std::vector<std::map<std::string, std::string>> heteroConfigs = {{{"TARGET_FALLBACK", CommonTestUtils::DEVICE_TEMPLATE}}};
const std::vector<std::map<std::string, std::string>> heteroConfigs = {
{{ "TARGET_FALLBACK", CommonTestUtils::DEVICE_TEMPLATE }}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PreprocessTest,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
PreprocessTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_Multi_BehaviorTests, PreprocessTest,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_MULTI),
::testing::ValuesIn(multiConfigs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_MULTI),
::testing::ValuesIn(multiConfigs)),
PreprocessTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_Hetero_BehaviorTests, PreprocessTest,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_HETERO),
::testing::ValuesIn(heteroConfigs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_HETERO),
::testing::ValuesIn(heteroConfigs)),
PreprocessTest::getTestCaseName);
const std::vector<InferenceEngine::Precision> ioPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::U8};
const std::vector<InferenceEngine::Precision> ioPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::U8
};
const std::vector<InferenceEngine::Layout> netLayouts = {
InferenceEngine::Layout::NCHW,
// InferenceEngine::Layout::NHWC
};
const std::vector<InferenceEngine::Layout> ioLayouts = {InferenceEngine::Layout::NCHW, InferenceEngine::Layout::NHWC};
const std::vector<InferenceEngine::Layout> ioLayouts = {
InferenceEngine::Layout::NCHW,
InferenceEngine::Layout::NHWC
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PreprocessConversionTest,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::ValuesIn(ioPrecisions), ::testing::ValuesIn(ioPrecisions),
::testing::ValuesIn(netLayouts), ::testing::ValuesIn(ioLayouts), ::testing::ValuesIn(ioLayouts), ::testing::Bool(),
::testing::Bool(), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE), ::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::ValuesIn(ioPrecisions),
::testing::ValuesIn(ioPrecisions),
::testing::ValuesIn(netLayouts),
::testing::ValuesIn(ioLayouts),
::testing::ValuesIn(ioLayouts),
::testing::Bool(),
::testing::Bool(),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
PreprocessConversionTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PreprocessDynamicallyInSetBlobTest,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Bool(),
::testing::Bool(),
::testing::ValuesIn(netLayouts),
::testing::Bool(),
::testing::Bool(),
::testing::Values(true), // only SetBlob
::testing::Values(true), // only SetBlob
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
PreprocessDynamicallyInSetBlobTest::getTestCaseName);
} // namespace
@@ -8,23 +8,34 @@ using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16};
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, BehaviorTests,
::testing::Combine(::testing::Values(InferenceEngine::Precision::FP32), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::Values(InferenceEngine::Precision::FP32),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
BehaviorTests::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, BehaviorTestInput,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
BehaviorTestInput::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, BehaviorTestOutput,
::testing::Combine(::testing::ValuesIn(netPrecisions), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
BehaviorTestOutput::getTestCaseName);
} // namespace
@@ -8,11 +8,15 @@ using namespace BehaviorTestsDefinitions;
namespace {
const std::vector<std::map<std::string, std::string>> configs = {{}};
const std::vector<std::map<std::string, std::string>> configs = {
{}
};
INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, VersionTest,
::testing::Combine(::testing::Values(InferenceEngine::Precision::FP32), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
::testing::Combine(
::testing::Values(InferenceEngine::Precision::FP32),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::ValuesIn(configs)),
VersionTest::getTestCaseName);
} // namespace
@@ -2,10 +2,9 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "hetero/query_network.hpp"
#include <vector>
#include "hetero/query_network.hpp"
#include "ngraph_functions/builders.hpp"
#include "ngraph_functions/subgraph_builders.hpp"

@@ -15,7 +14,8 @@ using namespace HeteroTests;
auto ConvBias = ngraph::builder::subgraph::makeConvBias();
INSTANTIATE_TEST_CASE_P(smoke_FullySupportedTopologies, QueryNetworkTest,
::testing::Combine(::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "HETERO:TEMPLATE", "MULTI:TEMPLATE"),
::testing::Values(ConvBias)),
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE, "HETERO:TEMPLATE", "MULTI:TEMPLATE"),
::testing::Values(ConvBias)),
QueryNetworkTest::getTestCaseName);
} // namespace
@@ -2,10 +2,9 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "hetero/synthetic.hpp"
#include <vector>
#include "hetero/synthetic.hpp"
#include "ngraph_functions/builders.hpp"
#include "ngraph_functions/subgraph_builders.hpp"

@@ -13,12 +12,14 @@
using namespace HeteroTests;
INSTANTIATE_TEST_CASE_P(smoke_SingleMajorNode, HeteroSyntheticTest,
::testing::Combine(::testing::Values(std::vector<PluginParameter> {{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::_singleMajorNodeFunctions)),
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::_singleMajorNodeFunctions)),
HeteroSyntheticTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(nightly_RandomMajorNodes, HeteroSyntheticTest,
::testing::Combine(::testing::Values(std::vector<PluginParameter> {{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::_randomMajorNodeFunctions)),
::testing::Combine(
::testing::Values(std::vector<PluginParameter>{{"TEMPLATE0", "templatePlugin"}, {"TEMPLATE1", "templatePlugin"}}),
::testing::ValuesIn(HeteroTests::HeteroSyntheticTest::_randomMajorNodeFunctions)),
HeteroSyntheticTest::getTestCaseName);
} // namespace
@@ -2,10 +2,9 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "single_layer_tests/convolution.hpp"
#include <vector>
#include "single_layer_tests/convolution.hpp"
#include "common_test_utils/test_constants.hpp"
using namespace LayerTestsDefinitions;

@@ -20,72 +19,122 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
/* ============= 2D Convolution ============= */
const std::vector<std::vector<size_t>> kernels = {{3, 3}, {3, 5}};
const std::vector<std::vector<size_t>> strides = {{1, 1}, {1, 3}};
const std::vector<std::vector<ptrdiff_t>> padBegins = {{0, 0}, {0, 3}};
const std::vector<std::vector<ptrdiff_t>> padEnds = {{0, 0}, {0, 3}};
const std::vector<std::vector<size_t>> dilations = {{1, 1}, {3, 1}};
const std::vector<std::vector<size_t >> kernels = {{3, 3},
{3, 5}};
const std::vector<std::vector<size_t >> strides = {{1, 1},
{1, 3}};
const std::vector<std::vector<ptrdiff_t>> padBegins = {{0, 0},
{0, 3}};
const std::vector<std::vector<ptrdiff_t>> padEnds = {{0, 0},
{0, 3}};
const std::vector<std::vector<size_t >> dilations = {{1, 1},
{3, 1}};
const std::vector<size_t> numOutChannels = {1, 5};
const std::vector<ngraph::op::PadType> padTypes = {ngraph::op::PadType::EXPLICIT, ngraph::op::PadType::VALID};
const std::vector<ngraph::op::PadType> padTypes = {
ngraph::op::PadType::EXPLICIT,
ngraph::op::PadType::VALID
};
const auto conv2DParams_ExplicitPadding =
::testing::Combine(::testing::ValuesIn(kernels), ::testing::ValuesIn(strides), ::testing::ValuesIn(padBegins), ::testing::ValuesIn(padEnds),
::testing::ValuesIn(dilations), ::testing::ValuesIn(numOutChannels), ::testing::Values(ngraph::op::PadType::EXPLICIT));
const auto conv2DParams_ExplicitPadding = ::testing::Combine(
::testing::ValuesIn(kernels),
::testing::ValuesIn(strides),
::testing::ValuesIn(padBegins),
::testing::ValuesIn(padEnds),
::testing::ValuesIn(dilations),
::testing::ValuesIn(numOutChannels),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
// ! [test_convolution:declare_parameters]
const auto conv2DParams_AutoPadValid =
::testing::Combine(::testing::ValuesIn(kernels), ::testing::ValuesIn(strides), ::testing::Values(std::vector<ptrdiff_t>({0, 0})),
::testing::Values(std::vector<ptrdiff_t>({0, 0})), ::testing::ValuesIn(dilations), ::testing::ValuesIn(numOutChannels),
::testing::Values(ngraph::op::PadType::VALID));
const auto conv2DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(kernels),
::testing::ValuesIn(strides),
::testing::Values(std::vector<ptrdiff_t>({0, 0})),
::testing::Values(std::vector<ptrdiff_t>({0, 0})),
::testing::ValuesIn(dilations),
::testing::ValuesIn(numOutChannels),
::testing::Values(ngraph::op::PadType::VALID)
);
// ! [test_convolution:instantiate]
INSTANTIATE_TEST_CASE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
::testing::Combine(conv2DParams_ExplicitPadding, ::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED), ::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(std::vector<size_t>({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
::testing::Combine(
conv2DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
// ! [test_convolution:instantiate]
INSTANTIATE_TEST_CASE_P(Convolution2D_AutoPadValid, ConvolutionLayerTest,
::testing::Combine(conv2DParams_AutoPadValid, ::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED), ::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(std::vector<size_t>({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
::testing::Combine(
conv2DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
/* ============= 3D Convolution ============= */
const std::vector<std::vector<size_t>> kernels3d = {{3, 3, 3}, {3, 5, 3}};
const std::vector<std::vector<ptrdiff_t>> paddings3d = {{0, 0, 0}, {0, 2, 0}};
const std::vector<std::vector<size_t >> kernels3d = {{3, 3, 3},
{3, 5, 3}};
const std::vector<std::vector<ptrdiff_t>> paddings3d = {{0, 0, 0},
{0, 2, 0}};
const std::vector<std::vector<size_t>> strides3d = {{1, 1, 1}, {1, 2, 1}};
const std::vector<std::vector<size_t>> dilations3d = {{1, 1, 1}, {1, 2, 1}};
const std::vector<std::vector<size_t >> strides3d = {{1, 1, 1},
{1, 2, 1}};
const std::vector<std::vector<size_t >> dilations3d = {{1, 1, 1},
{1, 2, 1}};
const auto conv3DParams_ExplicitPadding =
::testing::Combine(::testing::ValuesIn(kernels3d), ::testing::ValuesIn(strides3d), ::testing::ValuesIn(paddings3d), ::testing::ValuesIn(paddings3d),
::testing::ValuesIn(dilations3d), ::testing::Values(5), ::testing::Values(ngraph::op::PadType::EXPLICIT));
const auto conv3DParams_AutoPadValid =
::testing::Combine(::testing::ValuesIn(kernels3d), ::testing::ValuesIn(strides3d), ::testing::Values(std::vector<ptrdiff_t>({0, 0, 0})),
::testing::Values(std::vector<ptrdiff_t>({0, 0, 0})), ::testing::ValuesIn(dilations3d), ::testing::Values(5),
::testing::Values(ngraph::op::PadType::VALID));
const auto conv3DParams_ExplicitPadding = ::testing::Combine(
::testing::ValuesIn(kernels3d),
::testing::ValuesIn(strides3d),
::testing::ValuesIn(paddings3d),
::testing::ValuesIn(paddings3d),
::testing::ValuesIn(dilations3d),
::testing::Values(5),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const auto conv3DParams_AutoPadValid = ::testing::Combine(
::testing::ValuesIn(kernels3d),
::testing::ValuesIn(strides3d),
::testing::Values(std::vector<ptrdiff_t>({0, 0, 0})),
::testing::Values(std::vector<ptrdiff_t>({0, 0, 0})),
::testing::ValuesIn(dilations3d),
::testing::Values(5),
::testing::Values(ngraph::op::PadType::VALID)
);
INSTANTIATE_TEST_CASE_P(smoke_Convolution3D_ExplicitPadding, ConvolutionLayerTest,
::testing::Combine(conv3DParams_ExplicitPadding, ::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED), ::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(std::vector<size_t>({1, 3, 10, 10, 10})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
::testing::Combine(
conv3DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 10, 10, 10})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(nightly_Convolution3D_AutoPadValid, ConvolutionLayerTest,
::testing::Combine(conv3DParams_AutoPadValid, ::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED), ::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(std::vector<size_t>({1, 3, 10, 10, 10})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
::testing::Combine(
conv3DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 10, 10, 10})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
ConvolutionLayerTest::getTestCaseName);
} // namespace
@@ -2,34 +2,43 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "single_layer_tests/reshape.hpp"
#include <vector>
#include "single_layer_tests/reshape.hpp"
#include "common_test_utils/test_constants.hpp"
using namespace LayerTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP32,
};
INSTANTIATE_TEST_CASE_P(smoke_ReshapeCheckDynBatch, ReshapeLayerTest,
::testing::Combine(::testing::Values(true), ::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED), ::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(std::vector<size_t>({30, 30, 30, 30})),
::testing::Values(std::vector<size_t>({30, 30, 30, 30})), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::Values(std::map<std::string, std::string>({}))),
ReshapeLayerTest::getTestCaseName);
::testing::Combine(
::testing::Values(true),
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t>({30, 30, 30, 30})),
::testing::Values(std::vector<size_t>({30, 30, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::Values(std::map<std::string, std::string>({}))),
ReshapeLayerTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_ReshapeCheck, ReshapeLayerTest,
::testing::Combine(::testing::Values(true), ::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED), ::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(std::vector<size_t>({10, 10, 10, 10})),
::testing::Values(std::vector<size_t>({10, 0, 100})), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::Values(std::map<std::string, std::string>({}))),
ReshapeLayerTest::getTestCaseName);
::testing::Combine(
::testing::Values(true),
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t>({10, 10, 10, 10})),
::testing::Values(std::vector<size_t>({10, 0, 100})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::Values(std::map<std::string, std::string>({}))),
ReshapeLayerTest::getTestCaseName);
} // namespace
@@ -2,10 +2,9 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "single_layer_tests/softmax.hpp"
#include <vector>
#include "single_layer_tests/softmax.hpp"
#include "common_test_utils/test_constants.hpp"
using namespace LayerTestsDefinitions;

@@ -26,14 +25,28 @@ const std::vector<InferenceEngine::SizeVector> inputShapes2D = {
InferenceEngine::SizeVector {10, 10},
};
const std::vector<size_t> axis2D = {0, 1};
const std::vector<size_t> axis2D = {
0, 1
};
const auto params2D = testing::Combine(testing::ValuesIn(netPrecisions), testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Precision::UNSPECIFIED), testing::ValuesIn(inputLayouts2D),
testing::Values(InferenceEngine::Layout::ANY), testing::ValuesIn(inputShapes2D), testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE), testing::Values(std::map<std::string, std::string>()));
const auto params2D = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::ValuesIn(inputLayouts2D),
testing::Values(InferenceEngine::Layout::ANY),
testing::ValuesIn(inputShapes2D),
testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
testing::Values(std::map<std::string, std::string>())
);
INSTANTIATE_TEST_CASE_P(smoke_SoftMax2D, SoftMaxLayerTest, params2D, SoftMaxLayerTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(
smoke_SoftMax2D,
SoftMaxLayerTest,
params2D,
SoftMaxLayerTest::getTestCaseName
);
const std::vector<InferenceEngine::SizeVector> inputShapes4D = {
InferenceEngine::SizeVector {1, 100, 1, 1},

@@ -43,11 +56,23 @@ const std::vector<InferenceEngine::SizeVector> inputShapes4D = {
const std::vector<size_t> axis4D = {0, 1, 2, 3};
const auto params4D = testing::Combine(testing::ValuesIn(netPrecisions), testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Precision::UNSPECIFIED), testing::Values(InferenceEngine::Layout::NCHW),
testing::Values(InferenceEngine::Layout::ANY), testing::ValuesIn(inputShapes4D), testing::ValuesIn(axis4D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE), testing::Values(std::map<std::string, std::string>()));
const auto params4D = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Precision::UNSPECIFIED),
testing::Values(InferenceEngine::Layout::NCHW),
testing::Values(InferenceEngine::Layout::ANY),
testing::ValuesIn(inputShapes4D),
testing::ValuesIn(axis4D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
testing::Values(std::map<std::string, std::string>())
);
INSTANTIATE_TEST_CASE_P(smoke_SoftMax4D, SoftMaxLayerTest, params4D, SoftMaxLayerTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(
smoke_SoftMax4D,
SoftMaxLayerTest,
params4D,
SoftMaxLayerTest::getTestCaseName
);
} // namespace
@@ -2,10 +2,9 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "single_layer_tests/split.hpp"
#include <vector>
#include "single_layer_tests/split.hpp"
#include "common_test_utils/test_constants.hpp"
using namespace LayerTestsDefinitions;

@@ -13,11 +12,17 @@
namespace {
INSTANTIATE_TEST_CASE_P(smoke_NumSplitsCheck, SplitLayerTest,
::testing::Combine(::testing::Values(1, 2, 3, 5, 6, 10, 30), ::testing::Values(0, 1, 2, 3),
::testing::Values(InferenceEngine::Precision::FP32), ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED), ::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY), ::testing::Values(std::vector<size_t>({30, 30, 30, 30})),
::testing::Values(std::vector<size_t>({})), ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
::testing::Combine(
::testing::Values(1, 2, 3, 5, 6, 10, 30),
::testing::Values(0, 1, 2, 3),
::testing::Values(InferenceEngine::Precision::FP32),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t>({30, 30, 30, 30})),
::testing::Values(std::vector<size_t>({})),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
SplitLayerTest::getTestCaseName);
} // namespace
@ -2,10 +2,10 @@
// SPDX-License-Identifier: Apache-2.0
//

#include "functional_test_utils/skip_tests_config.hpp"

#include <string>
#include <vector>
#include <string>

#include "functional_test_utils/skip_tests_config.hpp"

std::vector<std::string> disabledTestPatterns() {
    return {
@ -14,5 +14,6 @@ std::vector<std::string> disabledTestPatterns() {
        R"(.*SplitLayerTest.*numSplits\=30.*)",
        // CVS-51758
        ".*PreprocessConversionTest.*oLT=NHWC.*",
        ".*PreprocessDynamicallyInSetBlobTest.*oPRC=0.*oLT=1.*",
    };
}
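The strings returned by disabledTestPatterns() are regular expressions matched against the full generated test name, which is how the new PreprocessDynamicallyInSetBlobTest entry skips the known-bad combination on the template plugin. A hedged sketch of that mechanism (isDisabled is an illustrative helper, not the actual FuncTestUtils implementation):

    #include <regex>
    #include <string>
    #include <vector>

    // Returns true when the fully qualified gtest name matches any disabled pattern.
    static bool isDisabled(const std::string& fullTestName,
                           const std::vector<std::string>& patterns) {
        for (const auto& pattern : patterns) {
            if (std::regex_match(fullTestName, std::regex(pattern))) {
                return true;
            }
        }
        return false;
    }

    // A test whose name contains "PreprocessDynamicallyInSetBlobTest" and the tokens
    // "oPRC=0" followed by "oLT=1" is reported as disabled by the pattern added above.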
@ -18,9 +18,11 @@

// #include "common_test_utils/ngraph_test_utils.hpp"


// using namespace testing;
// using namespace ngraph;


// TEST(TransformationTests, Preprocessing_AddStdScale) {
//     std::shared_ptr<Function> f(nullptr), f_ref(nullptr);

@ -4,11 +4,12 @@

#include <gtest/gtest.h>

#include <string>
#include <memory>
#include <queue>

#include <ngraph/ngraph.hpp>
#include <ngraph/opsets/opset3.hpp>
#include <queue>
#include <string>
#include <transformations/init_node_info.hpp>
#include <transformations/utils/utils.hpp>

@ -23,11 +24,11 @@ TEST(TransformationTests, DISABLED_TemplateTest) {
    // f_ref - ngraph::Function that is expected after applying transformation
    {
        // Example function
        auto data = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::f32, ngraph::Shape {3, 1, 2});
        auto divide_constant = ngraph::opset3::Constant::create(ngraph::element::f32, ngraph::Shape {1}, {1.5});
        auto data = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::f32, ngraph::Shape{3, 1, 2});
        auto divide_constant = ngraph::opset3::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.5});
        auto divide = std::make_shared<ngraph::opset3::Divide>(data, divide_constant);

        f = std::make_shared<ngraph::Function>(ngraph::NodeVector {divide}, ngraph::ParameterVector {data});
        f = std::make_shared<ngraph::Function>(ngraph::NodeVector{divide}, ngraph::ParameterVector{data});

        // This transformation init runtime info attributes
        ngraph::pass::InitNodeInfo().run_on_function(f);
@ -41,12 +42,13 @@ TEST(TransformationTests, DISABLED_TemplateTest) {

    {
        // Example reference function
        auto data = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::f32, ngraph::Shape {3, 1, 2});
        auto divide_constant = ngraph::opset3::Constant::create(ngraph::element::f32, ngraph::Shape {1}, {1.5});
        auto pow = std::make_shared<ngraph::opset3::Power>(divide_constant, ngraph::opset3::Constant::create(ngraph::element::f32, ngraph::Shape {1}, {-1}));
        auto data = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::f32, ngraph::Shape{3, 1, 2});
        auto divide_constant = ngraph::opset3::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.5});
        auto pow = std::make_shared<ngraph::opset3::Power>(divide_constant,
                                                           ngraph::opset3::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {-1}));
        auto mul = std::make_shared<ngraph::opset3::Multiply>(data, pow);

        f_ref = std::make_shared<ngraph::Function>(ngraph::NodeVector {mul}, ngraph::ParameterVector {data});
        f_ref = std::make_shared<ngraph::Function>(ngraph::NodeVector{mul}, ngraph::ParameterVector{data});
    }

    // Compare that processed function and expected function are the same
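As an aside on the reference function above: it encodes the rewrite a / c -> a * c^(-1), so the transformed graph and the reference graph compute the same values. A tiny standalone check of that identity (illustrative only, not part of the commit):

    #include <cassert>
    #include <cmath>

    int main() {
        const float a = 3.0f;
        const float c = 1.5f;
        // a / c and a * c^(-1) agree up to floating-point rounding.
        assert(std::fabs(a / c - a * std::pow(c, -1.0f)) < 1e-6f);
        return 0;
    }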
@ -61,6 +61,8 @@ void IInferRequestInternal::SetBlob(const std::string& name, const Blob::Ptr& us
    DataPtr foundOutput;
    size_t dataSize = userBlob->size();
    if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
        // ilavreno: the condition below is obsolete, but we need an exact list of precisions
        // which are supported by G-API preprocessing
        if (foundInput->getPrecision() != userBlob->getTensorDesc().getPrecision()) {
            IE_THROW(ParameterMismatch) << "Failed to set Blob with precision not corresponding to user input precision";
        }
@ -96,6 +98,11 @@ void IInferRequestInternal::SetBlob(const std::string& name, const Blob::Ptr& us
            if (foundOutput->getPrecision() != userBlob->getTensorDesc().getPrecision()) {
                IE_THROW(ParameterMismatch) << "Failed to set Blob with precision not corresponding to user output precision";
            }
            // ilavreno: this condition is valid for most plugins except MYRIAD
            // it is able to perform layout conversion for output blob dynamically
            // if (foundOutput->getLayout() != userBlob->getTensorDesc().getLayout()) {
            //     IE_THROW(ParameterMismatch) << "Failed to set Blob with layout not corresponding to user output layout";
            // }
            _outputs[name] = userBlob;
        }
    }
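What the checks above mean for a caller, as a hedged fragment: a blob whose precision differs from the network input still raises ParameterMismatch, while an output-layout mismatch is deliberately no longer rejected here so that plugins such as MYRIAD can convert the layout at infer time. The snippet assumes an already created InferRequest whose input "param" is FP32/NCHW {1, 3, 9, 9}; the include path is an assumption as well.

    #include <gtest/gtest.h>

    #include <cstdint>
    #include <ie_core.hpp>

    void trySetMismatchedInput(InferenceEngine::InferRequest& req) {
        // Deliberately wrong precision (U8 vs the network's FP32 input).
        InferenceEngine::TensorDesc u8Desc(InferenceEngine::Precision::U8,
                                           {1, 3, 9, 9},
                                           InferenceEngine::Layout::NCHW);
        auto blob = InferenceEngine::make_shared_blob<std::uint8_t>(u8Desc);
        blob->allocate();
        // Precision differs from the user input precision -> ParameterMismatch is thrown.
        EXPECT_THROW(req.SetBlob("param", blob), InferenceEngine::ParameterMismatch);
    }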
@ -90,6 +90,20 @@ namespace {
                            ::testing::ValuesIn(configs)),
                        PreprocessConversionTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PreprocessDynamicallyInSetBlobTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(netPrecisions),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::ValuesIn(netLayouts),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(CommonTestUtils::DEVICE_CPU),
                            ::testing::ValuesIn(configs)),
                        PreprocessDynamicallyInSetBlobTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_Hetero_BehaviorTests, PreprocessConversionTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(netPrecisions),
@ -104,6 +118,20 @@ namespace {
                            ::testing::ValuesIn(heteroConfigs)),
                        PreprocessConversionTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_Hetero_BehaviorTests, PreprocessDynamicallyInSetBlobTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(netPrecisions),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::ValuesIn(netLayouts),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(CommonTestUtils::DEVICE_HETERO),
                            ::testing::ValuesIn(heteroConfigs)),
                        PreprocessDynamicallyInSetBlobTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_Multi_BehaviorTests, PreprocessConversionTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(netPrecisions),
@ -118,6 +146,20 @@ namespace {
                            ::testing::ValuesIn(multiConfigs)),
                        PreprocessConversionTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_Multi_BehaviorTests, PreprocessDynamicallyInSetBlobTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(netPrecisions),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::ValuesIn(netLayouts),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(CommonTestUtils::DEVICE_MULTI),
                            ::testing::ValuesIn(multiConfigs)),
                        PreprocessDynamicallyInSetBlobTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, PreprocessConversionTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(netPrecisions),
@ -132,4 +174,18 @@ namespace {
                            ::testing::ValuesIn(autoConfigs)),
                        PreprocessConversionTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_Auto_BehaviorTests, PreprocessDynamicallyInSetBlobTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(netPrecisions),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::ValuesIn(netLayouts),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(CommonTestUtils::DEVICE_AUTO),
                            ::testing::ValuesIn(autoConfigs)),
                        PreprocessDynamicallyInSetBlobTest::getTestCaseName);

} // namespace
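For scale, a rough count of what one PreprocessDynamicallyInSetBlobTest instantiation above generates: every ::testing::Bool() contributes two values, the two ::testing::Values(true) slots pin the test to the SetBlob path for both input and output, and the remaining sizes come from vectors defined earlier in that file (assumed here to be two netPrecisions, two netLayouts and one config):

    #include <cstddef>

    // Assumed sizes; the real vectors live earlier in the CPU preprocessing test file.
    constexpr std::size_t kPrecisions = 2;  // netPrecisions
    constexpr std::size_t kLayouts = 2;     // netLayouts
    constexpr std::size_t kConfigs = 1;     // configs

    constexpr std::size_t kCases = kPrecisions * kLayouts
                                   * 2 * 2 * 2 * 2  // four Bool() flags
                                   * 1 * 1          // SetBlob forced for input and output
                                   * 1 * kConfigs;  // one device value, configs

    static_assert(kCases == 64, "2 precisions x 2 layouts x 2^4 boolean flags = 64 cases");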
@ -29,6 +29,8 @@ std::vector<std::string> disabledTestPatterns() {
        R"(.*(PreprocessTest).*(SetMeanValuePreProcessSetBlob).*)",
        R"(.*(PreprocessTest).*(SetMeanImagePreProcessSetBlob).*)",
        R"(.*(PreprocessTest).*(ReverseInputChannelsPreProcessGetBlob).*)",
        R"(.*PreprocessDynamicallyInSetBlobTest.*iPRC=0.*_iLT=1.*)",
        R"(.*PreprocessDynamicallyInSetBlobTest.*oPRC=0.*_oLT=1.*)",
        // TODO: Issue: 34348
        R"(.*IEClassGetAvailableDevices.*)",
        // TODO: Issue: 25533
@ -89,4 +89,18 @@ namespace {
                            ::testing::ValuesIn(configs)),
                        PreprocessConversionTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PreprocessDynamicallyInSetBlobTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(netPrecisions),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::ValuesIn(netLayouts),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(CommonTestUtils::DEVICE_GPU),
                            ::testing::ValuesIn(configs)),
                        PreprocessDynamicallyInSetBlobTest::getTestCaseName);

} // namespace
@ -22,6 +22,7 @@ std::vector<std::string> disabledTestPatterns() {
        R"(.*(PreprocessTest).*(SetMeanValuePreProcessSetBlob).*)",
        R"(.*(PreprocessTest).*(SetMeanImagePreProcessSetBlob).*)",
        R"(.*(PreprocessTest).*(ReverseInputChannelsPreProcessGetBlob).*)",
        R"(.*(PreprocessDynamicallyInSetBlobTest).*)",
        // TODO: Issue: 51764
        ".*PreprocessConversionTest.*",
        // TODO: Issue: 41462
@ -63,4 +63,19 @@ namespace {
                            ::testing::ValuesIn(configs)),
                        PreprocessConversionTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, PreprocessDynamicallyInSetBlobTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(netPrecisions),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::ValuesIn(netLayouts),
                            ::testing::Bool(),
                            ::testing::Bool(),
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(true), // only SetBlob
                            ::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
                            ::testing::ValuesIn(configs)),
                        PreprocessDynamicallyInSetBlobTest::getTestCaseName);


} // namespace
@ -13,6 +13,7 @@
#include "functional_test_utils/blob_utils.hpp"
#include "ie_preprocess.hpp"
#include "base/behavior_test_utils.hpp"
#include "ie_ngraph_utils.hpp"

namespace BehaviorTestsDefinitions {
using PreprocessTest = BehaviorTestsUtils::BehaviorTestsBasic;
@ -674,7 +675,7 @@ TEST_P(PreprocessConversionTest, Infer) {
    unsigned int shape_size = 9, channels = 3, batch = 1, offset = 0;
    {
        ngraph::PartialShape shape({batch, channels, shape_size, shape_size});
        ngraph::element::Type type(ngraph::element::Type_t::f32);
        ngraph::element::Type type(InferenceEngine::details::convertPrecision(netPrecision));
        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
        param->set_friendly_name("param");
        auto relu = std::make_shared<ngraph::op::Relu>(param);
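The change above stops hard-coding f32 and derives the ngraph element type from the test's netPrecision through InferenceEngine::details::convertPrecision, declared in the newly included ie_ngraph_utils.hpp. A minimal sketch of the mapping this relies on (the FP32 -> f32 and U8 -> u8 pairs are stated as an expectation, not quoted from the header):

    #include <gtest/gtest.h>

    #include <ie_ngraph_utils.hpp>

    TEST(ConvertPrecisionSketch, MapsIePrecisionToNgraphElementType) {
        using InferenceEngine::Precision;
        using InferenceEngine::details::convertPrecision;
        EXPECT_EQ(ngraph::element::f32, convertPrecision(Precision::FP32));
        EXPECT_EQ(ngraph::element::u8, convertPrecision(Precision::U8));
    }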
@ -736,11 +737,11 @@ TEST_P(PreprocessConversionTest, Infer) {
        auto lockedMem = inBlob->buffer();
        auto desc = inBlob->getTensorDesc();

        if (iPrecision == InferenceEngine::Precision::FP32) {
        if (desc.getPrecision() == InferenceEngine::Precision::FP32) {
            auto *inData = lockedMem.as<float*>();
            for (size_t i = 0; i < inBlob->size(); i++)
                inData[desc.offset(i)] = static_cast<float>(i);
        } else if (iPrecision == InferenceEngine::Precision::U8) {
        } else if (desc.getPrecision() == InferenceEngine::Precision::U8) {
            auto *inData = lockedMem.as<std::uint8_t*>();
            for (size_t i = 0; i < inBlob->size(); i++)
                inData[desc.offset(i)] = static_cast<std::uint8_t>(i);
@ -756,12 +757,12 @@ TEST_P(PreprocessConversionTest, Infer) {
        auto outMem = outBlob->cbuffer();
        auto desc = outBlob->getTensorDesc();

        if (oPrecision == InferenceEngine::Precision::FP32) {
        if (desc.getPrecision() == InferenceEngine::Precision::FP32) {
            const auto* outData = outMem.as<const float *>();
            ASSERT_EQ(inBlob->size(), outBlob->size());
            for (size_t i = 0; i < inBlob->size(); i++)
                ASSERT_EQ(i, outData[desc.offset(i)]) << i;
        } else if (oPrecision == InferenceEngine::Precision::U8) {
        } else if (desc.getPrecision() == InferenceEngine::Precision::U8) {
            const auto* outData = outMem.as<const std::uint8_t *>();
            ASSERT_EQ(inBlob->size(), outBlob->size());
            for (size_t i = 0; i < inBlob->size(); i++)
@ -772,14 +773,101 @@ TEST_P(PreprocessConversionTest, Infer) {
    }
}

TEST_P(PreprocessConversionTest, FailedToChangeBlobFormatAfterNetworkCompilation) {
typedef std::tuple<
    InferenceEngine::Precision,         // Network precision
    bool,                               // Change input precision
    bool,                               // Change output precision
    InferenceEngine::Layout,            // Network layout - always NCHW
    bool,                               // Change input layout
    bool,                               // Change output layout
    bool,                               // SetBlob or GetBlob for input blob
    bool,                               // SetBlob or GetBlob for output blob
    std::string,                        // Device name
    std::map<std::string, std::string>  // Config
> PreprocessSetBlobCheckParams;

class PreprocessDynamicallyInSetBlobTest : public testing::WithParamInterface<PreprocessSetBlobCheckParams>,
                                           public CommonTestUtils::TestsCommon {
public:
    static std::string getTestCaseName(testing::TestParamInfo<PreprocessSetBlobCheckParams> obj) {
        InferenceEngine::Precision netPrecision;
        InferenceEngine::Layout netLayout;
        bool changeIPrecision, changeOPrecision;
        bool changeILayout, changeOLayout;
        bool setInputBlob, setOutputBlob;
        std::string targetDevice;
        std::map<std::string, std::string> configuration;
        std::tie(netPrecision, changeIPrecision, changeOPrecision,
                 netLayout, changeILayout, changeOLayout,
                 setInputBlob, setOutputBlob,
                 targetDevice, configuration) = obj.param;
        std::ostringstream result;
        result << "netPRC=" << netPrecision.name() << "_";
        result << "iPRC=" << changeIPrecision << "_";
        result << "oPRC=" << changeOPrecision << "_";
        result << "netLT=" << netLayout << "_";
        result << "iLT=" << changeILayout << "_";
        result << "oLT=" << changeOLayout << "_";
        result << "setIBlob=" << setInputBlob << "_";
        result << "setOBlob=" << setOutputBlob << "_";
        result << "targetDevice=" << targetDevice;
        if (!configuration.empty()) {
            for (auto& configItem : configuration) {
                result << "configItem=" << configItem.first << "_" << configItem.second << "_";
            }
        }
        return result.str();
    }

    InferenceEngine::Layout getOppositeLayout(InferenceEngine::Layout l) {
        if (InferenceEngine::Layout::NCHW == l) {
            return InferenceEngine::Layout::NHWC;
        } else if (InferenceEngine::Layout::NHWC == l) {
            return InferenceEngine::Layout::NCHW;
        }
        return InferenceEngine::Layout::ANY;
    }

    InferenceEngine::Precision getOppositePrecision(InferenceEngine::Precision p) {
        if (InferenceEngine::Precision::U8 == p) {
            return InferenceEngine::Precision::FP32;
        } else if (InferenceEngine::Precision::FP32 == p) {
            return InferenceEngine::Precision::U8;
        }
        return InferenceEngine::Precision::UNSPECIFIED;
    }

    void SetUp() override {
        std::tie(netPrecision, changeIPrecision, changeOPrecision,
                 netLayout, changeILayout, changeOLayout,
                 setInputBlob, setOutputBlob,
                 targetDevice, configuration) = this->GetParam();
    }

    void TearDown() override {
        if (!configuration.empty()) {
            PluginCache::get().reset();
        }
    }

    std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
    InferenceEngine::Precision netPrecision;
    bool changeIPrecision, changeOPrecision;
    InferenceEngine::Layout netLayout;
    bool changeILayout, changeOLayout;
    bool setInputBlob, setOutputBlob;
    std::string targetDevice;
    std::map<std::string, std::string> configuration;
};

TEST_P(PreprocessDynamicallyInSetBlobTest, Infer) {
    // Skip test according to plugin specific disabledTestPatterns() (if any)
    SKIP_IF_CURRENT_TEST_IS_DISABLED()
    std::shared_ptr<ngraph::Function> ngraph;
    unsigned int shape_size = 9, channels = 3, batch = 1;
    {
        ngraph::PartialShape shape({batch, channels, shape_size, shape_size});
        ngraph::element::Type type(ngraph::element::Type_t::f32);
        ngraph::element::Type type(InferenceEngine::details::convertPrecision(netPrecision));
        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
        param->set_friendly_name("param");
        auto relu = std::make_shared<ngraph::op::Relu>(param);
@ -796,11 +884,6 @@ TEST_P(PreprocessConversionTest, FailedToChangeBlobFormatAfterNetworkCompilation
    // Create CNNNetwork from ngraph::Function
    InferenceEngine::CNNNetwork cnnNet(ngraph);

    cnnNet.getInputsInfo().begin()->second->setPrecision(iPrecision);
    cnnNet.getInputsInfo().begin()->second->setLayout(iLayout);
    cnnNet.getOutputsInfo().begin()->second->setPrecision(oPrecision);
    cnnNet.getOutputsInfo().begin()->second->setLayout(oLayout);

    // Load CNNNetwork to target plugins
    auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
    auto req = execNet.CreateInferRequest();
@ -810,8 +893,10 @@ TEST_P(PreprocessConversionTest, FailedToChangeBlobFormatAfterNetworkCompilation

    auto recreateInputBlob = [&] (InferenceEngine::Blob::Ptr & _inBlob) {
        auto desc = cnnNet.getInputsInfo().begin()->second->getTensorDesc();
        desc = InferenceEngine::TensorDesc(getOppositePrecision(desc.getPrecision()),
                                           desc.getDims(), getOppositeLayout(desc.getLayout()));
        desc = InferenceEngine::TensorDesc(
            changeIPrecision ? getOppositePrecision(desc.getPrecision()) : desc.getPrecision(),
            desc.getDims(),
            changeILayout ? getOppositeLayout(desc.getLayout()) : desc.getLayout());
        auto tempBlob = make_blob_with_precision(desc);
        tempBlob->allocate();

@ -820,18 +905,44 @@ TEST_P(PreprocessConversionTest, FailedToChangeBlobFormatAfterNetworkCompilation

    if (setInputBlob) {
        recreateInputBlob(inBlob);
        EXPECT_THROW(req.SetBlob("param", inBlob), InferenceEngine::ParameterMismatch);
        if (changeIPrecision) {
            EXPECT_THROW(req.SetBlob("param", inBlob), InferenceEngine::ParameterMismatch);
            // fallback
            inBlob = req.GetBlob("param");
        } else {
            EXPECT_NO_THROW(req.SetBlob("param", inBlob));
        }
    } else {
        inBlob = req.GetBlob("param");
        recreateInputBlob(inBlob);
    }

    // Fill input
    {
        auto lockedMem = inBlob->buffer();
        auto desc = inBlob->getTensorDesc();

        if (desc.getPrecision() == InferenceEngine::Precision::FP32) {
            auto *inData = lockedMem.as<float*>();
            for (size_t i = 0; i < inBlob->size(); i++)
                inData[desc.offset(i)] = static_cast<float>(i);
        } else if (desc.getPrecision() == InferenceEngine::Precision::U8) {
            auto *inData = lockedMem.as<std::uint8_t*>();
            for (size_t i = 0; i < inBlob->size(); i++)
                inData[desc.offset(i)] = static_cast<std::uint8_t>(i);
        } else {
            ASSERT_TRUE(false);
        }
    }

    // create output blob

    auto recreateOutputBlob = [&] (InferenceEngine::Blob::Ptr & _outBlob) {
        auto desc = cnnNet.getOutputsInfo().begin()->second->getTensorDesc();
        desc = InferenceEngine::TensorDesc(getOppositePrecision(desc.getPrecision()),
                                           desc.getDims(), getOppositeLayout(desc.getLayout()));
        desc = InferenceEngine::TensorDesc(
            changeOPrecision ? getOppositePrecision(desc.getPrecision()) : desc.getPrecision(),
            desc.getDims(),
            changeOLayout ? getOppositeLayout(desc.getLayout()) : desc.getLayout());
        auto tempBlob = make_blob_with_precision(desc);
        tempBlob->allocate();

@ -840,15 +951,47 @@ TEST_P(PreprocessConversionTest, FailedToChangeBlobFormatAfterNetworkCompilation

    if (setOutputBlob) {
        recreateOutputBlob(outBlob);
        EXPECT_THROW(req.SetBlob("relu", outBlob), InferenceEngine::ParameterMismatch);
        if (changeOPrecision) {
            ASSERT_THROW(req.SetBlob("relu", outBlob), InferenceEngine::ParameterMismatch);
            // fallback
            outBlob = req.GetBlob("relu");
        } else {
            ASSERT_NO_THROW(req.SetBlob("relu", outBlob));
        }
    } else {
        outBlob = req.GetBlob("relu");
        recreateOutputBlob(outBlob);
    }

    // TODO: if blob from GetBlob is re-created, no checks are performed
    // should be "GetBlob re-creation error mismatch"
    EXPECT_NO_THROW(req.Infer() /*, InferenceEngine::Exception */);
    if (setOutputBlob && setInputBlob) {
        ASSERT_NO_THROW(req.Infer());
    } else {
        // TODO: if blob from GetBlob is re-created, no checks are performed
        // should be "GetBlob re-creation error mismatch"
        // EXPECT_THROW(req.Infer(), InferenceEngine::Exception);

        ASSERT_NO_THROW(req.Infer());
    }

    // Check output
    {
        auto outMem = outBlob->cbuffer();
        auto desc = outBlob->getTensorDesc();

        if (desc.getPrecision() == InferenceEngine::Precision::FP32) {
            const auto* outData = outMem.as<const float *>();
            ASSERT_EQ(inBlob->size(), outBlob->size());
            for (size_t i = 0; i < inBlob->size(); i++)
                ASSERT_EQ(i, outData[desc.offset(i)]) << i;
        } else if (desc.getPrecision() == InferenceEngine::Precision::U8) {
            const auto* outData = outMem.as<const std::uint8_t *>();
            ASSERT_EQ(inBlob->size(), outBlob->size());
            for (size_t i = 0; i < inBlob->size(); i++)
                ASSERT_EQ(i, outData[desc.offset(i)]) << i;
        } else {
            ASSERT_TRUE(false);
        }
    }
}

} // namespace BehaviorTestsDefinitions
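To connect the test class above with the skip lists earlier in this commit: the string built by getTestCaseName() is what the disabledTestPatterns() regexes match against. A standalone sketch of the name format (the concrete values are illustrative, and the layout enum is rendered as plain text here for simplicity):

    #include <iostream>
    #include <sstream>

    int main() {
        std::ostringstream name;
        // Mirrors the field order used by PreprocessDynamicallyInSetBlobTest::getTestCaseName().
        name << "netPRC=" << "FP32" << "_"
             << "iPRC=" << 0 << "_"
             << "oPRC=" << 0 << "_"
             << "netLT=" << "NCHW" << "_"
             << "iLT=" << 1 << "_"
             << "oLT=" << 1 << "_"
             << "setIBlob=" << 1 << "_"
             << "setOBlob=" << 1 << "_"
             << "targetDevice=" << "CPU";
        // Prints: netPRC=FP32_iPRC=0_oPRC=0_netLT=NCHW_iLT=1_oLT=1_setIBlob=1_setOBlob=1_targetDevice=CPU
        // A full gtest name carrying this token matches the CPU skip pattern
        // R"(.*PreprocessDynamicallyInSetBlobTest.*oPRC=0.*_oLT=1.*)", i.e. the combination
        // "output precision unchanged, output layout changed" is the one being disabled.
        std::cout << name.str() << std::endl;
        return 0;
    }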