templateFuncTests

* Remove the StaticShape suffix and add a DynamicShape suffix, so existing test cases are static-shape by default

* Fix typos

cpuFuncTests

* Add functionRefs to each test case (a minimal sketch of the pattern follows the change summary below)
Steve Yoo 2021-09-24 12:16:06 +09:00
parent c3b87f098d
commit 5710141cd8
307 changed files with 414 additions and 51 deletions
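The functionRefs change follows one pattern across the touched fixtures: right after SetUp() builds the tested graph into function, an unmodified copy is cloned into functionRefs so reference results can be computed from the original graph. A minimal sketch of that pattern only; the fixture name, result vector and parameters are placeholders that vary per test file and are not part of this commit:

void SetUp() override {
    // ... build params/results exactly as each fixture already does ...
    function = std::make_shared<ngraph::Function>(results, params, "SomeLayer");
    // Added by this commit: keep an untouched clone for the reference path.
    functionRefs = ngraph::clone_function(*function);
}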

View File

@@ -58,7 +58,7 @@ const auto conv2DParams_AutoPadValid = ::testing::Combine(
 );
 // ! [test_convolution:instantiate]
-INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPaddingStaticShape, ConvolutionLayerTest,
+INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
 ::testing::Combine(
 conv2DParams_ExplicitPadding,
 ::testing::ValuesIn(netPrecisions),
@@ -71,7 +71,7 @@ INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPaddingStaticShape, ConvolutionLa
 ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
 ConvolutionLayerTest::getTestCaseName);
-INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
+INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPaddingDynamicShape, ConvolutionLayerTest,
 ::testing::Combine(
 conv2DParams_ExplicitPadding,
 ::testing::ValuesIn(netPrecisions),
@@ -87,7 +87,7 @@ INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
 ConvolutionLayerTest::getTestCaseName);
 // ! [test_convolution:instantiate]
-INSTANTIATE_TEST_SUITE_P(Convolution2D_AutoPadValidStaticShape, ConvolutionLayerTest,
+INSTANTIATE_TEST_SUITE_P(Convolution2D_AutoPadValid, ConvolutionLayerTest,
 ::testing::Combine(
 conv2DParams_AutoPadValid,
 ::testing::ValuesIn(netPrecisions),
@@ -131,7 +131,7 @@ const auto conv3DParams_AutoPadValid = ::testing::Combine(
 ::testing::Values(ngraph::op::PadType::VALID)
 );
-INSTANTIATE_TEST_SUITE_P(smoke_Convolution3D_ExplicitPaddingStaticShape, ConvolutionLayerTest,
+INSTANTIATE_TEST_SUITE_P(smoke_Convolution3D_ExplicitPadding, ConvolutionLayerTest,
 ::testing::Combine(
 conv3DParams_ExplicitPadding,
 ::testing::ValuesIn(netPrecisions),
@@ -144,7 +144,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Convolution3D_ExplicitPaddingStaticShape, Convolu
 ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
 ConvolutionLayerTest::getTestCaseName);
-INSTANTIATE_TEST_SUITE_P(nightly_Convolution3D_AutoPadValidStaticShape, ConvolutionLayerTest,
+INSTANTIATE_TEST_SUITE_P(nightly_Convolution3D_AutoPadValid, ConvolutionLayerTest,
 ::testing::Combine(
 conv3DParams_AutoPadValid,
 ::testing::ValuesIn(netPrecisions),

View File

@@ -12,7 +12,7 @@ using namespace LayerTestsDefinitions;
 namespace {
-const std::vector<InferenceEngine::Precision> netPrecision = {
+const std::vector<InferenceEngine::Precision> netPrecisions = {
 InferenceEngine::Precision::FP32,
 };
@@ -38,8 +38,8 @@ const std::vector<size_t> axis2D = {
 0, 1
 };
-const auto params2DStaticShape = testing::Combine(
-testing::ValuesIn(netPrecision),
+const auto params2D = testing::Combine(
+testing::ValuesIn(netPrecisions),
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::ValuesIn(inputLayouts2D),
@@ -51,8 +51,8 @@ const auto params2DStaticShape = testing::Combine(
 testing::Values(std::map<std::string, std::string>())
 );
-const auto params2D = testing::Combine(
-testing::ValuesIn(netPrecision),
+const auto params2DDynamicShape = testing::Combine(
+testing::ValuesIn(netPrecisions),
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::ValuesIn(inputLayouts2D),
@@ -65,16 +65,16 @@ const auto params2D = testing::Combine(
 );
 INSTANTIATE_TEST_SUITE_P(
-smoke_SoftMax2DStaticShape,
+smoke_SoftMax2D,
 SoftMaxLayerTest,
-params2DStaticShape,
+params2D,
 SoftMaxLayerTest::getTestCaseName
 );
 INSTANTIATE_TEST_SUITE_P(
-smoke_SoftMax2D,
+smoke_SoftMax2DDynamicShape,
 SoftMaxLayerTest,
-params2D,
+params2DDynamicShape,
 SoftMaxLayerTest::getTestCaseName
 );
@@ -94,8 +94,8 @@ const std::vector<std::vector<InferenceEngine::SizeVector>> targetShapes4D = {
 const std::vector<size_t> axis4D = {0, 1, 2, 3};
-const auto params4DStaticShape = testing::Combine(
-testing::ValuesIn(netPrecision),
+const auto params4D = testing::Combine(
+testing::ValuesIn(netPrecisions),
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::Values(InferenceEngine::Layout::NCHW),
@@ -107,8 +107,8 @@ const auto params4DStaticShape = testing::Combine(
 testing::Values(std::map<std::string, std::string>())
 );
-const auto params4D = testing::Combine(
-testing::ValuesIn(netPrecision),
+const auto params4DDynamicShape = testing::Combine(
+testing::ValuesIn(netPrecisions),
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::Values(InferenceEngine::Layout::NCHW),
@@ -120,13 +120,6 @@ const auto params4D = testing::Combine(
 testing::Values(std::map<std::string, std::string>())
 );
-INSTANTIATE_TEST_SUITE_P(
-smoke_SoftMax4DStaticShape,
-SoftMaxLayerTest,
-params4DStaticShape,
-SoftMaxLayerTest::getTestCaseName
-);
 INSTANTIATE_TEST_SUITE_P(
 smoke_SoftMax4D,
 SoftMaxLayerTest,
@@ -134,4 +127,11 @@ INSTANTIATE_TEST_SUITE_P(
 SoftMaxLayerTest::getTestCaseName
 );
+INSTANTIATE_TEST_SUITE_P(
+smoke_SoftMax4DDynamicShape,
+SoftMaxLayerTest,
+params4DDynamicShape,
+SoftMaxLayerTest::getTestCaseName
+);
 } // namespace

View File

@@ -70,6 +70,7 @@ protected:
 ngraph::NodeVector {bias_2},
 ngraph::ParameterVector {input},
 "SimpleNet");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -19,10 +19,13 @@ const std::vector<InferenceEngine::Layout> inputLayouts2D = {
 InferenceEngine::Layout::NC,
 };
-const std::vector<InferenceEngine::SizeVector> inputShapes2D = {
-InferenceEngine::SizeVector {1, 100},
-InferenceEngine::SizeVector {100, 1},
-InferenceEngine::SizeVector {10, 10},
+const std::vector<std::vector<std::pair<size_t, size_t>>> inputStaticShape2D = {
+{NULL_RANGE}
+};
+const std::vector<std::vector<InferenceEngine::SizeVector>> inputShapes2D = {
+{InferenceEngine::SizeVector {1, 100}},
+{InferenceEngine::SizeVector {100, 1}},
+{InferenceEngine::SizeVector {10, 10}},
 };
 const std::vector<size_t> axis2D = {
@@ -35,6 +38,7 @@ const auto params2D = testing::Combine(
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::ValuesIn(inputLayouts2D),
 testing::Values(InferenceEngine::Layout::ANY),
+testing::ValuesIn(inputStaticShape2D),
 testing::ValuesIn(inputShapes2D),
 testing::ValuesIn(axis2D),
 testing::Values(CommonTestUtils::DEVICE_CPU),
@@ -48,10 +52,14 @@ INSTANTIATE_TEST_SUITE_P(
 SoftMaxLayerTest::getTestCaseName
 );
-const std::vector<InferenceEngine::SizeVector> inputShapes4D = {
-InferenceEngine::SizeVector {1, 100, 1, 1},
-InferenceEngine::SizeVector {1, 3, 4, 3},
-InferenceEngine::SizeVector {2, 3, 4, 5},
+const std::vector<std::vector<std::pair<size_t, size_t>>> inputStaticShape4D = {
+{NULL_RANGE}
+};
+const std::vector<std::vector<InferenceEngine::SizeVector>> inputShapes4D = {
+{InferenceEngine::SizeVector {1, 100, 1, 1}},
+{InferenceEngine::SizeVector {1, 3, 4, 3}},
+{InferenceEngine::SizeVector {2, 3, 4, 5}},
 };
 const std::vector<size_t> axis4D = {0, 1, 2, 3};
@@ -62,6 +70,7 @@ const auto params4D = testing::Combine(
 testing::Values(InferenceEngine::Precision::UNSPECIFIED),
 testing::Values(InferenceEngine::Layout::NCHW),
 testing::Values(InferenceEngine::Layout::ANY),
+testing::ValuesIn(inputStaticShape4D),
 testing::ValuesIn(inputShapes4D),
 testing::ValuesIn(axis4D),
 testing::Values(CommonTestUtils::DEVICE_CPU),
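Note on the new shape parameters above: each configuration now carries both a vector of dynamic-shape bounds and a vector of static target shapes, and the existing cases pass NULL_RANGE (a helper from the shared test utilities, not shown in this commit) to stay fully static. A hedged sketch of what an additional dynamic configuration could look like; the (1, 100) bounds are an illustrative assumption, not something this commit adds:

// Hypothetical extra 2D configuration; each pair is assumed to be a
// (lower, upper) bound for one input dimension.
const std::vector<std::vector<std::pair<size_t, size_t>>> inputDynamicShape2D = {
    {NULL_RANGE},            // static default, as used by the committed cases
    {{1, 100}, {1, 100}},    // illustrative dynamic bounds for the 2D inputs
};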

View File

@@ -92,6 +92,7 @@ protected:
 ngraph::ResultVector outputs;
 outputs.push_back(std::make_shared<ngraph::opset1::Result>(outputNode));
 function = std::make_shared<ngraph::Function>(outputs, inputs);
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -71,6 +71,7 @@ protected:
 auto activation = ngraph::builder::makeActivation(params[0], ngPrc, activationType, shapes.second, constantsValue);
 activation->get_rt_info() = getCPUInfo();
 function = std::make_shared<ngraph::Function>(ngraph::NodeVector{activation}, params, "Activation");
+functionRefs = ngraph::clone_function(*function);
 }
 InferenceEngine::Precision netPrecision;

View File

@@ -81,6 +81,7 @@ protected:
 threshold = 1e-2;
 function = (mode == "max" ? std::make_shared<ngraph::Function>(adapoolMax->outputs(), params, "AdaPoolMax") :
 std::make_shared<ngraph::Function>(adapoolAvg->outputs(), params, "AdaPoolAvg"));
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -55,6 +55,7 @@ protected:
 b2s->get_rt_info() = getCPUInfo();
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(b2s)};
 function = std::make_shared<ngraph::Function>(results, params, "BatchToSpace");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -56,6 +56,7 @@ protected:
 auto concat = std::make_shared<ngraph::opset1::Concat>(paramOuts, axis);
 function = makeNgraphFunction(ngPrc, params, concat, "concat");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -63,6 +63,7 @@ protected:
 auto powerStatic = ngraph::builder::makeEltwise(inputs[0], inputs[1], nodeType);
 function = std::make_shared<ngraph::Function>(powerStatic, ParameterVector{param}, "ConvertToPluginSpecificNode");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -93,9 +93,10 @@ protected:
 isBias = (postOpMgrPtr->getFusedOpsNames() == "Add(PerChannel)" && selectedType != "jit_avx512_winograd");
 convSpecificParams convParams;
-std::vector<size_t> inputShape;
+std::vector<std::pair<size_t, size_t>> inputDynamicShape;
+std::vector<std::vector<size_t>> inputShape;
 auto netPrecision = InferenceEngine::Precision::UNSPECIFIED;
-std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetDevice) = basicParamsSet;
+std::tie(convParams, netPrecision, inPrc, outPrc, inLayout, outLayout, inputDynamicShape, inputShape, targetDevice) = basicParamsSet;
 if (inPrc == Precision::UNSPECIFIED) {
 selectedType += std::string("_") + Precision(Precision::FP32).name();
@@ -119,6 +120,7 @@ protected:
 padEnd, dilation, padType, convOutChannels);
 function = makeNgraphFunction(ngPrc, inputParams, convolutionNode, "Convolution");
+functionRefs = ngraph::clone_function(*function);
 }
 };
@@ -190,8 +192,8 @@ const std::vector<SizeVector> strides2d = { {1, 1}, {2, 2} };
 const std::vector<std::vector<ptrdiff_t>> padBegins2d = { {0, 0}, {1, 1} };
 const std::vector<std::vector<ptrdiff_t>> padEnds2d = { {0, 0} };
 const std::vector<SizeVector> dilations2d = { {1, 1}, {2, 2} };
-const std::vector<SizeVector> inputShapes2d = { {1, 64, 7, 7}, {1, 67, 7, 7} };
-const std::vector<SizeVector> inputShapesPlain2Blocked2d = { {1, 1, 7, 7}, {1, 2, 7, 7}, {1, 3, 7, 7} };
+const std::vector<std::vector<SizeVector>> inputShapes2d = { { {1, 64, 7, 7}, {1, 67, 7, 7} } };
+const std::vector<std::vector<SizeVector>> inputShapesPlain2Blocked2d = { { {1, 1, 7, 7}, {1, 2, 7, 7}, {1, 3, 7, 7} } };
 /* ============= Convolution params (3D) ============= */
 const std::vector<SizeVector> kernels3d = { {3, 3, 3}, {1, 1, 1} };
@@ -199,8 +201,8 @@ const std::vector<SizeVector> strides3d = { {1, 1, 1}, {2, 2, 2} };
 const std::vector<std::vector<ptrdiff_t>> padBegins3d = { {0, 0, 0}, {1, 1, 1} };
 const std::vector<std::vector<ptrdiff_t>> padEnds3d = { {0, 0, 0} };
 const std::vector<SizeVector> dilations3d = { {1, 1, 1}, {2, 2, 2} };
-const std::vector<SizeVector> inputShapes3d = { {1, 64, 7, 7, 7}, {1, 67, 7, 7, 7} };
-const std::vector<SizeVector> inputShapesPlain2Blocked3d = { {1, 1, 7, 7, 7}, {1, 2, 7, 7, 7}, {1, 3, 7, 7, 7} };
+const std::vector<std::vector<SizeVector>> inputShapes3d = { { {1, 64, 7, 7, 7}, {1, 67, 7, 7, 7} } };
+const std::vector<std::vector<SizeVector>> inputShapesPlain2Blocked3d = { { {1, 1, 7, 7, 7}, {1, 2, 7, 7, 7}, {1, 3, 7, 7, 7} } };
 /* ============= */
 /* INSTANCES */
@@ -229,7 +231,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_GEMM_FP32, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
-::testing::Values(std::vector<size_t >({ 2, 12, 7, 7 })),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{ 2, 12, 7, 7 }})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_2D)),
 ::testing::ValuesIn(fusingParamsSet),
@@ -245,7 +248,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_GEMM_BF16, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::BF16),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
-::testing::Values(std::vector<size_t >({ 2, 12, 7, 7 })),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{ 2, 12, 7, 7 }})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_2D)),
 ::testing::ValuesIn(fusingParamsSetBF16),
@@ -261,7 +265,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_GEMM_I8, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
-::testing::Values(std::vector<size_t >({ 2, 12, 7, 7 })),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{ 2, 12, 7, 7 }})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_2D)),
 ::testing::Values(fusingSum),
@@ -293,7 +298,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_GEMM_FP32, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
-::testing::Values(std::vector<size_t >({ 2, 12, 7, 7, 7 })),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{ 2, 12, 7, 7, 7 }})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_3D)),
 ::testing::ValuesIn(fusingParamsSet),
@@ -309,7 +315,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_GEMM_BF16, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::BF16),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
-::testing::Values(std::vector<size_t >({ 2, 12, 7, 7, 7 })),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{ 2, 12, 7, 7, 7 }})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_3D)),
 ::testing::ValuesIn(fusingParamsSetBF16),
@@ -325,7 +332,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_GEMM_I8, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
-::testing::Values(std::vector<size_t >({ 2, 12, 7, 7, 7 })),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{ 2, 12, 7, 7, 7 }})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_3D)),
 ::testing::Values(fusingSum),
@@ -361,6 +369,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_FP32, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes2d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_2D)),
@@ -377,6 +386,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_BF16, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::BF16),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes2d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D, conv_avx512_2D_nspc})),
@@ -393,6 +403,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_I8, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes2d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_2D)),
@@ -415,6 +426,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_PlainToBlocked_2D_FP32, ConvolutionLayerCPUT
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapesPlain2Blocked2d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_2D_plain_to_blocked)),
@@ -431,6 +443,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_PlainToBlocked_2D_BF16, ConvolutionLayerCPUT
 ::testing::Values(Precision::BF16, Precision::FP32),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapesPlain2Blocked2d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_plain_to_blocked_2D})),
@@ -466,6 +479,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_FP32, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes3d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_3D)),
@@ -482,6 +496,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_BF16, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::BF16),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes3d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_3D, conv_avx512_3D_nspc})),
@@ -498,6 +513,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_I8, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes3d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_3D)),
@@ -519,6 +535,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_PlainToBlocked_3D_FP32, ConvolutionLayerCPUT
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapesPlain2Blocked3d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_3D_plain_to_blocked)),
@@ -535,6 +552,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_PlainToBlocked_3D_BF16, ConvolutionLayerCPUT
 ::testing::Values(Precision::BF16, Precision::FP32),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapesPlain2Blocked3d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_plain_to_blocked_3D})),
@@ -572,6 +590,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_1x1_FP32, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes2d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_1x1_2D)),
@@ -588,6 +607,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_1x1_BF16, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::BF16),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes2d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D_1x1, conv_avx512_2D_1x1_nspc})),
@@ -604,6 +624,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_1x1_I8, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes2d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_1x1_2D)),
@@ -644,7 +665,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_1D, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
-::testing::Values(std::vector<size_t >({ 2, 64, 7})),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{ 2, 64, 7 }})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_1D)),
 ::testing::Values(fusingAddPerChannel),
@@ -679,6 +701,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_Jit_Planar_2D_FP32, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes2d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_Jit_Planar_2D)),
@@ -712,6 +735,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_Jit_Planar_3D_FP32, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
 ::testing::ValuesIn(inputShapes3d),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_Jit_Planar_3D)),
@@ -761,7 +785,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Conv_winograd, ConvolutionLayerCPUTest,
 ::testing::Values(Precision::UNSPECIFIED),
 ::testing::Values(Layout::ANY),
 ::testing::Values(Layout::ANY),
-::testing::Values(std::vector<size_t >({ 1, 16, 10, 10 })),
+::testing::Values(std::vector<std::pair<size_t, size_t>>(NULL_RANGE)),
+::testing::Values(std::vector<std::vector<size_t>>({{ 1, 16, 10, 10 }})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)),
 ::testing::ValuesIn(filterCPUInfoForDevice(std::vector<CPUSpecificParams>{conv_winograd})),
 ::testing::ValuesIn(fusingParamsSet),
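For orientation, the std::tie(...) in the convolution fixture above implies that the basic parameter tuple gained a dynamic-shape field ahead of the now nested target shapes. A hypothetical reconstruction of that tuple follows; the real typedef lives in the shared layer-test definitions and its name and exact element types may differ:

// Hypothetical: element order inferred from std::tie(convParams, netPrecision,
// inPrc, outPrc, inLayout, outLayout, inputDynamicShape, inputShape, targetDevice).
using convBasicParamsSketch = std::tuple<
    convSpecificParams,                      // kernel, strides, pads, dilation, ...
    InferenceEngine::Precision,              // network precision
    InferenceEngine::Precision,              // input precision
    InferenceEngine::Precision,              // output precision
    InferenceEngine::Layout,                 // input layout
    InferenceEngine::Layout,                 // output layout
    std::vector<std::pair<size_t, size_t>>,  // dynamic shape bounds (NULL_RANGE => static)
    std::vector<std::vector<size_t>>,        // target static shapes
    std::string>;                            // device name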

View File

@@ -95,6 +95,7 @@ protected:
 }
 function = makeNgraphFunction(ngPrc, inputParams, deconvolutionNode, "convolutionBackpropData");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -55,6 +55,7 @@ protected:
 d2s->get_rt_info() = getCPUInfo();
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(d2s)};
 function = std::make_shared<ngraph::Function>(results, params, "DepthToSpace");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -100,6 +100,7 @@ protected:
 auto eltwise = ngraph::builder::makeEltwise(input[0], secondaryInput, eltwiseType);
 function = makeNgraphFunction(ngPrc, input, eltwise, "Eltwise");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -53,6 +53,7 @@ protected:
 inputNode, ngraph::Shape(kernel), ngraph::Strides(strides), ngraph::Shape(rates), pad_type);
 ngraph::ResultVector results{std::make_shared<ngraph::opset6::Result>(extImgPatches)};
 function = std::make_shared<ngraph::Function>(results, params, "ExtractImagePatches");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -123,6 +123,7 @@ protected:
 fq->get_rt_info() = getCPUInfo();
 function = std::make_shared<Function>(fq, params, "FakeQuantizeCPU");
+functionRefs = ngraph::clone_function(*function);
 }
 private:

View File

@@ -60,6 +60,7 @@ protected:
 auto activation = ngraph::builder::makeGatherElements(params[0], indicesShape, ngIPrc, axis);
 activation->get_rt_info() = getCPUInfo();
 function = std::make_shared<ngraph::Function>(ngraph::NodeVector{activation}, params, "GatherElements");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -104,6 +104,7 @@ protected:
 ngraph::builder::makeGroupConvolution(paramOuts[0], ngPrc, kernel, stride, padBegin,
 padEnd, dilation, padType, convOutChannels, numGroups));
 function = makeNgraphFunction(ngPrc, params, groupConv, "groupConvolution");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -94,6 +94,7 @@ protected:
 padEnd, dilation, padType, convOutChannels, numGroups, false, outputPadding));
 }
 function = makeNgraphFunction(ngPrc, params, groupConv, "groupConvolutionBackpropData");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -88,6 +88,7 @@ protected:
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(gru_cell->output(0))};
 function = makeNgraphFunction(ngPrc, params, gru_cell, "gru_cell");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -137,6 +137,8 @@ protected:
 bool ti_found = ngraph::helpers::is_tensor_iterator_exist(function);
 EXPECT_EQ(ti_found, false);
 }
+functionRefs = ngraph::clone_function(*function);
 }
 void GenerateInputs() override {

View File

@@ -107,6 +107,7 @@ protected:
 selectedType += "BF16";
 else
 selectedType += netPrecision.name();
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -72,6 +72,7 @@ protected:
 logicalNode->get_rt_info() = getCPUInfo();
 function = std::make_shared<ngraph::Function>(logicalNode, inputs, "Logical");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -87,6 +87,7 @@ protected:
 std::make_shared<ngraph::opset1::Result>(lstm_cell->output(1))};
 function = makeNgraphFunction(ngPrc, params, lstm_cell, "lstm_cell");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -144,6 +144,8 @@ protected:
 bool ti_found = ngraph::helpers::is_tensor_iterator_exist(function);
 EXPECT_EQ(ti_found, false);
 }
+functionRefs = ngraph::clone_function(*function);
 }
 void GenerateInputs() override {

View File

@@ -98,6 +98,7 @@ protected:
 auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(params));
 auto matMul = builder::makeMatMul(paramOuts[0], matrixB, transpA, transpB);
 function = makeNgraphFunction(ngPrec, params, matMul, cpuNodeType);
+functionRefs = ngraph::clone_function(*function);
 checkFusingPosition = false;
 }
 };

View File

@@ -71,6 +71,7 @@ protected:
 threshold = 0.015f;
 function = makeNgraphFunction(netPrc, param, mvn, "mvn");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -57,6 +57,7 @@ protected:
 auto normalize = builder::makeNormalizeL2(paramOuts[0], axes, eps, eps_mode);
 function = makeNgraphFunction(netPrc, params, normalize, "Normalize");
+functionRefs = ngraph::clone_function(*function);
 selectedType = "unknown_" + std::string(inPrc.name());
 threshold = 0.015f;

View File

@@ -73,6 +73,7 @@ protected:
 auto oneHot = std::make_shared<ngraph::opset5::OneHot>(inputParams.front(), depthConst, onConst, offConst, axis);
 function = makeNgraphFunction(ngPrc, inputParams, oneHot, "OneHot");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -58,6 +58,7 @@ protected:
 pad->get_rt_info() = getCPUInfo();
 ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(pad)};
 function = std::make_shared<ngraph::Function>(results, params, "pad");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -86,6 +86,7 @@ protected:
 function = makeNgraphFunction(ngPrc, params, pooling, "Pooling");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -97,6 +97,7 @@ protected:
 threshold = 1e-2;
 const ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(psroi)};
 function = std::make_shared<ngraph::Function>(results, params, "PSROIPooling");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -74,6 +74,7 @@ protected:
 const ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(reduce)};
 function = std::make_shared<ngraph::Function>(results, params, "Reduce");
+functionRefs = ngraph::clone_function(*function);
 }
 InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo &info) const override {
 if (ngraph::helpers::ReductionType::Prod == reductionType) {

View File

@@ -77,6 +77,7 @@ protected:
 attributes.do_softmax, mask, attributes.start_axis, attributes.end_axis);
 function = makeNgraphFunction(ngPrc, paramRegionYolo, region_yolo, "RegionYolo");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -82,6 +82,7 @@ protected:
 WRB, hidden_size, activations, {}, {}, clip);
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(rnn_cell)};
 function = makeNgraphFunction(ngPrc, params, rnn_cell, "rnn_cell");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -117,6 +117,7 @@ protected:
 bool ti_found = ngraph::helpers::is_tensor_iterator_exist(function);
 EXPECT_EQ(ti_found, false);
 }
+functionRefs = ngraph::clone_function(*function);
 }
 void GenerateInputs() override {

View File

@@ -144,6 +144,7 @@ protected:
 ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(roi_pooling)};
 function = makeNgraphFunction(ngPrc, params, roi_pooling, "roi_pooling");
+functionRefs = ngraph::clone_function(*function);
 selectedType += "_";
 selectedType += netPrecision.name();

View File

@@ -98,6 +98,7 @@ protected:
 threshold = 1e-2;
 const ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(roialign)};
 function = std::make_shared<ngraph::Function>(results, params, "ROIAlign");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -63,6 +63,8 @@ protected:
 }
 selectedType.push_back('_');
 selectedType += netPrecision.name();
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -67,6 +67,7 @@ protected:
 const auto softMax = std::make_shared<ngraph::opset1::Softmax>(paramOuts.at(0), config.axis);
 function = makeNgraphFunction(ngPrc, params, softMax, "SoftMax");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -60,6 +60,7 @@ protected:
 s2b->get_rt_info() = getCPUInfo();
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(s2b)};
 function = std::make_shared<ngraph::Function>(results, params, "SpaceToBatch");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -55,6 +55,7 @@ protected:
 d2s->get_rt_info() = getCPUInfo();
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(d2s)};
 function = std::make_shared<ngraph::Function>(results, params, "SpaceToDepth");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -79,6 +79,7 @@ protected:
 }
 split->get_rt_info() = getCPUInfo();
 function = std::make_shared<ngraph::Function>(results, params, "split");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -71,6 +71,7 @@ protected:
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(ss)};
 function = std::make_shared<ngraph::Function>(results, params, "StridedSlice");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -67,6 +67,7 @@ protected:
 transpose->get_rt_info() = getCPUInfo();
 const ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(transpose)};
 function = std::make_shared<ngraph::Function>(results, params, "Transpose");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -35,6 +35,7 @@ public:
 auto gather = std::make_shared<ngraph::opset3::Gather>(paramOuts[0], indicesNode, axisNode);
 ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(gather)};
 function = std::make_shared<ngraph::Function>(results, params, "gather");
+functionRefs = ngraph::clone_function(*function);
 }
 std::vector<std::pair<ngraph::element::Type, std::vector<std::uint8_t>>> CalculateRefs() override {
 // Convert the second input constant precision to i64 to run the reference function

View File

@@ -42,6 +42,7 @@ public:
 ngraph::ResultVector results{std::make_shared<ngraph::opset8::Result>(concat)};
 function = std::make_shared<ngraph::Function>(results, inputParams, "ConcatConstantInPlace");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -75,6 +75,7 @@ protected:
 }
 function = std::make_shared<ngraph::Function>(results, inputParams, "Conv3dReshape");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -109,6 +109,7 @@ void ConvConcatSubgraphTest::SetUp() {
 ngraph::ResultVector results{std::make_shared<ngraph::opset4::Result>(concat)};
 function = std::make_shared<ngraph::Function>(results, inputParams, "convolutionConcat");
+functionRefs = ngraph::clone_function(*function);
 }
 TEST_P(ConvConcatSubgraphTest, CompareWithRefs) {

View File

@@ -59,6 +59,7 @@ protected:
 }
 function = makeNgraphFunction(element::f32, inputParams, pooling, "ConvPoolActiv");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -105,6 +105,7 @@ protected:
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(eltwiseOps[eltwiseOps.size() - 1])};
 function = std::make_shared<ngraph::Function>(results, ngraphParam, "eltwise_chain");
 }
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -29,6 +29,7 @@ void FuseMulAddAndEwSimpleTest::SetUp() {
 std::tie(inputShape, inPrec) = this->GetParam();
 CreateGraph();
+functionRefs = ngraph::clone_function(*function);
 }
 const auto mulAddAndEwSimpleCommonParams = ::testing::Combine(

View File

@@ -76,6 +76,7 @@ protected:
 quantizeIntervals[3]);
 ngraph::ResultVector results{std::make_shared<ngraph::opset6::Result>(quantize)};
 function = std::make_shared<ngraph::Function>(results, ngraph::ParameterVector{param}, "FuseScaleShiftAndQuantize");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -48,6 +48,7 @@ void FuseTransposeAndReorderTest::SetUp() {
 std::tie(inputShape, inPrec) = this->GetParam();
 CreateGraph();
+functionRefs = ngraph::clone_function(*function);
 }
 const auto fuseTransposeAndReorderCommonParams = ::testing::Combine(

View File

@@ -33,6 +33,7 @@ protected:
 auto eltwise = ngraph::builder::makeEltwise(input[0], secondaryInput, eltwiseType);
 function = makeNgraphFunction(ngPrc, input, eltwise, "Eltwise");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -36,6 +36,7 @@ protected:
 NodeVector results{postOpCandidate, secondConsumpt};
 function = std::make_shared<ngraph::Function>(results, inputParams, "NotFusedConvSimpleOp");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -60,6 +60,7 @@ protected:
 auto matMul = builder::makeMatMul(reshape, matrixB, false, transpB);
 function = makeNgraphFunction(element::f32, inputParams, matMul, "ReshapeFC");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -29,6 +29,7 @@ protected:
 NodeVector results{add1, add2};
 function = std::make_shared<ngraph::Function>(results, inputParams, "TileWithTwoOutputEdges");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -131,6 +131,7 @@ protected:
 auto relu2 = std::make_shared<ngraph::opset1::Relu>(conv);
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(relu2)};
 function = std::make_shared<ngraph::Function>(results, params, "ExportImportNetwork");
+functionRefs = ngraph::clone_function(*function);
 }
 private:

View File

@@ -96,6 +96,7 @@ protected:
 relu->add_control_dependency(mem_w);
 ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(relu)};
 function = std::make_shared<ngraph::Function>(results, params, "ExportImportNetwork");
+functionRefs = ngraph::clone_function(*function);
 }
 private:

View File

@@ -72,6 +72,7 @@ class Eltwise4dBroadcast : public testing::WithParamInterface<eltwiseParams>,
 ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape2) };
 function = std::make_shared<ngraph::Function>(results, params, "Eltwise4dBroadcast");
+functionRefs = ngraph::clone_function(*function);
 }
 };
@@ -120,6 +121,7 @@ protected:
 ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape3) };
 function = std::make_shared<ngraph::Function>(results, params, "Eltwise4dMultipleInput");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -90,6 +90,7 @@ protected:
 ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(maxpool)};
 function = std::make_shared<ngraph::Function>(results, inputVector, "ActMaxpoolReordering");
+functionRefs = ngraph::clone_function(*function);
 }
 };

View File

@@ -64,6 +64,7 @@ protected:
 auto add = std::make_shared<ngraph::opset1::Add>(fakeQuantize1, fakeQuantize2);
 ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(add)};
 function = std::make_shared<ngraph::Function>(results, params, "BroadcastConstWithFq");
+functionRefs = ngraph::clone_function(*function);
 }
 };

@@ -118,6 +118,7 @@ protected:
auto result = std::make_shared<Result>(lastOp);
function = std::make_shared<Function>(ResultVector{result}, ParameterVector{input});
functionRefs = ngraph::clone_function(*function);
}
};

@@ -200,6 +200,7 @@ protected:
auto result = std::make_shared<Result>(lastOp);
function = std::make_shared<Function>(ResultVector{result}, ParameterVector{input});
functionRefs = ngraph::clone_function(*function);
}
};

@@ -199,6 +199,7 @@ protected:
auto result = std::make_shared<Result>(lastOp);
function = std::make_shared<Function>(ResultVector{result}, ParameterVector{input});
functionRefs = ngraph::clone_function(*function);
}
};

@@ -56,6 +56,7 @@ protected:
auto mul = ngraph::builder::makeEltwise(params[0], const_mult2, ngraph::helpers::EltwiseTypes::MULTIPLY);
function = std::make_shared<ngraph::Function>(mul, params, "EltwiseSplitOverChannelsPassTest");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -87,6 +87,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reluFQNode) };
function = std::make_shared<ngraph::Function>(results, inputVector, "FQActivation");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -99,6 +99,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset7::Result>(add3)};
function = std::make_shared<ngraph::Function>(results, params, "FQFusionWithMultipleWeights");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -41,6 +41,7 @@ protected:
auto mul3 = ngraph::builder::makeEltwise(mul2, fake3, ngraph::helpers::EltwiseTypes::ADD);
auto result = std::make_shared<ngraph::opset7::Result>(mul3);
function = std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, input, "fq_fusion_with_sigmoid");
functionRefs = ngraph::clone_function(*function);
}
public:
static std::string getTestCaseName(const testing::TestParamInfo<fqFusionWithSigmoidParams> &obj) {

@@ -104,6 +104,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(maxpool)};
function = std::make_shared<ngraph::Function>(results, inputVector, "FQMaxPoolReorder");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -89,6 +89,7 @@ protected:
results.push_back(std::make_shared<ngraph::opset8::Result>(reluFQNode));
}
function = std::make_shared<ngraph::Function>(results, inputVector, "FQOutputsActivation");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -77,6 +77,7 @@ protected:
ngraph::ResultVector results{std::make_shared<ngraph::opset8::Result>(reshape2),
std::make_shared<ngraph::opset8::Result>(reshape3)};
function = std::make_shared<ngraph::Function>(results, params, "FQFusionWithMultipleWeights");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -80,6 +80,7 @@ protected:
results.push_back(std::make_shared<ngraph::opset1::Result>(relu));
}
function = std::make_shared<ngraph::Function>(results, params, "InsertCopyBeforeSelfConcat");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -91,6 +91,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(matmul)};
function = std::make_shared<ngraph::Function>(results, params, "InsertTransposeBeforeMatmul");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -100,6 +100,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape3)};
function = std::make_shared<ngraph::Function>(results, params, "InsertTransposeBetweenConvs");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -178,6 +179,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape3)};
function = std::make_shared<ngraph::Function>(results, params, "InsertTransposeBetweenConvs");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -75,6 +75,7 @@ protected:
InferenceEngine::Precision netPrecision;
std::tie(netPrecision, configuration, targetDevice) = this->GetParam();
function = T::createTopology(netPrecision);
functionRefs = ngraph::clone_function(*function);
}
};

@@ -102,6 +102,7 @@ class RemovePermutationsNHWCToNCHWPassTest : public testing::WithParamInterface<
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape2) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -145,6 +146,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(permute2) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass4DOutput");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -240,6 +242,7 @@ class RemovePermutationsWithPoolAndActTest : public testing::WithParamInterface<
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape2) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -330,6 +333,7 @@ class RemovePermutationsWithTwoConvTest : public testing::WithParamInterface<rem
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape2) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -428,6 +432,7 @@ class RemovePermutationsWithEltwiseTest : public testing::WithParamInterface<rem
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape3) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass");
functionRefs = ngraph::clone_function(*function);
}
};

@@ -256,6 +256,7 @@ protected:
break;
}
}
functionRefs = ngraph::clone_function(*function);
}
};

@@ -69,6 +69,7 @@ TEST_P(TrivialLoopTest, PassThroughBody) {
function = std::make_shared<ngraph::Function>(
ngraph::OutputVector {loop},
ngraph::ParameterVector {start});
functionRefs = ngraph::clone_function(*function);
// Precalculated ref blobs
auto blob = make_blob_with_precision({iePrc, ieShape, InferenceEngine::TensorDesc::getLayoutByDims(ieShape)});
@@ -113,6 +114,7 @@ TEST_P(TrivialLoopTest, UnusedInputBody) {
function = std::make_shared<ngraph::Function>(
ngraph::OutputVector {loop},
ngraph::ParameterVector {start});
functionRefs = ngraph::clone_function(*function);
// Precalculated ref blobs
auto blob = make_blob_with_precision({iePrc, ieShape, InferenceEngine::TensorDesc::getLayoutByDims(ieShape)});

@@ -141,6 +141,7 @@ void LoadNetworkCacheTestBase::SetUp() {
} catch (...) {
GTEST_SKIP();
}
functionRefs = ngraph::clone_function(*function);
std::stringstream ss;
auto hash = std::hash<std::string>()(GetTestName());

@@ -61,18 +61,21 @@ void DetectNetworkBatch::LoadNetwork() {
TEST_P(DetectNetworkBatch, InferWithOneInput) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
function = ngraph::builder::subgraph::makeSplitConvConcat();
functionRefs = ngraph::clone_function(*function);
Run();
};
TEST_P(DetectNetworkBatch, InferWithMultipleInputs_DiffDims) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
function = makeNNWithMultipleInputsDiffDims();
functionRefs = ngraph::clone_function(*function);
Run();
};
TEST_P(DetectNetworkBatch, InferWithMultipleInputs_SameDims) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
function = makeNNWithMultipleInputsSameDims();
functionRefs = ngraph::clone_function(*function);
Run();
};

@@ -71,6 +71,7 @@ namespace ConfigurationTestsDefinitions {
inputs.push_back(blob);
}
reference_inputs.push_back(inputs);
functionRefs = ngraph::clone_function(*function);
reference_outputs.push_back(CalculateRefs());
}

@@ -91,6 +91,7 @@ void ProposalBehTest::SetUp() {
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(proposal)};
function = std::make_shared<ngraph::Function>(results, params, "proposal");
functionRefs = ngraph::clone_function(*function);
}
void ProposalBehTest::Run() {

@@ -106,6 +106,7 @@ void SetBlobTest::SetUp() {
auto cumSum = std::dynamic_pointer_cast<ngraph::opset4::CumSum>(ngraph::builder::makeCumSum(paramOuts[0], axisNode, false, false));
ngraph::ResultVector results{std::make_shared<ngraph::opset4::Result>(cumSum)};
function = std::make_shared<ngraph::Function>(results, params, "InferSetBlob");
functionRefs = ngraph::clone_function(*function);
}
TEST_P(SetBlobTest, CompareWithRefs) {

@@ -21,6 +21,7 @@ std::string MultipleAllocations::getTestCaseName(const testing::TestParamInfo<Mu
void MultipleAllocations::SetUp() {
std::tie(targetDevice, m_allocationsCount) = this->GetParam();
function = ngraph::builder::subgraph::makeSplitConvConcat();
functionRefs = ngraph::clone_function(*function);
}
TEST_P(MultipleAllocations, InferWorksCorrectAfterAllocations) {

@@ -14,6 +14,7 @@ void QueryNetworkTest::SetUp() {
auto& param = GetParam();
targetDevice = std::get<Plugin>(param);
function = std::get<Function>(param);
functionRefs = ngraph::clone_function(*function);
cnnNetwork = InferenceEngine::CNNNetwork{function};
}

@@ -63,6 +63,7 @@ void AddTransformation::SetUp() {
param.fakeQuantize1, param.fakeQuantize2);
ngraph::pass::InitNodeInfo().run_on_function(function);
functionRefs = ngraph::clone_function(*function);
}
TEST_P(AddTransformation, CompareWithRefImpl) {

@@ -41,6 +41,7 @@ void ClampTransformation::SetUp() {
param.fakeQuantize,
param.clampLowConst,
param.clampHighConst);
functionRefs = ngraph::clone_function(*function);
}
TEST_P(ClampTransformation, CompareWithRefImpl) {

@@ -52,6 +52,7 @@ void ConcatTransformation::SetUp() {
inputShape,
testValues.fqOnData1,
testValues.fqOnData2);
functionRefs = ngraph::clone_function(*function);
}
TEST_P(ConcatTransformation, CompareWithRefImpl) {

@@ -54,6 +54,7 @@ void ConcatWithChildAndOutputTransformation::SetUp() {
function = ngraph::builder::subgraph::ConcatFunction::getOriginalWithChildAndOutput(
netPrecision, inputShapes, param.fqOnData1, param.fqOnData2);
functionRefs = ngraph::clone_function(*function);
}
TEST_P(ConcatWithChildAndOutputTransformation, CompareWithRefImpl) {

@@ -56,6 +56,7 @@ void ConcatWithDifferentChildrenTransformation::SetUp() {
function = ngraph::builder::subgraph::ConcatFunction::getOriginalWithDifferentPrecisionOnChildren(
netPrecision, inputShapes, param.axis, param.fqOnData1, param.fqOnData2);
functionRefs = ngraph::clone_function(*function);
}
TEST_P(ConcatWithDifferentChildrenTransformation, CompareWithRefImpl) {

@@ -72,6 +72,7 @@ void ConcatWithIntermediateTransformation::SetUp() {
transparentIntermediate,
{ 256ul, ngraph::Shape({}), {0.f}, {2.55f}, {0.f}, {2.55f} },
{ 256ul, ngraph::Shape({}), {0.f}, {2.55f}, {0.f}, {2.55f / 2.f} });
functionRefs = ngraph::clone_function(*function);
}
TEST_P(ConcatWithIntermediateTransformation, CompareWithRefImpl) {

@@ -55,6 +55,7 @@ void ConcatWithNeighborsGraphTransformation::SetUp() {
{ 256ul, ngraph::Shape({}), {0.f}, {2.55f}, {0.f}, {2.55f / 3.f} },
"concat",
"");
functionRefs = ngraph::clone_function(*function);
}
TEST_P(ConcatWithNeighborsGraphTransformation, CompareWithRefImpl) {

@@ -65,6 +65,7 @@ void ConcatWithSplitTransformation::SetUp() {
param.fqOnData1,
param.fqOnData2,
true);
functionRefs = ngraph::clone_function(*function);
}
TEST_P(ConcatWithSplitTransformation, CompareWithRefImpl) {

@@ -65,6 +65,7 @@ void ConvolutionBackpropDataTransformation::SetUp() {
outputShape,
param.fakeQuantizeOnData,
weights);
functionRefs = ngraph::clone_function(*function);
}
void ConvolutionBackpropDataTransformation::Run() {

@@ -53,6 +53,7 @@ void ConvolutionQDqTransformation::SetUp() {
param.convertOnWeights,
param.dequantizationOnWeights,
{});
functionRefs = ngraph::clone_function(*function);
}
void ConvolutionQDqTransformation::Run() {

@@ -50,6 +50,7 @@ void ConvolutionTransformation::SetUp() {
// TODO: pass from test parameters
param.fakeQuantizeOnData,
param.fakeQuantizeOnWeights);
functionRefs = ngraph::clone_function(*function);
}
void ConvolutionTransformation::Run() {

@@ -51,6 +51,7 @@ void ConvolutionWIthIncorrectWeightsTransformation::SetUp() {
param.fakeQuantizeOnWeights,
param.fakeQuantizeOnData,
param.isCorrect);
functionRefs = ngraph::clone_function(*function);
}
TEST_P(ConvolutionWIthIncorrectWeightsTransformation, CompareWithRefImpl) {

@@ -64,6 +64,7 @@ void DepthToSpaceTransformation::SetUp() {
}
function = ngraph::builder::subgraph::DepthToSpaceFunction::getOriginal(precision, inputShape, mode, blockSize);
functionRefs = ngraph::clone_function(*function);
}
TEST_P(DepthToSpaceTransformation, CompareWithRefImpl) {

Some files were not shown because too many files have changed in this diff.