[CPU Plugin][Func Test] Upgrade ConvConcatSubgraphTest to API 2.0 (#21301)
* [CPU Plugin][Func Test] Upgrade ConvConcatSubgraphTest to API 2.0

  Signed-off-by: Zhai, Xuejun <xuejun.zhai@intel.com>

* [CPU Plugin][Func Test] Upgrade ConvConcatSubgraphTest, remove ngraph namespace

  Signed-off-by: Zhai, Xuejun <xuejun.zhai@intel.com>

---------

Signed-off-by: Zhai, Xuejun <xuejun.zhai@intel.com>
parent 53baeacc2e
commit 9320fa7c86
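The changes below follow a single API 2.0 migration pattern: `InferenceEngine::SizeVector` becomes `std::vector<size_t>` or `ov::Shape`, `ngraph::op::PadType` becomes `ov::op::PadType`, the `ngraph::builder::make*` helpers become `ov::test::utils::make_*`, and the test graph is built as an `ov::Model` under the `ov::test` namespace with `SubgraphBaseStaticTest` as the base class. The following is a minimal sketch of the new-style graph construction, assuming the OpenVINO test-utility header added by this commit is on the include path; the function name `make_conv_concat_model` and the concrete kernel/stride/pad values are illustrative only and are not part of the commit.

// Sketch of the API 2.0 graph construction mirrored by this commit (not the test itself).
#include <memory>
#include <vector>

#include "common_test_utils/node_builders/convolution.hpp"  // ov::test::utils::make_convolution
#include "openvino/core/model.hpp"
#include "openvino/op/concat.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/op/result.hpp"

std::shared_ptr<ov::Model> make_conv_concat_model(const ov::Shape& input_shape) {
    // Two parameters feed two convolutions whose outputs are concatenated on axis 1.
    ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, input_shape),
                               std::make_shared<ov::op::v0::Parameter>(ov::element::f32, input_shape)};

    std::vector<std::shared_ptr<ov::Node>> convs(2);
    for (size_t i = 0; i < convs.size(); ++i) {
        // API 2.0 builder; replaces ngraph::builder::makeConvolution.
        convs[i] = ov::test::utils::make_convolution(params[i],
                                                     ov::element::f32,
                                                     {3, 3},                     // kernel (illustrative)
                                                     {2, 2},                     // strides
                                                     {1, 1},                     // pads begin
                                                     {1, 1},                     // pads end
                                                     {1, 1},                     // dilations
                                                     ov::op::PadType::EXPLICIT,
                                                     32);                        // output channels
    }

    auto concat = std::make_shared<ov::op::v0::Concat>(ov::NodeVector{convs[0], convs[1]}, 1);
    ov::ResultVector results{std::make_shared<ov::op::v0::Result>(concat)};
    // ov::Model replaces ngraph::Function.
    return std::make_shared<ov::Model>(results, params, "convolutionConcat");
}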
@@ -9,34 +9,35 @@
 #include <string>

 #include "test_utils/cpu_test_utils.hpp"
-#include "shared_test_classes/base/layer_test_utils.hpp"
+#include "shared_test_classes/base/ov_subgraph.hpp"
 #include "ov_models/utils/ov_helpers.hpp"
 #include "ov_models/builders.hpp"

 using namespace CPUTestUtils;

-namespace SubgraphTestsDefinitions {
+namespace ov {
+namespace test {

-using commonConvParams = std::tuple<
-    InferenceEngine::SizeVector,     // Kernel size
-    InferenceEngine::SizeVector,     // Strides
-    std::vector<ptrdiff_t>,          // Pad begin
-    std::vector<ptrdiff_t>,          // Pad end
-    InferenceEngine::SizeVector,     // Dilation
-    size_t,                          // Num out channels
-    ngraph::op::PadType,             // Padding type
-    size_t                           // Number of groups
->;
+using commonConvParams = std::tuple<std::vector<size_t>,     // Kernel size
+                                    std::vector<size_t>,     // Strides
+                                    std::vector<ptrdiff_t>,  // Pad begin
+                                    std::vector<ptrdiff_t>,  // Pad end
+                                    std::vector<size_t>,     // Dilation
+                                    size_t,                  // Num out channels
+                                    ov::op::PadType,         // Padding type
+                                    size_t                   // Number of groups
+                                    >;

-using convConcatCPUParams = std::tuple<
-    nodeType,                         // Ngraph convolution type
-    commonConvParams,                 // Convolution params
-    CPUTestUtils::CPUSpecificParams,  // CPU runtime params
-    InferenceEngine::SizeVector,      // Input shapes
-    int                               // Axis for concat
->;
+using convConcatCPUParams = std::tuple<nodeType,                         // Node convolution type
+                                       commonConvParams,                 // Convolution params
+                                       CPUTestUtils::CPUSpecificParams,  // CPU runtime params
+                                       ov::Shape,                        // Input shapes
+                                       int                               // Axis for concat
+                                       >;

-class ConvConcatSubgraphTest : public testing::WithParamInterface<convConcatCPUParams>, public CPUTestsBase, virtual public LayerTestsUtils::LayerTestsCommon {
+class ConvConcatSubgraphTest : public testing::WithParamInterface<convConcatCPUParams>,
+                               public CPUTestsBase,
+                               virtual public SubgraphBaseStaticTest {
 public:
     static std::string getTestCaseName(testing::TestParamInfo<convConcatCPUParams> obj);

@@ -45,4 +46,5 @@ protected:
     std::string pluginTypeNode;
 };

-} // namespace SubgraphTestsDefinitions
+} // namespace test
+} // namespace ov
@@ -2,30 +2,36 @@
 // SPDX-License-Identifier: Apache-2.0
 //

+#include "subgraph_tests/include/conv_concat.hpp"
+
+#include "common_test_utils/node_builders/convolution.hpp"
+#include "common_test_utils/node_builders/convolution_backprop_data.hpp"
+#include "common_test_utils/node_builders/group_convolution.hpp"
+#include "common_test_utils/node_builders/group_convolution_backprop_data.hpp"
 #include "test_utils/convolution_params.hpp"
 #include "test_utils/filter_cpu_info.hpp"
-#include "subgraph_tests/include/conv_concat.hpp"

 using namespace InferenceEngine;
 using namespace CPUTestUtils;

-namespace SubgraphTestsDefinitions {
+namespace ov {
+namespace test {

 std::string ConvConcatSubgraphTest::getTestCaseName(testing::TestParamInfo<convConcatCPUParams> obj) {
     std::ostringstream result;
     nodeType type;
     commonConvParams convParams;
     CPUSpecificParams cpuParams;
-    SizeVector inputShapes;
+    ov::Shape inputShapes;
     int axis;
     std::tie(type, convParams, cpuParams, inputShapes, axis) = obj.param;

     result << "Type=" << nodeType2str(type) << "_";

-    SizeVector kernelSize, strides, dilation;
+    std::vector<size_t> kernelSize, strides, dilation;
     std::vector<ptrdiff_t> padBegin, padEnd;
     size_t numOutChannels, numOfGroups;
-    ngraph::op::PadType paddingType;
+    ov::op::PadType paddingType;
     std::tie(kernelSize, strides, padBegin, padEnd, dilation, numOutChannels, paddingType, numOfGroups) = convParams;

     result << "IS=" << ov::test::utils::vec2str(inputShapes) << "_";
@@ -50,52 +56,81 @@ void ConvConcatSubgraphTest::SetUp() {
     nodeType type;
     commonConvParams convParams;
     CPUSpecificParams cpuParams;
-    SizeVector inputShapes;
+    ov::Shape inputShapes;
     int axis;

     std::tie(type, convParams, cpuParams, inputShapes, axis) = this->GetParam();
     pluginTypeNode = nodeType2PluginType(type);
-    SizeVector kernelSize, strides, dilation;
+    std::vector<size_t> kernelSize, strides, dilation;
     std::vector<ptrdiff_t> padBegin, padEnd;
     size_t numOutChannels, numOfGroups;
-    ngraph::op::PadType paddingType;
+    ov::op::PadType paddingType;

     std::tie(kernelSize, strides, padBegin, padEnd, dilation, numOutChannels, paddingType, numOfGroups) = convParams;
     std::tie(inFmts, outFmts, priority, selectedType) = cpuParams;

-    selectedType += "_FP32";
+    selectedType += "_f32";

-    ov::ParameterVector inputParams{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape(inputShapes)),
-                                    std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape(inputShapes))};
+    ov::ParameterVector inputParams{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, inputShapes),
+                                    std::make_shared<ov::op::v0::Parameter>(ov::element::f32, inputShapes)};

-    std::vector<std::shared_ptr<ngraph::Node>> convolutionNodes(2);
+    std::vector<std::shared_ptr<ov::Node>> convolutionNodes(2);
     switch (type) {
         case nodeType::convolution : {
             for (size_t conv = 0; conv < convolutionNodes.size(); conv++) {
-                convolutionNodes[conv] = ngraph::builder::makeConvolution(inputParams[conv], ngraph::element::f32, kernelSize, strides, padBegin,
-                                                                          padEnd, dilation, paddingType, numOutChannels);
+                convolutionNodes[conv] = ov::test::utils::make_convolution(inputParams[conv],
+                                                                           ov::element::f32,
+                                                                           kernelSize,
+                                                                           strides,
+                                                                           padBegin,
+                                                                           padEnd,
+                                                                           dilation,
+                                                                           paddingType,
+                                                                           numOutChannels);
             }
             break;
         }
         case nodeType::convolutionBackpropData : {
             for (size_t conv = 0; conv < convolutionNodes.size(); conv++) {
-                convolutionNodes[conv] = ngraph::builder::makeConvolutionBackpropData(inputParams[conv], ngraph::element::f32, kernelSize, strides, padBegin,
-                                                                                      padEnd, dilation, paddingType, numOutChannels);
+                convolutionNodes[conv] = ov::test::utils::make_convolution_backprop_data(inputParams[conv],
+                                                                                         ov::element::f32,
+                                                                                         kernelSize,
+                                                                                         strides,
+                                                                                         padBegin,
+                                                                                         padEnd,
+                                                                                         dilation,
+                                                                                         paddingType,
+                                                                                         numOutChannels);
             }
             break;
         }
         case nodeType::groupConvolution : {
             for (size_t conv = 0; conv < convolutionNodes.size(); conv++) {
-                convolutionNodes[conv] = ngraph::builder::makeGroupConvolution(inputParams[conv], ngraph::element::f32, kernelSize, strides, padBegin,
-                                                                               padEnd, dilation, paddingType, numOutChannels, numOfGroups);
+                convolutionNodes[conv] = ov::test::utils::make_group_convolution(inputParams[conv],
+                                                                                 ov::element::f32,
+                                                                                 kernelSize,
+                                                                                 strides,
+                                                                                 padBegin,
+                                                                                 padEnd,
+                                                                                 dilation,
+                                                                                 paddingType,
+                                                                                 numOutChannels,
+                                                                                 numOfGroups);
             }
             break;
         }
         case nodeType::groupConvolutionBackpropData : {
             for (size_t conv = 0; conv < convolutionNodes.size(); conv++) {
-                convolutionNodes[conv] = ngraph::builder::makeGroupConvolutionBackpropData(inputParams[conv], ngraph::element::f32, kernelSize,
-                                                                                           strides, padBegin, padEnd, dilation, paddingType,
-                                                                                           numOutChannels, numOfGroups);
+                convolutionNodes[conv] = ov::test::utils::make_group_convolution_backprop_data(inputParams[conv],
+                                                                                                ov::element::f32,
+                                                                                                kernelSize,
+                                                                                                strides,
+                                                                                                padBegin,
+                                                                                                padEnd,
+                                                                                                dilation,
+                                                                                                paddingType,
+                                                                                                numOutChannels,
+                                                                                                numOfGroups);
             }
             break;
         }
@@ -109,36 +144,36 @@ void ConvConcatSubgraphTest::SetUp() {

     auto concat = std::make_shared<ov::op::v0::Concat>(ov::NodeVector{convolutionNodes[0], convolutionNodes[1]}, axis);

-    ngraph::ResultVector results{std::make_shared<ngraph::opset4::Result>(concat)};
-    function = std::make_shared<ngraph::Function>(results, inputParams, "convolutionConcat");
+    ov::ResultVector results{std::make_shared<ov::op::v0::Result>(concat)};
+    function = std::make_shared<ov::Model>(results, inputParams, "convolutionConcat");
 }

 TEST_P(ConvConcatSubgraphTest, CompareWithRefs) {
-    Run();
+    run();

-    CheckPluginRelatedResults(executableNetwork, pluginTypeNode);
+    CheckPluginRelatedResults(compiledModel, pluginTypeNode);
 };

 /* ============= Common Convolution Params ============= */
-const ngraph::op::PadType paddingType{ngraph::op::PadType::EXPLICIT};
+const ov::op::PadType paddingType{ov::op::PadType::EXPLICIT};
 const size_t numOutChannels{32};
 const int axis{1};

-const SizeVector inputShapes2D{1, 64, 16, 16};
-const SizeVector kernelSize2D{3, 3};
-const SizeVector strides2D{2, 2};
+const ov::Shape inputShapes2D{1, 64, 16, 16};
+const std::vector<size_t> kernelSize2D{3, 3};
+const std::vector<size_t> strides2D{2, 2};
 const std::vector<ptrdiff_t> padBegin2D{1, 1};
 const std::vector<ptrdiff_t> padEnd2D{1, 1};
-const SizeVector dilation2D{1, 1};
+const std::vector<size_t> dilation2D{1, 1};
 commonConvParams convParams2D = commonConvParams{kernelSize2D, strides2D, padBegin2D, padEnd2D, dilation2D, numOutChannels, paddingType, 1};
 commonConvParams groupConvParams2D = commonConvParams{kernelSize2D, strides2D, padBegin2D, padEnd2D, dilation2D, numOutChannels, paddingType, 2};

-const SizeVector inputShapes3D{1, 64, 8, 16, 16};
-const SizeVector kernelSize3D{3, 3, 3};
-const SizeVector strides3D{2, 2, 2};
+const ov::Shape inputShapes3D{1, 64, 8, 16, 16};
+const std::vector<size_t> kernelSize3D{3, 3, 3};
+const std::vector<size_t> strides3D{2, 2, 2};
 const std::vector<ptrdiff_t> padBegin3D{1, 1, 1};
 const std::vector<ptrdiff_t> padEnd3D{1, 1, 1};
-const SizeVector dilation3D{1, 1, 1};
+const std::vector<size_t> dilation3D{1, 1, 1};
 commonConvParams convParams3D = commonConvParams{kernelSize3D, strides3D, padBegin3D, padEnd3D, dilation3D, numOutChannels, paddingType, 1};
 commonConvParams groupConvParams3D = commonConvParams{kernelSize3D, strides3D, padBegin3D, padEnd3D, dilation3D, numOutChannels, paddingType, 2};

@@ -184,7 +219,7 @@ namespace GroupConvolutionBackpropDataDWConcat {

 /* ============= GroupConvolutionBackpropData (DW 2D) ============= */
 commonConvParams dwDeconvParams2D = commonConvParams{kernelSize2D, strides2D, padBegin2D, padEnd2D, dilation2D, numOutChannels, paddingType, numOutChannels};
-const SizeVector inputShapesDW2D{1, 32, 16, 16};
+const ov::Shape inputShapesDW2D{1, 32, 16, 16};
 const std::vector<CPUSpecificParams> CPUParams2D = {
     conv_sse42_dw_2D,
     conv_avx2_dw_2D,
@@ -207,7 +242,7 @@ namespace GroupConvolutionDWConcat {

 /* ============= GroupConvolution (DW 2D) ============= */
 commonConvParams dwConvParams2D = commonConvParams{kernelSize2D, strides2D, padBegin2D, padEnd2D, dilation2D, numOutChannels, paddingType, numOutChannels};
-const SizeVector inputShapesDW2D{1, 32, 16, 16};
+const ov::Shape inputShapesDW2D{1, 32, 16, 16};
 const std::vector<CPUSpecificParams> CPUParams2D = {
     conv_sse42_dw_2D,
     conv_avx2_dw_2D,
@@ -226,7 +261,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_DWGroupConvolution2D, ConvConcatSubgraphTest, par

 /* ============= GroupConvolution (DW 3D) ============= */
 commonConvParams dwConvParams3D = commonConvParams{kernelSize3D, strides3D, padBegin3D, padEnd3D, dilation3D, numOutChannels, paddingType, numOutChannels};
-const SizeVector inputShapesDW3D{1, 32, 8, 16, 16};
+const ov::Shape inputShapesDW3D{1, 32, 8, 16, 16};
 const std::vector<CPUSpecificParams> CPUParams3D = {
     conv_sse42_dw_3D,
     conv_avx2_dw_3D,
@@ -404,4 +439,5 @@ INSTANTIATE_TEST_SUITE_P(smoke_GroupConvolutionBackpropData3D, ConvConcatSubgrap

 } // namespace GroupConvolutionBackpropDataConcat

-} // namespace SubgraphTestsDefinitions
+} // namespace test
+} // namespace ov