Remove use of convertOps2Nodes() & convert2OutVect() (#20837)

This commit is contained in:
Vitaliy Urusovskij 2023-11-03 11:00:33 +04:00 committed by GitHub
parent caa81a0b3c
commit 0955faef93
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
87 changed files with 185 additions and 330 deletions

View File

@ -117,8 +117,6 @@ protected:
}
functionParams.front()->set_friendly_name("data");
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(functionParams));
std::shared_ptr<ov::op::v3::Broadcast> broadcastOp;
if (mode == ov::op::BroadcastType::EXPLICIT) {
std::shared_ptr<ov::Node> targetShapeOp;
@ -133,19 +131,19 @@ protected:
} else {
axesMappingOp = functionParams.size() > 2 ? functionParams[2] : functionParams[1];
}
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(paramOuts[0],
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(functionParams[0],
targetShapeOp,
axesMappingOp,
mode);
} else if (mode == ov::op::BroadcastType::NUMPY) {
if (isTargetShapeConst) {
auto targetShapeConst = ov::op::v0::Constant::create(ov::element::i64, {targetShapeRank}, targetShape);
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(paramOuts[0],
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(functionParams[0],
targetShapeConst,
mode);
} else {
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(paramOuts[0],
paramOuts[1],
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(functionParams[0],
functionParams[1],
mode);
}
}

View File

@ -100,14 +100,12 @@ void MvnLayerCPUTest::SetUp() {
init_input_shapes({inputShapes});
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netPrecision, shape));
}
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto mvn = ngraph::builder::makeMVN(paramOuts[0], acrossChanels, normalizeVariance, eps);
auto mvn = ngraph::builder::makeMVN(params[0], acrossChanels, normalizeVariance, eps);
if (!axes.empty()) {
mvn = ngraph::builder::makeMVN(paramOuts[0], axes, normalizeVariance, eps);
mvn = ngraph::builder::makeMVN(params[0], axes, normalizeVariance, eps);
}
rel_threshold = 0.015f;

View File

@ -95,11 +95,8 @@ void ReduceCPULayerTest::SetUp() {
init_input_shapes(inputShapes);
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netPrecision, shape));
}
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::vector<size_t> shapeAxes;
switch (opType) {
@ -116,7 +113,7 @@ void ReduceCPULayerTest::SetUp() {
auto reductionAxesNode = std::dynamic_pointer_cast<ngraph::Node>(
std::make_shared<ngraph::opset3::Constant>(ngraph::element::Type_t::i64, ngraph::Shape(shapeAxes), axes));
const auto reduce = ngraph::builder::makeReduce(paramOuts[0], reductionAxesNode, keepDims, reductionType);
const auto reduce = ngraph::builder::makeReduce(params[0], reductionAxesNode, keepDims, reductionType);
// hybrid layouts
if (inFmts.size() != 0 && outFmts.size() == 0) {

View File

@ -53,13 +53,10 @@ void SoftMaxLayerCPUTest::SetUp() {
selectedType = makeSelectedTypeStr(selectedType, inType);
init_input_shapes({config.inputShape});
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(inType, shape));
}
const auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
const auto softMax = std::make_shared<ngraph::opset1::Softmax>(paramOuts.at(0), config.axis);
const auto softMax = std::make_shared<ngraph::opset1::Softmax>(params.at(0), config.axis);
function = makeNgraphFunction(inType, params, softMax, "SoftMax");
}

View File

@ -200,12 +200,9 @@ protected:
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType) = convParams;
ov::ParameterVector inputParams;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
inputParams.push_back(std::make_shared<ov::op::v0::Parameter>(ov::element::f32, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(inputParams));
auto convolutionNode = ngraph::builder::makeConvolution(paramOuts.front(), netType, kernel, stride, padBegin,
auto convolutionNode = ngraph::builder::makeConvolution(inputParams[0], netType, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels);
function = makeNgraphFunction(netType, inputParams, convolutionNode, "Convolution");

View File

@ -112,16 +112,14 @@ protected:
auto ngInPrec = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(inPrec);
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(ngInPrec, shape));
}
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset5::Parameter>(params));
auto il = builder::makeConstant(ngInPrec, ranges[0], rangesBounds[0], rangesBounds[0].empty());
auto ih = builder::makeConstant(ngInPrec, ranges[1], rangesBounds[1], rangesBounds[1].empty());
auto ol = builder::makeConstant(ngInPrec, ranges[2], rangesBounds[2], rangesBounds[2].empty());
auto oh = builder::makeConstant(ngInPrec, ranges[3], rangesBounds[3], rangesBounds[3].empty());
auto fq = std::make_shared<opset5::FakeQuantize>(paramOuts[0], il, ih, ol, oh, levels);
auto fq = std::make_shared<opset5::FakeQuantize>(params[0], il, ih, ol, oh, levels);
layerName = shouldBeDecomposed ? "" : "FakeQuantize";

View File

@ -105,13 +105,12 @@ protected:
params.push_back(std::make_shared<ov::op::v0::Parameter>(intInputsPrecision, inputDynamicShapes[2]));
params[2]->set_friendly_name("axis");
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
std::shared_ptr<ov::Node> gatherNode;
if (isAxisConstant) {
gatherNode = std::make_shared<ov::op::v8::Gather>(paramOuts[0], paramOuts[1],
gatherNode = std::make_shared<ov::op::v8::Gather>(params[0], params[1],
ov::op::v0::Constant::create(intInputsPrecision, ov::Shape({1}), { axis }), batchDims);
} else {
gatherNode = std::make_shared<ov::op::v8::Gather>(paramOuts[0], paramOuts[1], paramOuts[2], batchDims);
gatherNode = std::make_shared<ov::op::v8::Gather>(params[0], params[1], params[2], batchDims);
}
function = makeNgraphFunction(netPrecision, params, gatherNode, "GatherCPU");
@ -205,8 +204,7 @@ protected:
std::make_shared<ov::op::v0::Parameter>(netPrecision, inputDynamicShapes[0])
};
params[0]->set_friendly_name("data");
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
std::shared_ptr<ov::Node> gatherNode = std::make_shared<ov::op::v8::Gather>(paramOuts[0],
std::shared_ptr<ov::Node> gatherNode = std::make_shared<ov::op::v8::Gather>(params[0],
ov::op::v0::Constant::create(intInputsPrecision, ov::Shape({indices.size()}), indices),
ov::op::v0::Constant::create(intInputsPrecision, ov::Shape({1}), { axis }), batchDims);

View File

@ -103,9 +103,8 @@ protected:
std::make_shared<ov::op::v0::Parameter>(gridPrecision, inputDynamicShapes[1])};
params[0]->set_friendly_name("data");
params[1]->set_friendly_name("grid");
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
GridSample::Attributes attributes = {alignCorners, interpolateMode, paddingMode};
auto gridSampleNode = std::make_shared<GridSample>(paramOuts[0], paramOuts[1], attributes);
auto gridSampleNode = std::make_shared<GridSample>(params[0], params[1], attributes);
function = makeNgraphFunction(dataPrecision, params, gridSampleNode, "GridSampleCPU");
}

View File

@ -68,11 +68,10 @@ protected:
init_input_shapes({inputShape});
ov::ParameterVector paramsIn;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
paramsIn.push_back(std::make_shared<ov::op::v0::Parameter>(netPrecision, shape));
}
const auto paramsOut = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(paramsIn));
const auto grn = std::make_shared<ngraph::opset1::GRN>(paramsOut[0], bias);
const auto grn = std::make_shared<ngraph::opset1::GRN>(paramsIn[0], bias);
const ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(grn)};
function = std::make_shared<ngraph::Function>(results, paramsIn, "Grn");
}

View File

@ -193,13 +193,11 @@ protected:
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, numGroups, padType) = groupConvParams;
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netType, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto groupConv = std::dynamic_pointer_cast<ngraph::opset1::GroupConvolution>(
ngraph::builder::makeGroupConvolution(paramOuts[0], netType, kernel, stride, padBegin,
ngraph::builder::makeGroupConvolution(params[0], netType, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels, numGroups));
function = makeNgraphFunction(netType, params, groupConv, "groupConvolution");
}

View File

@ -65,8 +65,7 @@ protected:
init_input_shapes(inputShapes);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, inputDynamicShapes.front())};
const auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
const auto logSoftmax = std::make_shared<ngraph::op::v5::LogSoftmax>(paramOuts[0], axis);
const auto logSoftmax = std::make_shared<ngraph::op::v5::LogSoftmax>(params[0], axis);
const ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(logSoftmax)};
function = std::make_shared<ngraph::Function>(results, params, "logSoftmax");
}

View File

@ -164,8 +164,7 @@ protected:
if (secondaryInputType == helpers::InputLayerType::PARAMETER) {
params.push_back(std::dynamic_pointer_cast<opset1::Parameter>(matrixB));
}
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(params));
auto matMul = builder::makeMatMul(paramOuts[0], matrixB, transpA, transpB);
auto matMul = builder::makeMatMul(params[0], matrixB, transpA, transpB);
function = makeNgraphFunction(netType, params, matMul, cpuNodeType);
checkFusingPosition = false;
}

View File

@ -190,12 +190,11 @@ protected:
selectedType = makeSelectedTypeStr(selectedType, element::i8);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(inType, inShapeA)};
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(params));
auto matrixB = builder::makeDynamicInputLayer(element::f32, helpers::InputLayerType::CONSTANT, inShapeB);
auto weiData = generateSparseVector(ngraph::shape_size(inShapeB.get_shape()), weiSparseRate);
auto matMul = makeMatMulRelaxed(paramOuts[0], inShapeB, weiType, transpA, transpB, weiData);
auto matMul = makeMatMulRelaxed(params[0], inShapeB, weiType, transpA, transpB, weiData);
function = makeNgraphFunction(element::f32, params, matMul, cpuNodeType);

View File

@ -137,10 +137,9 @@ protected:
params.push_back(depthParam);
depth = depthParam;
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
auto on_value_const = std::make_shared<ngraph::op::Constant>(outType, ngraph::Shape{ }, OnValue);
auto off_value_const = std::make_shared<ngraph::op::Constant>(outType, ngraph::Shape{ }, OffValue);
auto oneHot = std::make_shared<ngraph::opset5::OneHot>(paramOuts[0], depth, on_value_const, off_value_const, Axis);
auto oneHot = std::make_shared<ngraph::opset5::OneHot>(params[0], depth, on_value_const, off_value_const, Axis);
return makeNgraphFunction(ngraph::element::i32, params, oneHot, "OneHot");
}
void generateDepth() {

View File

@ -143,7 +143,6 @@ protected:
for (auto&& shape : {inputDynamicShapes[0], inputDynamicShapes[1], inputDynamicShapes[2]}) {
params.push_back(std::make_shared<ov::op::v0::Parameter>(ngPrc, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
ngraph::op::ProposalAttrs attrs;
attrs.base_size = base_size;
@ -162,7 +161,7 @@ protected:
attrs.framework = framework;
attrs.infer_probs = true;
auto proposal = std::make_shared<opset4::Proposal>(paramOuts[0], paramOuts[1], paramOuts[2], attrs);
auto proposal = std::make_shared<opset4::Proposal>(params[0], params[1], params[2], attrs);
ngraph::ResultVector results{
std::make_shared<ngraph::opset1::Result>(proposal->output(0)),

View File

@ -203,13 +203,10 @@ protected:
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(ngPrc, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto roi_pooling = ngraph::builder::makeROIPooling(paramOuts[0], paramOuts[1], poolShape, spatial_scale, pool_method);
auto roi_pooling = ngraph::builder::makeROIPooling(params[0], params[1], poolShape, spatial_scale, pool_method);
ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(roi_pooling)};
function = makeNgraphFunction(ngPrc, params, roi_pooling, "ROIPooling");

View File

@ -66,8 +66,7 @@ protected:
auto param_node = std::make_shared<ov::op::v0::Parameter>(types[i], inputDynamicShapes[i]);
parameters.push_back(param_node);
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(parameters));
auto select = ngraph::builder::makeSelect(paramOuts, broadcast);
auto select = std::make_shared<ov::op::v1::Select>(parameters[0], parameters[1], parameters[2], broadcast);
function = makeNgraphFunction(precision, parameters, select, "Eltwise");
}

View File

@ -69,11 +69,10 @@ protected:
selectedType = makeSelectedTypeStr("ref", inType);
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(inType, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
auto shapeOf = std::make_shared<ngraph::opset3::ShapeOf>(paramOuts[0], ngraph::element::i32);
auto shapeOf = std::make_shared<ngraph::opset3::ShapeOf>(params[0], ngraph::element::i32);
function = makeNgraphFunction(netPrecision, params, shapeOf, "ShapeOf");
}

View File

@ -107,8 +107,7 @@ protected:
selectedType = std::string("ref_any_") + netPrecision.name();
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrec, inputDynamicShapes.front())};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
paramShape = {paramOuts[0].get_partial_shape().size()};
paramShape = {params[0]->get_partial_shape().size()};
std::shared_ptr<ov::Node> in2, in3, in4;
auto blockShapeParam = std::make_shared<ov::op::v0::Parameter>(ov::element::i64, paramShape);
@ -121,7 +120,7 @@ protected:
in4 = padsEndParam;
params.push_back(padsEndParam);
auto s2b = std::make_shared<ov::op::v1::SpaceToBatch>(paramOuts[0], in2, in3, in4);
auto s2b = std::make_shared<ov::op::v1::SpaceToBatch>(params[0], in2, in3, in4);
function = makeNgraphFunction(inType, params, s2b, "SpaceToBatchCPU");
}
};

View File

@ -71,12 +71,10 @@ protected:
init_input_shapes({inputShapes});
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netPrecision, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto split = std::dynamic_pointer_cast<ngraph::opset5::Split>(ngraph::builder::makeSplit(paramOuts[0],
auto split = std::dynamic_pointer_cast<ngraph::opset5::Split>(ngraph::builder::makeSplit(params[0],
netPrecision, numSplits, axis));
ngraph::ResultVector results;

View File

@ -99,13 +99,12 @@ protected:
}
functionParams.front()->set_friendly_name("data");
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(functionParams));
std::shared_ptr<ov::Node> tileNode;
if (isRepeatsConst) {
tileNode = std::make_shared<ov::op::v0::Tile>(paramOuts[0],
tileNode = std::make_shared<ov::op::v0::Tile>(functionParams[0],
ov::op::v0::Constant::create(ov::element::i64, { repeatsData.size() }, repeatsData));
} else {
tileNode = std::make_shared<ov::op::v0::Tile>(paramOuts[0], paramOuts[1]);
tileNode = std::make_shared<ov::op::v0::Tile>(functionParams[0], functionParams[1]);
}
function = makeNgraphFunction(netPrecision, functionParams, tileNode, "CPUTile");
}

View File

@ -99,13 +99,12 @@ protected:
params.push_back(std::make_shared<ov::op::v0::Parameter>(dataPrecision, shape));
}
params[0]->set_friendly_name("data");
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
std::shared_ptr<ov::Node> uniqueNode;
if (flattened) {
uniqueNode = std::make_shared<ov::op::v10::Unique>(paramOuts[0], sorted);
uniqueNode = std::make_shared<ov::op::v10::Unique>(params[0], sorted);
} else {
axis = std::get<1>(flatOrAxis);
uniqueNode = std::make_shared<ov::op::v10::Unique>(paramOuts[0],
uniqueNode = std::make_shared<ov::op::v10::Unique>(params[0],
ov::op::v0::Constant::create(ov::element::i64, ov::Shape({1}), {axis}),
sorted);
}

View File

@ -28,11 +28,9 @@ public:
<< "Indices vector size and provided indices shape doesn't fit each other";
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto indicesNode = ngraph::opset3::Constant::create(secondConstantType, ngraph::Shape(indicesShape), indices);
auto axisNode = ngraph::opset3::Constant::create(ngraph::element::i64, ngraph::Shape({}), {axis});
auto gather = std::make_shared<ngraph::opset3::Gather>(paramOuts[0], indicesNode, axisNode);
auto gather = std::make_shared<ngraph::opset3::Gather>(params[0], indicesNode, axisNode);
ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(gather)};
function = std::make_shared<ngraph::Function>(results, params, "gather");
}

View File

@ -56,9 +56,7 @@ protected:
const auto ngPrec = element::f32;
ov::ParameterVector inputParams{std::make_shared<ov::op::v0::Parameter>(ngPrec, ov::Shape(inShapes.first)),
std::make_shared<ov::op::v0::Parameter>(ngPrec, ov::Shape(inShapes.second))};
const auto outputNodes = helpers::convert2OutputVector(helpers::castOps2Nodes<op::Parameter>(inputParams));
const auto matMul = builder::makeMatMul(outputNodes[0], outputNodes[1], false, false);
const auto matMul = builder::makeMatMul(inputParams[0], inputParams[1], false, false);
selectedType = makeSelectedTypeStr(with_cpu_x86_avx512_core() ? "brgemm_avx512" : "jit_gemm", ngPrec);

View File

@ -62,8 +62,7 @@ protected:
for (auto&& shape : inputDynamicShapes) {
inputParams.push_back(std::make_shared<ov::op::v0::Parameter>(ngraph::element::f32, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(inputParams));
conv = builder::makeGroupConvolution(paramOuts.front(), element::f32, kernelSize, strides, padBegin, padEnd, dilation,
conv = builder::makeGroupConvolution(inputParams[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation,
paddingType, numOutChannels, numOfGroups);
ResultVector results;

View File

@ -67,15 +67,14 @@ protected:
for (auto&& shape : inputDynamicShapes) {
inputParams.push_back(std::make_shared<ov::op::v0::Parameter>(ngraph::element::f32, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(inputParams));
switch (convType) {
case nodeType::convolution : {
conv = builder::makeConvolution(paramOuts.front(), element::f32, kernelSize, strides, padBegin, padEnd, dilation,
conv = builder::makeConvolution(inputParams[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation,
paddingType, numOutChannels);
break;
}
case nodeType::groupConvolution : {
conv = builder::makeGroupConvolution(paramOuts.front(), element::f32, kernelSize, strides, padBegin, padEnd, dilation,
conv = builder::makeGroupConvolution(inputParams[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation,
paddingType, numOutChannels, numOfGroups);
break;
}

View File

@ -67,13 +67,12 @@ protected:
for (auto&& shape : inputDynamicShapes) {
params.push_back(std::make_shared<ov::op::v0::Parameter>(ngraph::element::f32, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::vector<size_t> shapeAxes;
shapeAxes.push_back(axes.size());
auto reductionAxesNode = std::dynamic_pointer_cast<ngraph::Node>(
std::make_shared<ngraph::opset3::Constant>(ngraph::element::Type_t::i64, ngraph::Shape(shapeAxes), axes));
const auto reduce = ngraph::builder::makeReduce(paramOuts[0], reductionAxesNode, keepDims, reductionType);
const auto reduce = ngraph::builder::makeReduce(params[0], reductionAxesNode, keepDims, reductionType);
function = makeNgraphFunction(ElementType::f32, params, reduce, "Reduce");
}
private:

View File

@ -42,7 +42,6 @@ protected:
cpuNodeType = nodeType2PluginType(convType);
ov::ParameterVector inputParams{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1, 1024, 64})};
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<op::Parameter>(inputParams));
std::shared_ptr<Node> conv;
const std::vector<size_t> kernelSize = {1};
@ -55,11 +54,11 @@ protected:
const op::PadType paddingType = op::PadType::EXPLICIT;
switch (convType) {
case nodeType::convolution : {
conv = builder::makeConvolution(paramOuts[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation, paddingType, numOutChannels);
conv = builder::makeConvolution(inputParams[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation, paddingType, numOutChannels);
break;
}
case nodeType::groupConvolution : {
conv = builder::makeGroupConvolution(paramOuts[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation, paddingType, numOutChannels,
conv = builder::makeGroupConvolution(inputParams[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation, paddingType, numOutChannels,
numOfGroups);
break;
}

View File

@ -66,35 +66,35 @@ void ConvConcatSubgraphTest::SetUp() {
ov::ParameterVector inputParams{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape(inputShapes)),
std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape(inputShapes))};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(inputParams));
std::vector<std::shared_ptr<ngraph::Node>> convolutionNodes(2);
switch (type) {
case nodeType::convolution : {
for (size_t conv = 0; conv < convolutionNodes.size(); conv++) {
convolutionNodes[conv] = ngraph::builder::makeConvolution(paramOuts[conv], ngraph::element::f32, kernelSize, strides, padBegin,
convolutionNodes[conv] = ngraph::builder::makeConvolution(inputParams[conv], ngraph::element::f32, kernelSize, strides, padBegin,
padEnd, dilation, paddingType, numOutChannels);
}
break;
}
case nodeType::convolutionBackpropData : {
for (size_t conv = 0; conv < convolutionNodes.size(); conv++) {
convolutionNodes[conv] = ngraph::builder::makeConvolutionBackpropData(paramOuts[conv], ngraph::element::f32, kernelSize, strides, padBegin,
convolutionNodes[conv] = ngraph::builder::makeConvolutionBackpropData(inputParams[conv], ngraph::element::f32, kernelSize, strides, padBegin,
padEnd, dilation, paddingType, numOutChannels);
}
break;
}
case nodeType::groupConvolution : {
for (size_t conv = 0; conv < convolutionNodes.size(); conv++) {
convolutionNodes[conv] = ngraph::builder::makeGroupConvolution(paramOuts[conv], ngraph::element::f32, kernelSize, strides, padBegin,
convolutionNodes[conv] = ngraph::builder::makeGroupConvolution(inputParams[conv], ngraph::element::f32, kernelSize, strides, padBegin,
padEnd, dilation, paddingType, numOutChannels, numOfGroups);
}
break;
}
case nodeType::groupConvolutionBackpropData : {
for (size_t conv = 0; conv < convolutionNodes.size(); conv++) {
convolutionNodes[conv] = ngraph::builder::makeGroupConvolutionBackpropData(paramOuts[conv], ngraph::element::f32, kernelSize, strides, padBegin,
padEnd, dilation, paddingType, numOutChannels, numOfGroups);
convolutionNodes[conv] = ngraph::builder::makeGroupConvolutionBackpropData(inputParams[conv], ngraph::element::f32, kernelSize,
strides, padBegin, padEnd, dilation, paddingType,
numOutChannels, numOfGroups);
}
break;
}

View File

@ -33,7 +33,6 @@ protected:
std::tie(postOpMgrPtr, fusedOps) = fusingParams;
ov::ParameterVector inputParams{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1, 3, 40, 40})};
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<op::Parameter>(inputParams));
std::shared_ptr<Node> conv;
{
@ -44,7 +43,7 @@ protected:
const std::vector<size_t> dilation = {1, 1};
const size_t numOutChannels = 16;
const op::PadType paddingType = op::PadType::EXPLICIT;
conv = builder::makeConvolution(paramOuts[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation, paddingType, numOutChannels);
conv = builder::makeConvolution(inputParams[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation, paddingType, numOutChannels);
}
std::shared_ptr<Node> pooling;
{

View File

@ -51,8 +51,6 @@ void ConvWithZeroPointFuseSubgraphTest::SetUp() {
{-12.8f},
{12.7f});
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(inputParams));
std::vector<std::shared_ptr<ngraph::Node>> branches(2);
{
ngraph::Strides strides{1, 1};

View File

@ -113,11 +113,8 @@ protected:
std::shared_ptr<Node> H;
ov::ParameterVector inputParams;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
inputParams.push_back(std::make_shared<ov::op::v0::Parameter>(ngPrec, shape));
}
const auto outputNodes = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes(inputParams));
auto makeDataFQ = [](const ngraph::Output<Node>& input) {
const auto fqLevels = 256;
@ -126,10 +123,10 @@ protected:
{-128.f/127}, {1.f});
};
auto X_FQ = makeDataFQ(outputNodes[0]);
auto X_FQ = makeDataFQ(inputParams[0]);
if (quantizedHiddenState) {
H = makeDataFQ(outputNodes[1]);
H = makeDataFQ(inputParams[1]);
} else {
H = ngraph::builder::makeConstant(ngraph::element::f32, inputDynamicShapes[1].get_shape(), {}, true, 1.f, -1.f);
}
@ -159,7 +156,7 @@ protected:
if (rnnType == "LSTMSequence") {
hasCell = true;
auto C = outputNodes[cellIdx];
auto C = inputParams[cellIdx];
rnnCellOp = std::make_shared<ov::op::v5::LSTMSequence>(
X_FQ, H, C, seq_lengths, W_FQ, R_FQ, B,
hiddenSize, op::RecurrentSequenceDirection::FORWARD);

View File

@ -38,16 +38,15 @@ protected:
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape{1, 512, 32}),
std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape{1, 128, 32})};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto FQ = ngraph::builder::makeFakeQuantize(paramOuts[1], ngPrc, 256, {}, {-2.8215785026550293}, {2.799535036087036},
auto FQ = ngraph::builder::makeFakeQuantize(params[1], ngPrc, 256, {}, {-2.8215785026550293}, {2.799535036087036},
{-2.8215785026550293}, {2.799535036087036});
auto FQ_0 = ngraph::builder::makeFakeQuantize(paramOuts[1], ngPrc, 256, {}, {-5.031249523162842}, {4.991942882537842},
auto FQ_0 = ngraph::builder::makeFakeQuantize(params[1], ngPrc, 256, {}, {-5.031249523162842}, {4.991942882537842},
{-5.031249523162842}, {4.991942882537842});
auto Add_0 = ngraph::builder::makeEltwise(FQ_0, FQ, EltwiseTypes::ADD);
auto FQ_1 = ngraph::builder::makeFakeQuantize(paramOuts[0], ngPrc, 256, {}, {-2.122633457183838}, {2.106050491333008},
auto FQ_1 = ngraph::builder::makeFakeQuantize(params[0], ngPrc, 256, {}, {-2.122633457183838}, {2.106050491333008},
{-2.122633457183838}, {2.106050491333008});
auto Const = ngraph::builder::makeConstant(ngPrc, {128, 512, 1}, std::vector<float>{-0.0512377955019474}, false);
@ -58,7 +57,7 @@ protected:
auto Add = ngraph::builder::makeEltwise(Add_0, Conv, EltwiseTypes::ADD);
auto FQ_11 = ngraph::builder::makeFakeQuantize(paramOuts[0], ngPrc, 256, {}, {-3.2050728797912598}, {3.1800332069396973},
auto FQ_11 = ngraph::builder::makeFakeQuantize(params[0], ngPrc, 256, {}, {-3.2050728797912598}, {3.1800332069396973},
{-3.2050728797912598}, {3.1800332069396973});
auto Const_ = ngraph::builder::makeConstant(ngPrc, {128, 512, 1}, std::vector<float>{-0.001183388871140778}, false);

View File

@ -47,9 +47,8 @@ protected:
auto constShift = ngraph::opset5::Constant::create(ngraph::element::f32, sumShape, sumConstData);
auto mmConst = ngraph::opset5::Constant::create(ngraph::element::f32, mmShape2, mmInData);
ov::ParameterVector mmParams {std::make_shared<ov::op::v0::Parameter>(ngPrec, mmShape)};
const auto mmOutputNodes = helpers::convert2OutputVector(helpers::castOps2Nodes<op::Parameter>(mmParams));
const auto mm = builder::makeMatMul(mmOutputNodes[0], mmConst, false, false);
const auto mm = builder::makeMatMul(mmParams[0], mmConst, false, false);
auto sum = ngraph::builder::makeEltwise(constShift, mm, ngraph::helpers::EltwiseTypes::ADD);
auto fq = ngraph::builder::makeFakeQuantize(sum, ngraph::element::f32, 256, {}, {-8.0f}, {7.0f}, {-8.0f}, {7.0f});

View File

@ -48,9 +48,8 @@ protected:
ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrec, ov::Shape(splitShape))};
const auto splitOutputNodes = helpers::convert2OutputVector(helpers::castOps2Nodes<op::Parameter>(params));
const auto splitAxis = rank == 3 ? 1 : 0;
const auto split = builder::makeSplit(splitOutputNodes[0], ngPrec, 2 /* splits */, splitAxis);
const auto split = builder::makeSplit(params[0], ngPrec, 2 /* splits */, splitAxis);
SizeVector fcWeightsShape{16, 8};
if (rank == 3) bcastTo3D(fcWeightsShape);

View File

@ -209,7 +209,6 @@ protected:
selectedType = makeSelectedTypeStr(selectedType, outType);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(inType, inShapeA)};
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(params));
std::shared_ptr<Node> inputB = builder::makeConstant<float>(weiConstElemType, inShapeB.get_shape(), {}, true);
if (weiConstElemType == ElementType::f16) {
inputB = std::make_shared<opset1::Convert>(inputB, convertOutType);
@ -217,7 +216,7 @@ protected:
}
expectedWeiConstElemType = weiConstElemType;
auto matMul = builder::makeMatMul(paramOuts[0], inputB, transpA, transpB);
auto matMul = builder::makeMatMul(params[0], inputB, transpA, transpB);
function = CPUTestsBase::makeNgraphFunction(netType, params, matMul, cpuNodeType);
}
@ -494,7 +493,6 @@ protected:
for (auto&& shape : {inShapeFC0, inShapeFC1}) {
params.push_back(std::make_shared<ov::op::v0::Parameter>(inType, shape));
}
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(params));
std::shared_ptr<Node> inputWeights = builder::makeConstant<float>(weiConstElemType, inShapeWeights.get_shape(), {}, true);
if (weiConstElemType == ElementType::f16) {
inputWeights = std::make_shared<opset1::Convert>(inputWeights, convertOutType);
@ -503,8 +501,8 @@ protected:
// In this test, convert must be folded on the ngraph side, so the constant with fp32 precision is expected
expectedWeiConstElemType = ElementType::f32;
auto matMul0 = builder::makeMatMul(paramOuts[0], inputWeights, transpA, transpB);
auto matMul1 = builder::makeMatMul(paramOuts[1], inputWeights, transpA, transpB);
auto matMul0 = builder::makeMatMul(params[0], inputWeights, transpA, transpB);
auto matMul1 = builder::makeMatMul(params[1], inputWeights, transpA, transpB);
auto concat = builder::makeConcat({matMul0, matMul1}, 0);

View File

@ -36,8 +36,7 @@ protected:
SizeVector splitShape{1, 2, 1, 16};
ov::ParameterVector splitInputParams {std::make_shared<ov::op::v0::Parameter>(ngPrec, ov::Shape(splitShape))};
const auto splitOutputNodes = helpers::convert2OutputVector(helpers::castOps2Nodes<op::Parameter>(splitInputParams));
const auto split = builder::makeSplit(splitOutputNodes[0], ngPrec, 2 /* splits */, 1 /* 2nd axis */);
const auto split = builder::makeSplit(splitInputParams[0], ngPrec, 2 /* splits */, 1 /* 2nd axis */);
std::vector<ov::Shape> concatShapes{{1, 1, 8, 8}, {1, 1, 8, 8}};
ov::ParameterVector concatInputParams {std::make_shared<ov::op::v0::Parameter>(ngPrec, concatShapes[0]),
@ -49,9 +48,8 @@ protected:
SizeVector matmulShape{1, 1, 16, 8};
ov::ParameterVector matmulInputParams {std::make_shared<ov::op::v0::Parameter>(ngPrec, ov::Shape(matmulShape))};
const auto matmulOutputNodes = helpers::convert2OutputVector(helpers::castOps2Nodes<op::Parameter>(matmulInputParams));
const auto matMul2 = builder::makeMatMul(split->output(1), matmulOutputNodes[0], false, false);
const auto matMul2 = builder::makeMatMul(split->output(1), matmulInputParams[0], false, false);
const auto concatMatMuls = builder::makeConcat({matMul1, matMul2}, 2 /* 3rd axis */);

View File

@ -17,7 +17,6 @@ protected:
ov::ParameterVector inputParams{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1, 3, 12, 9}),
std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1, 16, 12, 9})};
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<op::Parameter>(inputParams));
std::shared_ptr<Node> conv;
{
@ -28,12 +27,12 @@ protected:
const std::vector<size_t> dilation = {1, 1};
const size_t numOutChannels = 16;
const op::PadType paddingType = op::PadType::EXPLICIT;
conv = builder::makeConvolution(paramOuts[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation, paddingType, numOutChannels);
conv = builder::makeConvolution(inputParams[0], element::f32, kernelSize, strides, padBegin, padEnd, dilation, paddingType, numOutChannels);
}
const auto sharedNode = builder::makeConstant(element::f32, {1, 16, 1, 1}, std::vector<float>{}, true);
const auto postOpCandidate = builder::makeEltwise(conv, sharedNode, EltwiseTypes::ADD);
const auto secondConsumpt = builder::makeEltwise(paramOuts[1], sharedNode, EltwiseTypes::ADD);
const auto secondConsumpt = builder::makeEltwise(inputParams[1], sharedNode, EltwiseTypes::ADD);
NodeVector results{postOpCandidate, secondConsumpt};
function = std::make_shared<ngraph::Function>(results, inputParams, "NotFusedConvSimpleOp");

View File

@ -107,10 +107,9 @@ protected:
const auto& inShapeB = inputDynamicShapes[1];
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ElementType::f32, inShapeA)};
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(params));
std::shared_ptr<Node> inputB = builder::makeConstant<float>(ElementType::f32, inShapeB.get_shape(), {}, true);
auto split = builder::makeVariadicSplit(paramOuts[0], {1, 1}, 0);
auto split = builder::makeVariadicSplit(params[0], {1, 1}, 0);
auto matMul = builder::makeMatMul(split->output(0), inputB, transpA, transpB);

View File

@ -17,9 +17,8 @@ protected:
auto ngPrc = element::f32;
ov::ParameterVector inputParams {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape{1, 3, 12, 9})};
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<op::Parameter>(inputParams));
auto tile = ngraph::builder::makeTile(paramOuts[0], std::vector<int64_t>{1, 2, 1, 1});
auto tile = ngraph::builder::makeTile(inputParams[0], std::vector<int64_t>{1, 2, 1, 1});
const auto const1 = ngraph::builder::makeConstant(ngPrc, std::vector<size_t>{1, 6, 1, 1}, std::vector<float>{}, true);
const auto const2 = ngraph::builder::makeConstant(ngPrc, std::vector<size_t>{1, 6, 1, 1}, std::vector<float>{}, true);

View File

@ -362,9 +362,7 @@ const auto fusingSum = fusingSpecificParams{std::make_shared<postNodesMgr>(std::
auto shape = cfg.input->get_output_partial_shape(0);
ov::ParameterVector newParams{std::make_shared<ov::op::v0::Parameter>(cfg.type, shape)};
cfg.params.insert(cfg.params.end(), newParams.begin(), newParams.end());
auto newParamOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(newParams));
return std::make_shared<ngraph::opset1::Add>(cfg.input, newParamOuts[0]);
return std::make_shared<ngraph::opset1::Add>(cfg.input, newParams[0]);
}, "Add(Parameters)"}}), {"Add"}};
const auto fusingSumEluFQ = fusingSpecificParams{std::make_shared<postNodesMgr>(std::vector<postNodeBuilder>{
@ -372,9 +370,7 @@ const auto fusingSumEluFQ = fusingSpecificParams{std::make_shared<postNodesMgr>(
auto shape = cfg.input->get_output_partial_shape(0);
ov::ParameterVector newParams{std::make_shared<ov::op::v0::Parameter>(cfg.type, shape)};
cfg.params.insert(cfg.params.end(), newParams.begin(), newParams.end());
auto newParamOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(newParams));
return std::make_shared<ngraph::opset1::Add>(cfg.input, newParamOuts[0]);
return std::make_shared<ngraph::opset1::Add>(cfg.input, newParams[0]);
}, "Add(Parameters)"},
{[](postNodeConfig& cfg){
return ngraph::builder::makeActivation(cfg.input, cfg.type, ngraph::helpers::Elu, {}, {2.0f});

View File

@ -103,14 +103,13 @@ void ConvolutionLayerTestFixture::SetUp() {
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType) = convParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<Parameter>(params));
std::vector<float> filter_weights;
auto filter_size = std::accumulate(std::begin(kernel), std::end(kernel), 1, std::multiplies<size_t>());
filter_weights =
ov::test::utils::generate_float_numbers(convOutChannels * inputShape[1] * filter_size, -0.1f, 0.1f);
auto conv = std::dynamic_pointer_cast<Convolution>(ngraph::builder::makeConvolution(paramOuts[0],
auto conv = std::dynamic_pointer_cast<Convolution>(ngraph::builder::makeConvolution(params[0],
ngPrc,
kernel,
stride,

View File

@ -58,7 +58,6 @@ protected:
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShapes[0])),
std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShapes[1]))};
std::vector<ngraph::Shape> WRB = {inputShapes[2], inputShapes[3], inputShapes[4]};
auto in = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes(params));
std::vector<float> weights_vals =
ov::test::utils::generate_float_numbers(ngraph::shape_size(WRB[0]), -0.0001f, 0.0001f);
std::vector<float> reccurrenceWeights_vals =
@ -70,8 +69,8 @@ protected:
auto reccurrenceWeightsNode = ngraph::builder::makeConstant<float>(ngPrc, WRB[1], reccurrenceWeights_vals);
auto biasNode = ngraph::builder::makeConstant<float>(ngPrc, WRB[2], bias_vals);
auto gru_cell = std::make_shared<ngraph::opset8::GRUCell>(in[0],
in[1],
auto gru_cell = std::make_shared<ngraph::opset8::GRUCell>(params[0],
params[1],
weightsNode,
reccurrenceWeightsNode,
biasNode,

View File

@ -62,7 +62,6 @@ protected:
std::vector<ngraph::Shape> WRB = {inputShapes[3], inputShapes[4], inputShapes[5], inputShapes[2]};
auto in = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes(params));
std::vector<float> weights_vals =
ov::test::utils::generate_float_numbers(ngraph::shape_size(WRB[0]), -0.0001f, 0.0001f);
std::vector<float> reccurrenceWeights_vals =
@ -74,13 +73,13 @@ protected:
auto reccurrenceWeightsNode = ngraph::builder::makeConstant<float>(ngPrc, WRB[1], reccurrenceWeights_vals);
auto biasNode = ngraph::builder::makeConstant<float>(ngPrc, WRB[2], bias_vals);
std::vector<float> lengths(in[0].get_partial_shape()[0].get_min_length(),
in[0].get_partial_shape()[1].get_min_length());
std::vector<float> lengths(params[0]->get_partial_shape()[0].get_min_length(),
params[0]->get_partial_shape()[1].get_min_length());
std::shared_ptr<ngraph::Node> seq_length =
ngraph::builder::makeConstant(ngraph::element::i64, WRB[3], lengths, false);
auto gru_sequence = std::make_shared<ngraph::opset8::GRUSequence>(in[0],
in[1],
auto gru_sequence = std::make_shared<ngraph::opset8::GRUSequence>(params[0],
params[1],
seq_length,
weightsNode,
reccurrenceWeightsNode,

View File

@ -61,7 +61,6 @@ protected:
std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShapes[2]))};
std::vector<ngraph::Shape> WRB = {inputShapes[4], inputShapes[5], inputShapes[6], inputShapes[3]};
auto in = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes(params));
std::vector<float> weights_vals =
ov::test::utils::generate_float_numbers(ngraph::shape_size(WRB[0]), -0.0001f, 0.0001f);
@ -74,13 +73,13 @@ protected:
auto reccurrenceWeightsNode = ngraph::builder::makeConstant<float>(ngPrc, WRB[1], reccurrenceWeights_vals);
auto biasNode = ngraph::builder::makeConstant<float>(ngPrc, WRB[2], bias_vals);
std::vector<float> lengths(in[0].get_partial_shape()[0].get_min_length(),
in[0].get_partial_shape()[1].get_min_length());
std::vector<float> lengths(params[0]->get_partial_shape()[0].get_min_length(),
params[0]->get_partial_shape()[1].get_min_length());
std::shared_ptr<ngraph::Node> seq_length =
ngraph::builder::makeConstant(ngraph::element::i64, WRB[3], lengths, false);
auto lstm_sequence = std::make_shared<ngraph::opset8::LSTMSequence>(in[0],
in[1],
in[2],
auto lstm_sequence = std::make_shared<ngraph::opset8::LSTMSequence>(params[0],
params[1],
params[2],
seq_length,
weightsNode,
reccurrenceWeightsNode,

View File

@ -74,11 +74,10 @@ void TopKLayerTestGPU::SetUp() {
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto paramIn = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto k = std::make_shared<ov::op::v0::Constant>(ngraph::element::Type_t::i64, ngraph::Shape{}, &keepK);
auto topk = std::dynamic_pointer_cast<ov::op::v11::TopK>(
std::make_shared<ov::op::v11::TopK>(paramIn[0], k, axis, mode, sort, ngraph::element::Type_t::i64, stable));
std::make_shared<ov::op::v11::TopK>(params[0], k, axis, mode, sort, ngraph::element::Type_t::i64, stable));
ngraph::ResultVector results;
for (size_t i = 0; i < topk->get_output_size(); i++) {

View File

@ -85,12 +85,10 @@ protected:
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType) = convParams;
ov::ParameterVector inputParams;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
inputParams.push_back(std::make_shared<ov::op::v0::Parameter>(inType, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(inputParams));
auto convolutionNode = ngraph::builder::makeConvolution(paramOuts.front(), netType, kernel, stride, padBegin,
auto convolutionNode = ngraph::builder::makeConvolution(inputParams.front(), netType, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels);
ngraph::ResultVector results;

View File

@ -109,8 +109,6 @@ protected:
}
functionParams.front()->set_friendly_name("data");
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<ov::op::v0::Parameter>(functionParams));
std::shared_ptr<ov::op::v3::Broadcast> broadcastOp;
if (mode == ov::op::BroadcastType::EXPLICIT) {
std::shared_ptr<ov::Node> targetShapeOp;
@ -125,19 +123,19 @@ protected:
} else {
axesMappingOp = functionParams.size() > 2 ? functionParams[2] : functionParams[1];
}
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(paramOuts[0],
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(functionParams[0],
targetShapeOp,
axesMappingOp,
mode);
} else if (mode == ov::op::BroadcastType::NUMPY) {
if (isTargetShapeConst) {
auto targetShapeConst = ov::op::v0::Constant::create(ov::element::i64, {targetShapeRank}, targetShape);
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(paramOuts[0],
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(functionParams[0],
targetShapeConst,
mode);
} else {
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(paramOuts[0],
paramOuts[1],
broadcastOp = std::make_shared<ov::op::v3::Broadcast>(functionParams[0],
functionParams[1],
mode);
}
}

View File

@ -96,12 +96,10 @@ protected:
}
ov::ParameterVector inputParams;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
inputParams.push_back(std::make_shared<ov::op::v0::Parameter>(inType, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(inputParams));
auto convolutionNode = ngraph::builder::makeConvolution(paramOuts.front(), netType, kernel, stride, padBegin,
auto convolutionNode = ngraph::builder::makeConvolution(inputParams.front(), netType, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels);
if (activationFusing) {
auto activationNode = ngraph::builder::makeActivation(convolutionNode, netType, ngraph::helpers::ActivationTypes::Relu);

View File

@ -196,24 +196,23 @@ protected:
init_input_shapes({ inShapes });
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(ngraph::element::f32, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
if (attrs.num_classes == -1) {
std::shared_ptr<ov::op::v8::DetectionOutput> detOut;
if (paramOuts.size() == 3)
detOut = std::make_shared<ov::op::v8::DetectionOutput>(paramOuts[0], paramOuts[1], paramOuts[2], attrs);
else if (paramOuts.size() == 5)
detOut = std::make_shared<ov::op::v8::DetectionOutput>(paramOuts[0], paramOuts[1], paramOuts[2], paramOuts[3], paramOuts[4], attrs);
if (params.size() == 3)
detOut = std::make_shared<ov::op::v8::DetectionOutput>(params[0], params[1], params[2], attrs);
else if (params.size() == 5)
detOut = std::make_shared<ov::op::v8::DetectionOutput>(params[0], params[1], params[2], params[3], params[4], attrs);
else
throw std::runtime_error("DetectionOutput layer supports only 3 or 5 inputs");
ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(detOut)};
function = std::make_shared<ngraph::Function>(results, params, "DetectionOutputDynamic");
} else {
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
auto detOut = ngraph::builder::makeDetectionOutput(paramOuts, attrs);
ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(detOut)};
function = std::make_shared<ngraph::Function>(results, params, "DetectionOutputDynamic");

View File

@ -119,14 +119,11 @@ protected:
params.back()->set_friendly_name("axis");
}
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
gatherNode = std::make_shared<ov::op::v7::Gather>(paramOuts[0],
isIndicesConstant ? indicesNode : paramOuts[1],
gatherNode = std::make_shared<ov::op::v7::Gather>(params[0],
isIndicesConstant ? indicesNode : params[1],
isAxisConstant ? axisNode
: isIndicesConstant ? paramOuts[1]
: paramOuts[2],
: isIndicesConstant ? params[1]
: params[2],
batchDims);
ngraph::ResultVector results{std::make_shared<ngraph::opset4::Result>(gatherNode)};
function = std::make_shared<ngraph::Function>(results, params, "Gather");

View File

@ -100,11 +100,8 @@ protected:
params.back()->set_friendly_name("indices");
}
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
gather_ndNode = std::make_shared<ov::op::v8::GatherND>(paramOuts[0],
isIndicesConstant ? indicesNode : paramOuts[1],
gather_ndNode = std::make_shared<ov::op::v8::GatherND>(params[0],
isIndicesConstant ? indicesNode : params[1],
batchDims);
ngraph::ResultVector results{std::make_shared<ngraph::opset4::Result>(gather_ndNode)};
function = std::make_shared<ngraph::Function>(results, params, "GatherND");

View File

@ -83,9 +83,8 @@ protected:
std::make_shared<ov::op::v0::Parameter>(gridPrecision, inputDynamicShapes[1])};
params[0]->set_friendly_name("data");
params[1]->set_friendly_name("grid");
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
GridSample::Attributes attributes = {alignCorners, interpolateMode, paddingMode};
auto gridSampleNode = std::make_shared<GridSample>(paramOuts[0], paramOuts[1], attributes);
auto gridSampleNode = std::make_shared<GridSample>(params[0], params[1], attributes);
ngraph::ResultVector results;
for (size_t i = 0; i < gridSampleNode->get_output_size(); i++) {

View File

@ -87,12 +87,10 @@ protected:
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, numGroups, padType) = groupConvParams;
ov::ParameterVector inputParams;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
inputParams.push_back(std::make_shared<ov::op::v0::Parameter>(inType, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(inputParams));
auto groupConvolutionNode = ngraph::builder::makeGroupConvolution(paramOuts.front(), netType, kernel, stride, padBegin,
auto groupConvolutionNode = ngraph::builder::makeGroupConvolution(inputParams.front(), netType, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels, numGroups);
ngraph::ResultVector results;

View File

@ -122,8 +122,7 @@ protected:
if (secondaryInputType == helpers::InputLayerType::PARAMETER) {
params.push_back(std::dynamic_pointer_cast<opset1::Parameter>(matrixB));
}
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(params));
auto matMul = builder::makeMatMul(paramOuts[0], matrixB, transpA, transpB);
auto matMul = builder::makeMatMul(params[0], matrixB, transpA, transpB);
auto makeFunction = [](const ngraph::element::Type &ngPrc, ngraph::ParameterVector &params, const std::shared_ptr<ngraph::Node> &lastNode) {
ngraph::ResultVector results;

View File

@ -72,12 +72,11 @@ protected:
std::string eps_mode = "inside_sqrt";
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netPrecision, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto axesNode = ngraph::builder::makeConstant(axesType, ngraph::Shape{axes.size()}, axes);
auto mvn = ngraph::builder::makeMVN6(paramOuts[0], axesNode, normalizeVariance, eps, eps_mode);
auto mvn = ngraph::builder::makeMVN6(params[0], axesNode, normalizeVariance, eps, eps_mode);
rel_threshold = 0.015f;

View File

@ -56,11 +56,10 @@ protected:
init_input_shapes({inputShapes});
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netPrecision, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto normalize = ngraph::builder::makeNormalizeL2(paramOuts[0], axes, eps, epsMode);
auto normalize = ngraph::builder::makeNormalizeL2(params[0], axes, eps, epsMode);
ngraph::ResultVector results{std::make_shared<ngraph::opset4::Result>(normalize)};
function = std::make_shared<ngraph::Function>(results, params, "NormalizeL2");

View File

@ -121,8 +121,7 @@ protected:
arg_pad_value = std::make_shared<ngraph::opset3::Constant>(inType, ngraph::Shape{}, &argPadValue);
}
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<ov::op::v0::Parameter>(functionParams));
auto pad = std::make_shared<ngraph::opset3::Pad>(paramOuts[0], pads_begin, pads_end, arg_pad_value, padMode);
auto pad = std::make_shared<ngraph::opset3::Pad>(functionParams[0], pads_begin, pads_end, arg_pad_value, padMode);
ngraph::ResultVector results;
for (size_t i = 0; i < pad->get_output_size(); ++i) {

View File

@ -95,13 +95,11 @@ protected:
auto strideInput = ngraph::opset1::Constant::create(ngraph::element::i32, ngraph::Shape{1}, {1});
ov::ParameterVector functionParams;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
functionParams.push_back(std::make_shared<ov::op::v0::Parameter>(inType, shape));
}
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset3::Parameter>(functionParams));
auto shapeOfOp1 = std::make_shared<opset3::ShapeOf>(paramOuts[0], element::i32);
auto shapeOfOp2 = std::make_shared<opset3::ShapeOf>(paramOuts[1], element::i32);
auto shapeOfOp1 = std::make_shared<opset3::ShapeOf>(functionParams[0], element::i32);
auto shapeOfOp2 = std::make_shared<opset3::ShapeOf>(functionParams[1], element::i32);
auto stridedSliceOp1 = ngraph::builder::makeStridedSlice(shapeOfOp1, beginInput, endInput, strideInput, element::i32,

View File

@ -78,11 +78,8 @@ protected:
init_input_shapes(inputShapes);
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netPrecision, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::vector<size_t> shapeAxes;
shapeAxes.push_back(axes.size());
@ -90,7 +87,7 @@ protected:
auto reductionAxesNode = std::dynamic_pointer_cast<ngraph::Node>(
std::make_shared<ngraph::opset3::Constant>(ngraph::element::Type_t::i64, ngraph::Shape(shapeAxes), axes));
const auto reduce = ngraph::builder::makeReduce(paramOuts[0], reductionAxesNode, keepDims, reductionType);
const auto reduce = ngraph::builder::makeReduce(params[0], reductionAxesNode, keepDims, reductionType);
auto makeFunction = [](ParameterVector &params, const std::shared_ptr<Node> &lastNode) {
ResultVector results;

View File

@ -183,13 +183,10 @@ protected:
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(ngPrc, shape));
}
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto roi_pooling = ngraph::builder::makeROIPooling(paramOuts[0], paramOuts[1], poolShape, spatial_scale, pool_method);
auto roi_pooling = ngraph::builder::makeROIPooling(params[0], params[1], poolShape, spatial_scale, pool_method);
ngraph::ResultVector results;
for (size_t i = 0; i < roi_pooling->get_output_size(); i++)

View File

@ -54,11 +54,10 @@ protected:
outType = ElementType::i32;
ov::ParameterVector functionParams;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
functionParams.push_back(std::make_shared<ov::op::v0::Parameter>(netPrecision, shape));
}
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset3::Parameter>(functionParams));
auto shapeOfOp = std::make_shared<opset3::ShapeOf>(paramOuts[0], element::i32);
auto shapeOfOp = std::make_shared<opset3::ShapeOf>(functionParams[0], element::i32);
auto makeFunction = [](ParameterVector &params, const std::shared_ptr<Node> &lastNode) {
ResultVector results;

View File

@ -53,14 +53,10 @@ protected:
init_input_shapes({inShape});
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(inType, shape));
}
const auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
const auto softMax = std::make_shared<ngraph::opset1::Softmax>(paramOuts.at(0), axis);
const auto softMax = std::make_shared<ngraph::opset1::Softmax>(params.at(0), axis);
auto makeFunction = [](ParameterVector &params, const std::shared_ptr<Node> &lastNode) {
ResultVector results;

View File

@ -66,10 +66,8 @@ protected:
}
init_input_shapes({inputShape});
ov::ParameterVector dyn_params{std::make_shared<ov::op::v0::Parameter>(netPrecision, inputDynamicShapes[0])};
auto paramOuts =
ngraph::helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(dyn_params));
auto split = std::dynamic_pointer_cast<ngraph::opset5::Split>(
ngraph::builder::makeSplit(paramOuts[0], netPrecision, numSplits, axis));
ngraph::builder::makeSplit(dyn_params[0], netPrecision, numSplits, axis));
ngraph::ResultVector results;
for (size_t i = 0; i < outIndices.size(); i++) {
results.push_back(std::make_shared<ngraph::opset1::Result>(split->output(outIndices[i])));
@ -205,7 +203,6 @@ protected:
init_input_shapes(inputShapes);
ov::ParameterVector dyn_params{std::make_shared<ov::op::v0::Parameter>(netPrecision, inputDynamicShapes[0])};
auto paramOuts = ngraph::helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(dyn_params));
auto splitAxisOp = std::make_shared<ngraph::opset3::Constant>(ngraph::element::i64, ngraph::Shape{}, std::vector<int64_t>{static_cast<int64_t>(axis)});
@ -218,7 +215,7 @@ protected:
splitLengthOp = std::make_shared<ngraph::opset3::Constant>(ngraph::element::Type_t::i64, ngraph::Shape{splitLength.size()}, splitLength);
}
auto varSplit = std::make_shared<ngraph::opset3::VariadicSplit>(paramOuts[0], splitAxisOp, splitLengthOp);
auto varSplit = std::make_shared<ngraph::opset3::VariadicSplit>(dyn_params[0], splitAxisOp, splitLengthOp);
ngraph::ResultVector results;
for (size_t i = 0; i < splitLength.size(); i++) {
results.push_back(std::make_shared<ngraph::opset1::Result>(varSplit->output(i)));

View File

@ -93,13 +93,12 @@ protected:
}
functionParams.front()->set_friendly_name("data");
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(functionParams));
std::shared_ptr<ov::Node> tileNode;
if (isRepeatsConst) {
tileNode = std::make_shared<ov::op::v0::Tile>(paramOuts[0],
tileNode = std::make_shared<ov::op::v0::Tile>(functionParams[0],
ov::op::v0::Constant::create(ov::element::i64, { repeatsData.size() }, repeatsData));
} else {
tileNode = std::make_shared<ov::op::v0::Tile>(paramOuts[0], paramOuts[1]);
tileNode = std::make_shared<ov::op::v0::Tile>(functionParams[0], functionParams[1]);
}
ngraph::ResultVector results;

View File

@ -74,15 +74,13 @@ protected:
params.push_back(std::make_shared<ov::op::v0::Parameter>(dataPrecision, shape));
}
params[0]->set_friendly_name("data");
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
std::shared_ptr<ov::Node> uniqueNode;
if (flattened) {
uniqueNode = std::make_shared<ov::op::v10::Unique>(paramOuts[0], sorted);
uniqueNode = std::make_shared<ov::op::v10::Unique>(params[0], sorted);
} else {
axis = std::get<1>(flatOrAxis);
uniqueNode = std::make_shared<ov::op::v10::Unique>(
paramOuts[0],
params[0],
ov::op::v0::Constant::create(ov::element::i64, ov::Shape({1}), {axis}),
sorted);
}

View File

@ -79,10 +79,8 @@ protected:
const auto inShapSplit = inputDynamicShapes[0];
const auto inShapeElt = inputDynamicShapes[1];
ov::ParameterVector params;
for (auto&& shape : {inShapSplit, inShapeElt}) {
for (auto&& shape : {inShapSplit, inShapeElt})
params.push_back(std::make_shared<ov::op::v0::Parameter>(netType, shape));
}
auto paramOuts = helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
auto axis = ngraph::opset1::Constant::create(ngraph::element::i64, ngraph::Shape{}, {0});
axis->set_friendly_name("axis");
@ -90,10 +88,10 @@ protected:
auto split_sizes = ngraph::opset1::Constant::create(ngraph::element::i64, ngraph::Shape{2}, {1, 1});
split_sizes->set_friendly_name("split_sizes");
auto variadicSplitOp = std::make_shared<ngraph::opset1::VariadicSplit>(paramOuts[0], axis, split_sizes);
auto variadicSplitOp = std::make_shared<ngraph::opset1::VariadicSplit>(params[0], axis, split_sizes);
variadicSplitOp->set_friendly_name("variadicSplit");
auto addOp = ngraph::builder::makeEltwise(paramOuts[1], variadicSplitOp->output(1), ngraph::helpers::EltwiseTypes::ADD);
auto addOp = ngraph::builder::makeEltwise(params[1], variadicSplitOp->output(1), ngraph::helpers::EltwiseTypes::ADD);
addOp->set_friendly_name("add");
ngraph::ResultVector results = {std::make_shared<ngraph::opset1::Result>(addOp)};

View File

@ -84,12 +84,10 @@ protected:
const auto inShapeShapeOf = inputDynamicShapes[0];
const auto inShapeElt = inputDynamicShapes[1];
ov::ParameterVector params;
for (auto&& shape : {inShapeShapeOf, inShapeElt}) {
for (auto&& shape : {inShapeShapeOf, inShapeElt})
params.push_back(std::make_shared<ov::op::v0::Parameter>(netType, shape));
}
auto paramOuts = helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
auto addOp1 = ngraph::builder::makeEltwise(paramOuts[1], paramOuts[1], ngraph::helpers::EltwiseTypes::ADD);
auto addOp1 = ngraph::builder::makeEltwise(params[1], params[1], ngraph::helpers::EltwiseTypes::ADD);
addOp1->set_friendly_name("add1");
auto shapeOfOp1 = std::make_shared<ngraph::opset3::ShapeOf>(addOp1, ElementType::i64);
@ -110,7 +108,7 @@ protected:
auto reshapeOp1 = std::make_shared<ngraph::opset1::Reshape>(addOp1, concatOp1, false);
reshapeOp1->set_friendly_name("reshapeOp1");
auto addOp2 = ngraph::builder::makeEltwise(paramOuts[1], paramOuts[1], ngraph::helpers::EltwiseTypes::ADD);
auto addOp2 = ngraph::builder::makeEltwise(params[1], params[1], ngraph::helpers::EltwiseTypes::ADD);
addOp2->set_friendly_name("add2");
auto shapeOfOp2 = std::make_shared<ngraph::opset3::ShapeOf>(addOp2, ElementType::i64);

View File

@ -81,19 +81,17 @@ protected:
init_input_shapes(inputShapes);
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netType, shape));
}
auto paramOuts = helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
auto deconvOp = ngraph::builder::makeConvolutionBackpropData(paramOuts[0], netType, {2, 2, 2}, {2, 2, 2}, {0, 0, 0},
auto deconvOp = ngraph::builder::makeConvolutionBackpropData(params[0], netType, {2, 2, 2}, {2, 2, 2}, {0, 0, 0},
{0, 0, 0}, {1, 1, 1}, ov::op::PadType::EXPLICIT, 16);
deconvOp->set_friendly_name("deconv");
std::vector<int> reduce_axes = {5};
auto reduceAxesNode = std::dynamic_pointer_cast<ngraph::Node>(
std::make_shared<ngraph::opset3::Constant>(ngraph::element::Type_t::i64, ngraph::Shape({1}), reduce_axes));
auto reduceOp = ngraph::builder::makeReduce(paramOuts[1], reduceAxesNode, false, ngraph::helpers::ReductionType::Max);
auto reduceOp = ngraph::builder::makeReduce(params[1], reduceAxesNode, false, ngraph::helpers::ReductionType::Max);
reduceOp->set_friendly_name("reduce");
auto concatOp = ngraph::builder::makeConcat({deconvOp, reduceOp}, 1);

View File

@ -92,15 +92,13 @@ protected:
init_input_shapes({inputShape});
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netType, shape));
}
auto paramOuts = helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
std::vector<int> shape_pattern = {0, 1, -1, 0};
auto shapePatternsNode = std::dynamic_pointer_cast<ngraph::Node>(
std::make_shared<ngraph::opset3::Constant>(ngraph::element::Type_t::i64, ngraph::Shape({4}), shape_pattern));
auto reshapeOp = std::make_shared<ngraph::opset1::Reshape>(paramOuts[0], shapePatternsNode, true);
auto reshapeOp = std::make_shared<ngraph::opset1::Reshape>(params[0], shapePatternsNode, true);
reshapeOp->set_friendly_name("reshape");
auto shapeOfOp = std::make_shared<ngraph::opset3::ShapeOf>(reshapeOp, ElementType::i32);

View File

@ -84,15 +84,13 @@ protected:
const auto inShapeShapeOf = inputDynamicShapes[0];
const auto inShapeElt = inputDynamicShapes[1];
ov::ParameterVector params;
for (auto&& shape : inputDynamicShapes) {
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(netType, shape));
}
auto paramOuts = helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
auto addOp = ngraph::builder::makeEltwise(paramOuts[1], paramOuts[1], ngraph::helpers::EltwiseTypes::ADD);
auto addOp = ngraph::builder::makeEltwise(params[1], params[1], ngraph::helpers::EltwiseTypes::ADD);
addOp->set_friendly_name("add");
auto shapeOfOp1 = std::make_shared<ngraph::opset3::ShapeOf>(paramOuts[0], ElementType::i64);
auto shapeOfOp1 = std::make_shared<ngraph::opset3::ShapeOf>(params[0], ElementType::i64);
shapeOfOp1->set_friendly_name("shapeof1");
std::vector<int> reduce_axes = {0};
auto reduceAxesNode = std::dynamic_pointer_cast<ngraph::Node>(

View File

@ -91,19 +91,17 @@ protected:
const auto AllZeroData = inputDynamicShapes[0];
const auto ConcatInputData = inputDynamicShapes[1];
ov::ParameterVector params;
for (auto&& shape : {AllZeroData, ConcatInputData}) {
for (auto&& shape : {AllZeroData, ConcatInputData})
params.push_back(std::make_shared<ov::op::v0::Parameter>(netType, shape));
}
auto paramOuts =
helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::opset3::Parameter>(params));
const ElementType intInputsPrecision = ElementType::i32;
auto nonzeroEmptyResultOp = std::make_shared<ngraph::opset3::NonZero>(paramOuts[0]);
auto nonzeroEmptyResultOp = std::make_shared<ngraph::opset3::NonZero>(params[0]);
auto convertEmptyInputOp = ngraph::builder::makeConversion(nonzeroEmptyResultOp,
ElementType::i32,
ngraph::helpers::ConversionTypes::CONVERT);
auto concatPartialInputEmptyOp =
ngraph::builder::makeConcat({convertEmptyInputOp, paramOuts[1], convertEmptyInputOp},
ngraph::builder::makeConcat({convertEmptyInputOp, params[1], convertEmptyInputOp},
1); // partially empty input / non empty output
auto concatEmptyInputEmptyOutputOp =
ngraph::builder::makeConcat({convertEmptyInputOp, convertEmptyInputOp, convertEmptyInputOp},
@ -117,14 +115,14 @@ protected:
auto axisNode = ngraph::builder::makeConstant<int64_t>(intInputsPrecision, ov::Shape({1}), {0});
auto gatherEmptyIndicesOp =
std::make_shared<ov::op::v7::Gather>(paramOuts[0], squeezeEmptyInputOp, axisNode, 0);
std::make_shared<ov::op::v7::Gather>(params[0], squeezeEmptyInputOp, axisNode, 0);
auto shapeofEmptyInputOp = std::make_shared<ngraph::opset3::ShapeOf>(gatherEmptyIndicesOp, ElementType::i32);
ngraph::ResultVector results = {std::make_shared<ngraph::opset1::Result>(shapeofEmptyInputOp),
std::make_shared<ngraph::opset1::Result>(concatPartialInputEmptyOp),
std::make_shared<ngraph::opset1::Result>(concatEmptyInputEmptyOutputOp)};
function = std::make_shared<ngraph::Function>(results, params, "result");
auto nonzero = std::make_shared<ngraph::opset3::NonZero>(paramOuts[0]);
auto nonzero = std::make_shared<ngraph::opset3::NonZero>(params[0]);
}
};

View File

@ -1604,11 +1604,8 @@ std::shared_ptr<ov::Model> generateMultiSubGraph(const std::shared_ptr<ov::op::O
/// @brief Builds a minimal ov::Model containing a single MatrixNms op, used as a
///        conformance-test graph for ov::op::v8::MatrixNms.
/// @param node  Dispatch tag selecting this overload; its value is not inspected.
/// @return Model with two f32 inputs (boxes {1,2,4}, scores {1,2,2}) feeding a
///         MatrixNms with default attributes, whose outputs are the model results.
std::shared_ptr<ov::Model> generate(const std::shared_ptr<ov::op::v8::MatrixNms> &node) {
    ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{{1, 2, 4}}),
                               std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{{1, 2, 2}})};
    // Parameters convert implicitly to ov::Output<Node>, so no
    // convert2OutputVector()/castOps2Nodes() round-trip is needed.
    const auto nms =
        std::make_shared<ov::op::v8::MatrixNms>(params[0], params[1], ov::op::v8::MatrixNms::Attributes());
    ov::ResultVector results{std::make_shared<ov::op::v0::Result>(nms)};
    return std::make_shared<ov::Model>(results, params, "MatrixNms");
}
@ -1616,14 +1613,12 @@ std::shared_ptr<ov::Model> generate(const std::shared_ptr<ov::op::v8::MatrixNms>
std::shared_ptr<ov::Model> generateMulticlassNmsBase(const std::shared_ptr<ov::op::Op> &node) {
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{{1, 2, 4}}),
std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{{1, 2, 2}})};
const auto outputs =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
if (ov::is_type<ov::op::v8::MulticlassNms>(node)) {
const auto nms = std::make_shared<ov::op::v8::MulticlassNms>(outputs[0], outputs[1], ov::op::v8::MulticlassNms::Attributes());
const auto nms = std::make_shared<ov::op::v8::MulticlassNms>(params[0], params[1], ov::op::v8::MulticlassNms::Attributes());
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(nms)};
return std::make_shared<ov::Model>(results, params, "MulticlassNms");
} else if (ov::is_type<ov::op::v9::MulticlassNms>(node)) {
const auto nms = std::make_shared<ov::op::v9::MulticlassNms>(outputs[0], outputs[1], ov::op::v9::MulticlassNms::Attributes());
const auto nms = std::make_shared<ov::op::v9::MulticlassNms>(params[0], params[1], ov::op::v9::MulticlassNms::Attributes());
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(nms)};
return std::make_shared<ov::Model>(results, params, "MulticlassNms");
} else {
@ -1808,8 +1803,6 @@ std::shared_ptr<ov::Model> generate(const std::shared_ptr<ov::op::v9::GeneratePr
std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{{2, 2, 3, 4}}),
std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{{1, 12, 2, 2}}),
std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{{1, 3, 2, 2}})};
const auto outputs =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
ov::op::v9::GenerateProposals::Attributes attrs;
attrs.min_size = 1;
attrs.nms_threshold = 0.8;
@ -1817,7 +1810,7 @@ std::shared_ptr<ov::Model> generate(const std::shared_ptr<ov::op::v9::GeneratePr
attrs.post_nms_count = 100;
if (ov::is_type<ov::op::v9::GenerateProposals>(node)) {
const auto gp = std::make_shared<ov::op::v9::GenerateProposals>(
outputs[0], outputs[1], outputs[2], outputs[3], attrs);
params[0], params[1], params[2], params[3], attrs);
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(gp)};
return std::make_shared<ov::Model>(results, params, "GenerateProposalsGraph");
} else {

View File

@ -87,15 +87,14 @@ public:
auto make_ngraph = [&](bool with_extra_conv) {
auto in_prec = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(with_extra_conv ? inPrc : decltype(inPrc)(InferenceEngine::Precision::FP32));
ov::ParameterVector paramsIn {std::make_shared<ov::op::v0::Parameter>(in_prec, ov::Shape(inputShape))};
auto paramIn = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(paramsIn));
auto toF32 = std::make_shared<ngraph::opset1::Convert>(paramIn[0], ngraph::element::Type_t::f32);
auto toF32 = std::make_shared<ngraph::opset1::Convert>(paramsIn[0], ngraph::element::Type_t::f32);
auto constNode = std::make_shared<ngraph::opset1::Constant>(
ngraph::element::Type_t::i64, ngraph::Shape{inputShape.size()}, inputShape);
std::shared_ptr<ov::Node> reshape_input = with_extra_conv ? toF32->shared_from_this() : paramsIn[0];
auto reshape = std::dynamic_pointer_cast<ngraph::opset1::Reshape>(
std::make_shared<ngraph::opset1::Reshape>(with_extra_conv ? toF32 : paramIn[0], constNode, specialZero));
std::make_shared<ngraph::opset1::Reshape>(reshape_input, constNode, specialZero));
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(reshape)};
return std::make_shared<ngraph::Function>(results, paramsIn, "Reshape");
};

View File

@ -105,9 +105,8 @@ void SetBlobTest::SetUp() {
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(precNg);
ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(IS))};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto axisNode = std::make_shared<ngraph::op::Constant>(ngraph::element::Type_t::i64, ngraph::Shape{}, std::vector<int64_t>{-1})->output(0);
auto cumSum = std::dynamic_pointer_cast<ngraph::opset4::CumSum>(ngraph::builder::makeCumSum(paramOuts[0], axisNode, false, false));
auto cumSum = std::dynamic_pointer_cast<ngraph::opset4::CumSum>(ngraph::builder::makeCumSum(params[0], axisNode, false, false));
ngraph::ResultVector results{std::make_shared<ngraph::opset4::Result>(cumSum)};
function = std::make_shared<ngraph::Function>(results, params, "InferSetBlob");
}

View File

@ -73,10 +73,9 @@ void ProposalBehTest::SetUp() {
std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(boxesShape))};
params[0]->set_friendly_name("scores");
params[1]->set_friendly_name("boxes");
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto proposal = std::dynamic_pointer_cast<ngraph::opset1::Proposal>(
ngraph::builder::makeProposal(paramOuts[0], paramOuts[1], img_info, ngPrc,
ngraph::builder::makeProposal(params[0], params[1], img_info, ngPrc,
base_size,
pre_nms_topn,
post_nms_topn,

View File

@ -62,9 +62,8 @@ namespace SubgraphTestsDefinitions {
}
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto clamp = std::make_shared<ngraph::opset1::Clamp>(paramOuts[0], clamp_min_max[0], clamp_min_max[1]);
auto clamp = std::make_shared<ngraph::opset1::Clamp>(params[0], clamp_min_max[0], clamp_min_max[1]);
auto FQNode = ngraph::builder::makeFakeQuantize(clamp, ngraph::element::f32, levels[0], constShape[0],
{ inputDataMin }, { inputDataMax }, { inputDataMin }, { inputDataMax });

View File

@ -49,7 +49,7 @@ void ConvolutionReluSequenceTest::SetUp() {
configuration.insert(config.begin(), config.end());
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(convParamsAll.inputShape))};
auto lastOutputs = ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params).front();
std::shared_ptr<ov::Node> lastOutputs = params.front();
auto inputChannels = convParamsAll.inputShape[1];
for (auto&& single : convParamsAll.sequenceDesc) {

View File

@ -28,14 +28,12 @@ void MultiplyAddLayerTest::SetUp() {
ov::element::Type element_type;
std::tie(inputShape, element_type, targetDevice) = this->GetParam();
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(element_type, ov::PartialShape(inputShape))};
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
std::vector<size_t> constShape(inputShape.size(), 1);
constShape[1] = inputShape[1];
auto const_mul = ngraph::builder::makeConstant<float>(element_type, constShape, {}, true);
auto mul = std::make_shared<ov::op::v1::Multiply>(paramOuts[0], const_mul);
auto mul = std::make_shared<ov::op::v1::Multiply>(params[0], const_mul);
auto const_add = ngraph::builder::makeConstant<float>(element_type, constShape, {}, true);
auto add = std::make_shared<ov::op::v1::Add>(mul, const_add);
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(add)};

View File

@ -46,10 +46,8 @@ void MVNMultiplyAdd::SetUp() {
std::tie(inputShapes, constantShapes) = shapes;
ov::ParameterVector param{std::make_shared<ov::op::v0::Parameter>(dataType, ov::Shape(inputShapes))};
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(param));
auto axesNode = ngraph::builder::makeConstant(axesType, ov::Shape{axes.size()}, axes);
auto mvn = ngraph::builder::makeMVN6(paramOuts[0], axesNode, normalizeVariance, eps, epsMode);
auto mvn = ngraph::builder::makeMVN6(param[0], axesNode, normalizeVariance, eps, epsMode);
auto gamma = ngraph::builder::makeConstant<float>(dataType, constantShapes, {}, true);
auto mul = std::make_shared<ov::op::v1::Multiply>(mvn, gamma);
auto beta = ngraph::builder::makeConstant<float>(dataType, constantShapes, {}, true);

View File

@ -53,12 +53,11 @@ void QuantConvBackpropDataLayerTest::SetUp() {
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType, quantLevels, quantGranularity) = groupConvBackpropDataParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::vector<size_t> dataFqConstShapes(inputShape.size(), 1);
if (quantGranularity == ngraph::helpers::Perchannel)
dataFqConstShapes[1] = inputShape[1];
auto dataFq = ngraph::builder::makeFakeQuantize(paramOuts[0], ngPrc, quantLevels, dataFqConstShapes);
auto dataFq = ngraph::builder::makeFakeQuantize(params[0], ngPrc, quantLevels, dataFqConstShapes);
std::vector<size_t> weightsShapes = {inputShape[1], convOutChannels};
weightsShapes.insert(weightsShapes.end(), kernel.begin(), kernel.end());

View File

@ -58,12 +58,11 @@ void QuantGroupConvLayerTest::SetUp() {
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, numGroups, quantLevels, quantGranularity, quantizeWeights) = groupConvParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::vector<size_t> dataFqConstShapes(inputShape.size(), 1);
if (quantGranularity == ngraph::helpers::Perchannel)
dataFqConstShapes[1] = inputShape[1];
auto dataFq = ngraph::builder::makeFakeQuantize(paramOuts[0], ngPrc, quantLevels, dataFqConstShapes);
auto dataFq = ngraph::builder::makeFakeQuantize(params[0], ngPrc, quantLevels, dataFqConstShapes);
std::vector<size_t> weightsShapes = {convOutChannels, inputShape[1]};
if (weightsShapes[0] % numGroups || weightsShapes[1] % numGroups)

View File

@ -54,12 +54,11 @@ void QuantGroupConvBackpropDataLayerTest::SetUp() {
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, numGroups, padType, quantLevels, quantGranularity) = groupConvBackpropDataParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::vector<size_t> dataFqConstShapes(inputShape.size(), 1);
if (quantGranularity == ngraph::helpers::Perchannel)
dataFqConstShapes[1] = inputShape[1];
auto dataFq = ngraph::builder::makeFakeQuantize(paramOuts[0], ngPrc, quantLevels, dataFqConstShapes);
auto dataFq = ngraph::builder::makeFakeQuantize(params[0], ngPrc, quantLevels, dataFqConstShapes);
std::vector<size_t> weightsShapes = {inputShape[1], convOutChannels};
if (weightsShapes[0] % numGroups || weightsShapes[1] % numGroups)

View File

@ -73,8 +73,6 @@ void QuantMatMulTest::SetUp() {
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape0)),
std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape1))};
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto makeFakeQuantizeNode = [ngPrc](size_t quantLevels, QuantRange inputRange, QuantRange outputRange,
QuantizationGranularity quantGranularity, const ngraph::Output<ngraph::Node> &in, std::vector<size_t> inputShape,
@ -93,8 +91,8 @@ void QuantMatMulTest::SetUp() {
return ngraph::builder::makeFakeQuantize(in, ngPrc, quantLevels, dataFqConstShapes, inputLowData, inputHighData, outputLowData, outputHighData);
};
auto dataFq0 = makeFakeQuantizeNode(quantLevels0, inputRange0, outputRange0, quantGranularity0, paramOuts[0], inputShape0, fqPrec0);
auto dataFq1 = makeFakeQuantizeNode(quantLevels1, inputRange1, outputRange1, quantGranularity1, paramOuts[1], inputShape1, fqPrec1);
auto dataFq0 = makeFakeQuantizeNode(quantLevels0, inputRange0, outputRange0, quantGranularity0, params[0], inputShape0, fqPrec0);
auto dataFq1 = makeFakeQuantizeNode(quantLevels1, inputRange1, outputRange1, quantGranularity1, params[1], inputShape1, fqPrec1);
auto MatMul = std::dynamic_pointer_cast<ngraph::opset3::MatMul>(
ngraph::builder::makeMatMul(dataFq0, dataFq1));

View File

@ -34,8 +34,6 @@ void ReduceEltwiseTest::SetUp() {
std::tie(inputShape, axes, opType, keepDims, netPrecision, targetDevice) = this->GetParam();
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
std::vector<size_t> shapeAxes;
switch (opType) {
@ -54,7 +52,7 @@ void ReduceEltwiseTest::SetUp() {
auto reductionAxesNode = std::dynamic_pointer_cast<ngraph::Node>(
std::make_shared<ngraph::opset3::Constant>(ngraph::element::Type_t::i64, ngraph::Shape(shapeAxes), axes));
auto reduce = std::make_shared<ngraph::opset3::ReduceSum>(paramOuts[0], reductionAxesNode, keepDims);
auto reduce = std::make_shared<ngraph::opset3::ReduceSum>(params[0], reductionAxesNode, keepDims);
std::vector<size_t> constShape(reduce.get()->get_output_partial_shape(0).rank().get_length(), 1);
ASSERT_GT(constShape.size(), 2);

View File

@ -47,9 +47,7 @@ void StridedSliceTest::SetUp() {
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(ssParams.inputShape))};
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto relu = std::make_shared<ngraph::opset1::Relu>(paramOuts[0]);
auto relu = std::make_shared<ngraph::opset1::Relu>(params[0]);
auto ss = ngraph::builder::makeStridedSlice(relu, ssParams.begin, ssParams.end, ssParams.strides, ngPrc, ssParams.beginMask,
ssParams.endMask, ssParams.newAxisMask, ssParams.shrinkAxisMask, ssParams.ellipsisAxisMask);
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(ss)};

View File

@ -65,7 +65,6 @@ void FakeQuantizeSubgraphTest::SetUp() {
}
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
const int seed = 0;
std::mt19937 gen(seed);
@ -114,7 +113,7 @@ void FakeQuantizeSubgraphTest::SetUp() {
auto lowNode = ngraph::builder::makeConstant(ngraph::element::f32, channelDataSize, inputMinRange, false);
auto highNode = ngraph::builder::makeConstant(ngraph::element::f32, channelDataSize, inputMaxRange, false);
auto inputFQNode = ngraph::builder::makeFakeQuantize(paramOuts[0], ngraph::element::f32, levels[0], constShape[0],
auto inputFQNode = ngraph::builder::makeFakeQuantize(params[0], ngraph::element::f32, levels[0], constShape[0],
{ inputDataMin }, { inputDataMax }, { inputDataMin }, { inputDataMax });
auto weightsFQNode = std::make_shared<ngraph::opset1::FakeQuantize>(const_param,