Use new builders instead of old ones in tests (#21742)

* Use new make_convolution instead of the old one

* Use new make_group_convolution instead of the old one

* Use new make_convolution_backprop_data instead of the old one

* Use new make_group_convolution_backprop_data instead of the old one

* Use new make_binary_conv instead of the old one

* Remove makeVariadicSplit builder

* Use new make_activation instead of the old one

* Use new make_eltwise instead of the old one

* Use new make_embedding_bag_packed_sum instead of the old one

* Remove augru builder

* Fix clang-format

* Fix
Oleg Pipikin 2023-12-21 08:26:39 +01:00 committed by GitHub
parent ca5bf95e26
commit 12a9d55c3e
80 changed files with 271 additions and 348 deletions
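The change is mechanical across the 80 touched files: the deprecated ngraph::builder::make* helpers from ov_models/builders.hpp are swapped for the ov::test::utils::make_* builders declared under common_test_utils/node_builders/, and the removed helpers (makeVariadicSplit, the AUGRU builder) are replaced by constructing the corresponding ops directly. Below is a minimal sketch of the new-style usage, distilled from the hunks that follow; the helper name build_subgraph and its inputs are placeholders for illustration, not code from this PR.

    // Before (deprecated helpers from ov_models/builders.hpp):
    //   auto add   = ngraph::builder::makeEltwise(in0, in1, ngraph::helpers::EltwiseTypes::ADD);
    //   auto relu  = ngraph::builder::makeActivation(add, prc, ngraph::helpers::ActivationTypes::Relu);
    //   auto split = ngraph::builder::makeVariadicSplit(relu, {1, 1}, 0);

    // After: new test-utils builders, plus direct op construction where the builder was removed.
    #include <memory>
    #include <vector>

    #include "common_test_utils/node_builders/activation.hpp"
    #include "common_test_utils/node_builders/eltwise.hpp"
    #include "openvino/op/constant.hpp"
    #include "openvino/op/variadic_split.hpp"

    std::shared_ptr<ov::Node> build_subgraph(const ov::Output<ov::Node>& in0,
                                             const ov::Output<ov::Node>& in1,
                                             ov::element::Type prc) {
        auto add  = ov::test::utils::make_eltwise(in0, in1, ov::test::utils::EltwiseTypes::ADD);
        auto relu = ov::test::utils::make_activation(add, prc, ov::test::utils::ActivationTypes::Relu);
        // makeVariadicSplit was removed: build ov::op::v1::VariadicSplit directly instead.
        auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{0});
        auto num_split = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2}, std::vector<size_t>{1, 1});
        return std::make_shared<ov::op::v1::VariadicSplit>(relu, split_axis_op, num_split);
    }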

View File

@ -6,6 +6,8 @@
#include <vector>
#include "common_test_utils/node_builders/eltwise.hpp"
using namespace ov::test::behavior;
namespace {
@ -23,11 +25,11 @@ std::shared_ptr<ov::Model> getFunction2() {
auto split = std::make_shared<ov::op::v1::Split>(params[0], split_axis_op, 2);
auto in2add = ngraph::builder::makeConstant(ngPrc, {1, 2, 1, 1}, std::vector<float>{}, true);
auto add = ngraph::builder::makeEltwise(split->output(0), in2add, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(split->output(0), in2add, ov::test::utils::EltwiseTypes::ADD);
auto relu1 = std::make_shared<ov::op::v0::Relu>(add);
auto in2mult = ngraph::builder::makeConstant(ngPrc, {1, 2, 1, 1}, std::vector<float>{}, true);
auto mult = ngraph::builder::makeEltwise(split->output(1), in2mult, ov::test::utils::EltwiseTypes::MULTIPLY);
auto mult = ov::test::utils::make_eltwise(split->output(1), in2mult, ov::test::utils::EltwiseTypes::MULTIPLY);
auto relu2 = std::make_shared<ov::op::v0::Relu>(mult);
auto concat = std::make_shared<ov::op::v0::Concat>(ov::OutputVector{relu1->output(0), relu2->output(0)}, 3);

View File

@ -6,6 +6,7 @@
#include "openvino/runtime/compiled_model.hpp"
#include "common_test_utils/test_common.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
#include <openvino/opsets/opset9.hpp>
@ -24,7 +25,7 @@ std::shared_ptr<ov::Model> MakeMatMulModel() {
auto matmul = std::make_shared<ov::op::v0::MatMul>(params[0], matmul_const);
auto add_const = ngraph::builder::makeConstant(precision, {1, 1024}, std::vector<float>{}, true);
auto add = ngraph::builder::makeEltwise(matmul, add_const, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(matmul, add_const, ov::test::utils::EltwiseTypes::ADD);
auto softmax = std::make_shared<ov::opset9::Softmax>(add);
ov::NodeVector results{softmax};

View File

@ -5,6 +5,7 @@
#include <vector>
#include "behavior/ov_infer_request/infer_request_dynamic.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
using namespace ov::test::behavior;
@ -28,7 +29,7 @@ std::shared_ptr<ngraph::Function> getFunction1() {
params.front()->get_output_tensor(0).set_names({"input_tensor"});
auto in2add = ngraph::builder::makeConstant(ngPrc, {1, 4, 1, 1}, std::vector<float>{}, true);
auto add = ngraph::builder::makeEltwise(params[0], in2add, ngraph::helpers::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(params[0], in2add, ngraph::helpers::EltwiseTypes::ADD);
auto relu1 = std::make_shared<ov::op::v0::Relu>(add->output(0));
relu1->get_output_tensor(0).set_names({"relu1"});
auto relu2 = std::make_shared<ov::op::v0::Relu>(add->output(0));
@ -50,11 +51,11 @@ std::shared_ptr<ngraph::Function> getFunction2() {
auto in2add = ngraph::builder::makeConstant(ngPrc, {1, 2, 1, 1}, std::vector<float>{}, true);
auto add = ngraph::builder::makeEltwise(split->output(0), in2add, ngraph::helpers::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(split->output(0), in2add, ngraph::helpers::EltwiseTypes::ADD);
auto relu1 = std::make_shared<ov::op::v0::Relu>(add);
auto in2mult = ngraph::builder::makeConstant(ngPrc, {1, 2, 1, 1}, std::vector<float>{}, true);
auto mult = ngraph::builder::makeEltwise(split->output(1), in2mult, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto mult = ov::test::utils::make_eltwise(split->output(1), in2mult, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto relu2 = std::make_shared<ov::op::v0::Relu>(mult);
auto concat = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{relu1->output(0), relu2->output(0)}, 3);

View File

@ -213,7 +213,7 @@ void EltwiseLayerCPUTest::SetUp() {
}
}
}
auto eltwise = utils::makeEltwise(parameters[0], secondaryInput, eltwiseType);
auto eltwise = utils::make_eltwise(parameters[0], secondaryInput, eltwiseType);
function = makeNgraphFunction(netType, parameters, eltwise, "Eltwise");
}

View File

@ -60,7 +60,7 @@ protected:
inputs[port] = constNode;
inputs[1 - port] = param;
auto powerStatic = ov::test::utils::makeEltwise(inputs[0], inputs[1], nodeType);
auto powerStatic = ov::test::utils::make_eltwise(inputs[0], inputs[1], nodeType);
function = std::make_shared<ov::Model>(powerStatic, ParameterVector{param}, "ConvertToPluginSpecificNode");
}

View File

@ -56,7 +56,7 @@ protected:
{-5.031249523162842},
{4.991942882537842});
auto Add_0 = ov::test::utils::makeEltwise(FQ_0, FQ, ov::test::utils::EltwiseTypes::ADD);
auto Add_0 = ov::test::utils::make_eltwise(FQ_0, FQ, ov::test::utils::EltwiseTypes::ADD);
auto FQ_1 = ngraph::builder::makeFakeQuantize(params[0],
netPrecision,
@ -85,7 +85,7 @@ protected:
CoordinateDiff{0},
Strides{1});
auto Add = ov::test::utils::makeEltwise(Add_0, Conv, ov::test::utils::EltwiseTypes::ADD);
auto Add = ov::test::utils::make_eltwise(Add_0, Conv, ov::test::utils::EltwiseTypes::ADD);
auto FQ_11 = ngraph::builder::makeFakeQuantize(params[0],
netPrecision,
@ -115,7 +115,7 @@ protected:
CoordinateDiff{0},
CoordinateDiff{0},
Strides{1});
auto Add2 = ov::test::utils::makeEltwise(Add, Conv2, ov::test::utils::EltwiseTypes::ADD);
auto Add2 = ov::test::utils::make_eltwise(Add, Conv2, ov::test::utils::EltwiseTypes::ADD);
auto relu3 = ov::test::utils::make_activation(Add2, netPrecision, ov::test::utils::ActivationTypes::Relu);
auto result = std::make_shared<ov::op::v0::Result>(relu3);

View File

@ -166,9 +166,9 @@ protected:
inputNodes.push_back(paramVec.back());
}
auto lastNode0 = utils::makeEltwise(paramVec[0], paramVec[1], eltwiseOpTypes[0]);
auto lastNode0 = utils::make_eltwise(paramVec[0], paramVec[1], eltwiseOpTypes[0]);
lastNode0->get_rt_info() = getCPUInfo();
auto lastNode1 = utils::makeEltwise(paramVec[2], paramVec[3], eltwiseOpTypes[1]);
auto lastNode1 = utils::make_eltwise(paramVec[2], paramVec[3], eltwiseOpTypes[1]);
lastNode1->get_rt_info() = getCPUInfo();
if (withQuantization) {
lastNode0 = ngraph::builder::makeFakeQuantize(lastNode0, ov::element::Type(ov::element::Type_t::f32),

View File

@ -110,9 +110,9 @@ protected:
if (withQuantization) {
std::vector<std::shared_ptr<ov::Node>> eltwiseOps;
eltwiseOps.push_back(makeEltwise(paramVec[0], inputNodes[0], eltwiseOpTypes[0]));
eltwiseOps.push_back(make_eltwise(paramVec[0], inputNodes[0], eltwiseOpTypes[0]));
for (size_t i = 1; i < eltwiseOpTypes.size() - 1; i++) {
eltwiseOps.push_back(makeEltwise(eltwiseOps[eltwiseOps.size() - 1], inputNodes[i], eltwiseOpTypes[i]));
eltwiseOps.push_back(make_eltwise(eltwiseOps[eltwiseOps.size() - 1], inputNodes[i], eltwiseOpTypes[i]));
}
std::vector<size_t> constShape(targetStaticShapes[0][0].size(), 1);
@ -122,15 +122,15 @@ protected:
256,
constShape);
eltwiseOps.push_back(makeEltwise(fq, inputNodes[eltwiseOpTypes.size() - 1], eltwiseOpTypes[eltwiseOpTypes.size() - 1]));
eltwiseOps.push_back(make_eltwise(fq, inputNodes[eltwiseOpTypes.size() - 1], eltwiseOpTypes[eltwiseOpTypes.size() - 1]));
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(eltwiseOps[eltwiseOps.size() - 1])};
function = std::make_shared<ov::Model>(results, paramVec, "eltwise_chain_fq");
} else {
std::vector<std::shared_ptr<ov::Node>> eltwiseOps;
eltwiseOps.push_back(makeEltwise(paramVec[0], inputNodes[0], eltwiseOpTypes[0]));
eltwiseOps.push_back(make_eltwise(paramVec[0], inputNodes[0], eltwiseOpTypes[0]));
for (size_t i = 1; i < eltwiseOpTypes.size(); i++) {
eltwiseOps.push_back(makeEltwise(eltwiseOps[eltwiseOps.size() - 1], inputNodes[i], eltwiseOpTypes[i]));
eltwiseOps.push_back(make_eltwise(eltwiseOps[eltwiseOps.size() - 1], inputNodes[i], eltwiseOpTypes[i]));
}
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(eltwiseOps[eltwiseOps.size() - 1])};

View File

@ -46,7 +46,7 @@ protected:
ov::ParameterVector mmParams{std::make_shared<ov::op::v0::Parameter>(netPrecision, mmShape)};
const auto mm = std::make_shared<ov::op::v0::MatMul>(mmParams[0], mmConst, false, false);
auto sum = ov::test::utils::makeEltwise(constShift, mm, ov::test::utils::EltwiseTypes::ADD);
auto sum = ov::test::utils::make_eltwise(constShift, mm, ov::test::utils::EltwiseTypes::ADD);
auto fq = ngraph::builder::makeFakeQuantize(sum, ov::element::f32, 256, {}, {-8.0f}, {7.0f}, {-8.0f}, {7.0f});
ov::ParameterVector inputParams = {mmParams[0]};

View File

@ -47,8 +47,8 @@ void FuseMulAddAndEwSimpleTest1::CreateGraph() {
auto clamp = ov::test::utils::make_activation(params[0], inPrec, ActivationTypes::Clamp, inputShape, {0, 100});
auto tanh = ov::test::utils::make_activation(clamp, inPrec, ActivationTypes::Tanh);
auto mul1 = ov::test::utils::makeEltwise(params[1], params[2], EltwiseTypes::MULTIPLY);
auto add = ov::test::utils::makeEltwise(tanh, mul1, EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(params[1], params[2], EltwiseTypes::MULTIPLY);
auto add = ov::test::utils::make_eltwise(tanh, mul1, EltwiseTypes::ADD);
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(add)};
function = std::make_shared<ov::Model>(results, params, "MulAdd_EwSimple");
@ -73,8 +73,8 @@ void FuseMulAddAndEwSimpleTest2::CreateGraph() {
auto tanh1 = ov::test::utils::make_activation(clamp1, inPrec, ActivationTypes::Tanh);
auto clamp2 = ov::test::utils::make_activation(params[1], inPrec, ActivationTypes::Clamp, inputShape, {0, 100});
auto tanh2 = ov::test::utils::make_activation(clamp2, inPrec, ActivationTypes::Tanh);
auto mul1 = ov::test::utils::makeEltwise(tanh2, tanh1, EltwiseTypes::MULTIPLY);
auto add = ov::test::utils::makeEltwise(mul1, params[2], EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(tanh2, tanh1, EltwiseTypes::MULTIPLY);
auto add = ov::test::utils::make_eltwise(mul1, params[2], EltwiseTypes::ADD);
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(add)};
function = std::make_shared<ov::Model>(results, params, "MulAdd_EwSimple_2");
@ -96,11 +96,11 @@ void FuseMulAddAndEwSimpleTest3::CreateGraph() {
params.push_back(std::make_shared<ov::op::v0::Parameter>(inPrec, shape));
}
auto mul1 = ov::test::utils::makeEltwise(params[0], params[1], EltwiseTypes::MULTIPLY);
auto add1 = ov::test::utils::makeEltwise(mul1, params[2], EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(params[0], params[1], EltwiseTypes::MULTIPLY);
auto add1 = ov::test::utils::make_eltwise(mul1, params[2], EltwiseTypes::ADD);
auto tanh1 = ov::test::utils::make_activation(add1, inPrec, ActivationTypes::Tanh);
auto mul2 = ov::test::utils::makeEltwise(tanh1, params[3], EltwiseTypes::MULTIPLY);
auto add2 = ov::test::utils::makeEltwise(params[4], mul2, EltwiseTypes::ADD);
auto mul2 = ov::test::utils::make_eltwise(tanh1, params[3], EltwiseTypes::MULTIPLY);
auto add2 = ov::test::utils::make_eltwise(params[4], mul2, EltwiseTypes::ADD);
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(add2)};
function = std::make_shared<ov::Model>(results, params, "MulAdd_EwSimple_3");

View File

@ -49,9 +49,9 @@ public:
auto axis_node = ov::op::v0::Constant::create(ov::element::i32, {}, {0});
const auto cumsum = std::make_shared<ov::op::v0::CumSum>(cumsum_tensor, axis_node);
auto eltwiseMul = ov::test::utils::makeEltwise(inputParams[0], cumsum, ov::test::utils::EltwiseTypes::MULTIPLY);
auto eltwiseAdd1 = ov::test::utils::makeEltwise(inputParams[1], cumsum, ov::test::utils::EltwiseTypes::ADD);
auto eltwiseAdd2 = ov::test::utils::makeEltwise(eltwiseAdd1, eltwiseMul, ov::test::utils::EltwiseTypes::ADD);
auto eltwiseMul = ov::test::utils::make_eltwise(inputParams[0], cumsum, ov::test::utils::EltwiseTypes::MULTIPLY);
auto eltwiseAdd1 = ov::test::utils::make_eltwise(inputParams[1], cumsum, ov::test::utils::EltwiseTypes::ADD);
auto eltwiseAdd2 = ov::test::utils::make_eltwise(eltwiseAdd1, eltwiseMul, ov::test::utils::EltwiseTypes::ADD);
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(eltwiseAdd2)};
function = std::make_shared<ov::Model>(results, inputParams, "NonInputInPlaceT");

View File

@ -31,7 +31,7 @@ protected:
auto tensor = ov::test::utils::create_and_fill_tensor(netPrecision, inputShape);
auto secondaryInput = std::make_shared<ov::op::v0::Constant>(tensor);
auto eltwise = ov::test::utils::makeEltwise(input[0], secondaryInput, eltwiseType);
auto eltwise = ov::test::utils::make_eltwise(input[0], secondaryInput, eltwiseType);
function = makeNgraphFunction(netPrecision, input, eltwise, "Eltwise");
}

View File

@ -40,9 +40,9 @@ protected:
numOutChannels);
}
const auto sharedNode = ngraph::builder::makeConstant(element::f32, {1, 16, 1, 1}, std::vector<float>{}, true);
const auto postOpCandidate = ov::test::utils::makeEltwise(conv, sharedNode, utils::EltwiseTypes::ADD);
const auto postOpCandidate = ov::test::utils::make_eltwise(conv, sharedNode, utils::EltwiseTypes::ADD);
const auto secondConsumpt = ov::test::utils::makeEltwise(inputParams[1], sharedNode, utils::EltwiseTypes::ADD);
const auto secondConsumpt = ov::test::utils::make_eltwise(inputParams[1], sharedNode, utils::EltwiseTypes::ADD);
NodeVector results{postOpCandidate, secondConsumpt};
function = std::make_shared<ov::Model>(results, inputParams, "NotFusedConvSimpleOp");

View File

@ -49,14 +49,14 @@ protected:
auto split = std::make_shared<ov::op::v1::Split>(params.front(), split_axis_op, 3);
auto add_const = ngraph::builder::makeConstant(precision, {1}, std::vector<float>({1.0f}));
auto add_1 = utils::makeEltwise(split->output(0), add_const, utils::EltwiseTypes::ADD);
auto add_1 = utils::make_eltwise(split->output(0), add_const, utils::EltwiseTypes::ADD);
auto result_add_1 = std::make_shared<ov::op::v0::Result>(add_1);
auto add_2 = utils::makeEltwise(split->output(1), add_const, utils::EltwiseTypes::ADD);
auto add_3 = utils::makeEltwise(split->output(2), add_const, utils::EltwiseTypes::ADD);
auto add_2 = utils::make_eltwise(split->output(1), add_const, utils::EltwiseTypes::ADD);
auto add_3 = utils::make_eltwise(split->output(2), add_const, utils::EltwiseTypes::ADD);
auto concat = std::make_shared<ov::op::v0::Concat>(ov::NodeVector{add_1, add_2, add_3}, 1);
auto result_concat = std::make_shared<ov::op::v0::Result>(concat);
auto add_4 = utils::makeEltwise(concat, add_const, utils::EltwiseTypes::ADD);
auto add_5 = utils::makeEltwise(concat, add_const, utils::EltwiseTypes::ADD);
auto add_4 = utils::make_eltwise(concat, add_const, utils::EltwiseTypes::ADD);
auto add_5 = utils::make_eltwise(concat, add_const, utils::EltwiseTypes::ADD);
auto result_1 = std::make_shared<ov::op::v0::Result>(add_4);
auto result_2 = std::make_shared<ov::op::v0::Result>(add_5);
ov::ResultVector results = {result_1, result_2, result_add_1, result_concat};

View File

@ -107,7 +107,9 @@ protected:
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ElementType::f32, inShapeA)};
std::shared_ptr<Node> inputB = ngraph::builder::makeConstant<float>(ElementType::f32, inShapeB.get_shape(), {}, true);
auto split = ngraph::builder::makeVariadicSplit(params[0], {1, 1}, 0);
auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{0});
auto num_split = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2}, std::vector<size_t>{1, 1});
auto split = std::make_shared<ov::op::v1::VariadicSplit>(params[0], split_axis_op, num_split);
auto matMul = std::make_shared<ov::op::v0::MatMul>(split->output(0), inputB, transpA, transpB);

View File

@ -25,8 +25,8 @@ protected:
const auto const1 = ngraph::builder::makeConstant(ngPrc, std::vector<size_t>{1, 6, 1, 1}, std::vector<float>{}, true);
const auto const2 = ngraph::builder::makeConstant(ngPrc, std::vector<size_t>{1, 6, 1, 1}, std::vector<float>{}, true);
const auto add1 = utils::makeEltwise(tile->output(0), const1, utils::EltwiseTypes::ADD);
const auto add2 = utils::makeEltwise(tile->output(0), const2, utils::EltwiseTypes::ADD);
const auto add1 = utils::make_eltwise(tile->output(0), const1, utils::EltwiseTypes::ADD);
const auto add2 = utils::make_eltwise(tile->output(0), const2, utils::EltwiseTypes::ADD);
NodeVector results{add1, add2};
function = std::make_shared<ov::Model>(results, inputParams, "TileWithTwoOutputEdges");

View File

@ -9,6 +9,7 @@
#include <vector>
#include "base/import_export_base.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
#include "ov_models/builders.hpp"
namespace LayerTestsDefinitions {
@ -24,7 +25,7 @@ protected:
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector input{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape)),
std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto mul1 = ngraph::builder::makeEltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto result = std::make_shared<ov::op::v0::Result>(mul1);
function = std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, input, "multiple_input");

View File

@ -9,6 +9,7 @@
#include <vector>
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
#include "functional_test_utils/blob_utils.hpp"
#include "functional_test_utils/plugin_cache.hpp"
#include "ov_models/builders.hpp"
@ -61,7 +62,7 @@ protected:
auto reshape1 = std::make_shared<ov::opset1::Reshape>(params[0], pattern1, false);
auto constant1 = ngraph::builder::makeConstant<float>(ngPrc, {1, 1, 1, 12}, {}, true);
auto eltwise = ngraph::builder::makeEltwise(reshape1, constant1, eltwiseType);
auto eltwise = ov::test::utils::make_eltwise(reshape1, constant1, eltwiseType);
std::vector<size_t> outFormShapes2 = {1, 72};
auto pattern2 =
@ -111,7 +112,7 @@ protected:
auto reshape2 = std::make_shared<ov::opset1::Reshape>(params[1], pattern1, false);
auto eltwise = ngraph::builder::makeEltwise(reshape1, reshape2, eltwiseType);
auto eltwise = ov::test::utils::make_eltwise(reshape1, reshape2, eltwiseType);
std::vector<size_t> outFormShapes2 = {1, 72};
auto pattern2 =

View File

@ -11,6 +11,7 @@
#include <vector>
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
#include "functional_test_utils/blob_utils.hpp"
#include "functional_test_utils/plugin_cache.hpp"
#include "openvino/opsets/opset7.hpp"
@ -84,7 +85,7 @@ protected:
auto matmul = std::make_shared<ov::op::v0::MatMul>(params[0], weightsNode, false, true);
auto bias = ngraph::builder::makeConstant(ngPrc, std::vector<size_t>{1, batch, 1}, std::vector<float>{1.0f});
auto add = ngraph::builder::makeEltwise(matmul, bias, ngraph::helpers::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(matmul, bias, ngraph::helpers::EltwiseTypes::ADD);
auto pattern = std::make_shared<ov::op::v0::Constant>(ngraph::element::Type_t::i64,
ngraph::Shape{inputShape.size()},
@ -172,7 +173,7 @@ protected:
auto matmul = std::make_shared<ov::op::v0::MatMul>(inputFQ, weightsFQNode, false, true);
auto bias = ngraph::builder::makeConstant(ngPrc, std::vector<size_t>{1, 1, 1}, std::vector<float>{1.0f});
auto add = ngraph::builder::makeEltwise(matmul, bias, ngraph::helpers::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(matmul, bias, ngraph::helpers::EltwiseTypes::ADD);
auto outputLowNode = ngraph::builder::makeConstant(ngPrc,
std::vector<size_t>{1},

View File

@ -8,6 +8,7 @@
#include <tuple>
#include <vector>
#include "common_test_utils/node_builders/eltwise.hpp"
#include "functional_test_utils/blob_utils.hpp"
#include "ov_models/builders.hpp"
#include "ov_models/utils/ov_helpers.hpp"
@ -52,7 +53,7 @@ protected:
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto const_mult2 = ngraph::builder::makeConstant<float>(ngPrc, inputShape, {-1.0f});
auto mul = ngraph::builder::makeEltwise(params[0], const_mult2, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(params[0], const_mult2, ngraph::helpers::EltwiseTypes::MULTIPLY);
function = std::make_shared<ngraph::Function>(mul, params, "EltwiseSplitOverChannelsPassTest");
}
};

View File

@ -4,6 +4,7 @@
#include <ie_core.hpp>
#include "common_test_utils/node_builders/eltwise.hpp"
#include "common_test_utils/test_constants.hpp"
#include "ov_models/builders.hpp"
#include "shared_test_classes/base/layer_test_utils.hpp"
@ -34,9 +35,9 @@ protected:
ov::ParameterVector input{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape{1, inputSize})};
auto constant = ngraph::builder::makeConstant(ngPrc, {1, inputSize}, std::vector<size_t>{1});
auto mul1 = ngraph::builder::makeEltwise(input[0], constant, ngraph::helpers::EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(input[0], constant, ngraph::helpers::EltwiseTypes::ADD);
auto sigmoid1 = std::make_shared<ov::opset1::Sigmoid>(mul1);
auto mul2 = ngraph::builder::makeEltwise(input[0], sigmoid1, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto mul2 = ov::test::utils::make_eltwise(input[0], sigmoid1, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto fake3 = ngraph::builder::makeFakeQuantize(sigmoid1,
ngPrc,
levelFq,
@ -45,7 +46,7 @@ protected:
{minMaxFq.second},
{minMaxFq.first},
{minMaxFq.second});
auto mul3 = ngraph::builder::makeEltwise(mul2, fake3, ngraph::helpers::EltwiseTypes::ADD);
auto mul3 = ov::test::utils::make_eltwise(mul2, fake3, ngraph::helpers::EltwiseTypes::ADD);
auto result = std::make_shared<ov::op::v0::Result>(mul3);
function = std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, input, "fq_fusion_with_sigmoid");
}

View File

@ -9,6 +9,7 @@
#include <vector>
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
#include "functional_test_utils/blob_utils.hpp"
#include "functional_test_utils/plugin_cache.hpp"
#include "ov_models/builders.hpp"
@ -69,7 +70,7 @@ protected:
auto constant =
ngraph::builder::makeConstant<float>(ngPrc, shape, {}, true, constRange.second, constRange.first);
auto eltwise = ngraph::builder::makeEltwise(constant, params[0], ngraph::helpers::EltwiseTypes::ADD);
auto eltwise = ov::test::utils::make_eltwise(constant, params[0], ngraph::helpers::EltwiseTypes::ADD);
ngraph::ResultVector results{std::make_shared<ov::op::v0::Result>(eltwise)};
function = std::make_shared<ngraph::Function>(results, params, "InputConstAdd");

View File

@ -8,6 +8,7 @@
#include <ngraph/pass/manager.hpp>
#include <transformations/init_node_info.hpp>
#include "common_test_utils/node_builders/eltwise.hpp"
#include "common_test_utils/ov_test_utils.hpp"
#include "openvino/opsets/opset8.hpp"
#include "ov_models/builders.hpp"
@ -210,8 +211,8 @@ public:
auto convert1 = std::make_shared<ov::op::v0::Convert>(input[0], net_precision_);
auto convert2 = std::make_shared<ov::op::v0::Convert>(input[1], net_precision_);
auto convert3 = std::make_shared<ov::op::v0::Convert>(input[2], net_precision_);
auto mul1 = ngraph::builder::makeEltwise(convert1, convert2, ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ngraph::builder::makeEltwise(convert3, mul1, ngraph::helpers::EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(convert1, convert2, ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ov::test::utils::make_eltwise(convert3, mul1, ngraph::helpers::EltwiseTypes::ADD);
auto result = std::make_shared<ov::op::v0::Result>(mul2);
func_ = std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, input, "multiple_input");
}
@ -221,8 +222,8 @@ public:
ov::ParameterVector input{std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape)};
auto mul1 = ngraph::builder::makeEltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ngraph::builder::makeEltwise(input[2], mul1, ngraph::helpers::EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ov::test::utils::make_eltwise(input[2], mul1, ngraph::helpers::EltwiseTypes::ADD);
auto result = std::make_shared<ov::op::v0::Result>(mul2);
ref_func_no_convert_ =
std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, input, "multiple_input");
@ -244,8 +245,8 @@ public:
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape)};
auto mul1 = ngraph::builder::makeEltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ngraph::builder::makeEltwise(input[2], input[3], ngraph::helpers::EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ov::test::utils::make_eltwise(input[2], input[3], ngraph::helpers::EltwiseTypes::ADD);
auto convert1 = std::make_shared<ov::op::v0::Convert>(mul1, target_precision_);
auto convert2 = std::make_shared<ov::op::v0::Convert>(mul2, target_precision_);
auto result1 = std::make_shared<ov::op::v0::Result>(convert1);
@ -261,8 +262,8 @@ public:
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape)};
auto mul1 = ngraph::builder::makeEltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ngraph::builder::makeEltwise(input[2], input[3], ngraph::helpers::EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ov::test::utils::make_eltwise(input[2], input[3], ngraph::helpers::EltwiseTypes::ADD);
auto result1 = std::make_shared<ov::op::v0::Result>(mul1);
auto result2 = std::make_shared<ov::op::v0::Result>(mul2);
@ -286,9 +287,9 @@ public:
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape)};
auto mul1 = ngraph::builder::makeEltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ngraph::builder::makeEltwise(input[2], input[3], ngraph::helpers::EltwiseTypes::ADD);
auto mul3 = ngraph::builder::makeEltwise(mul1, mul2, ngraph::helpers::EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ov::test::utils::make_eltwise(input[2], input[3], ngraph::helpers::EltwiseTypes::ADD);
auto mul3 = ov::test::utils::make_eltwise(mul1, mul2, ngraph::helpers::EltwiseTypes::ADD);
auto convert1 = std::make_shared<ov::op::v0::Convert>(mul1, target_precision_);
auto convert2 = std::make_shared<ov::op::v0::Convert>(mul2, target_precision_);
auto convert3 = std::make_shared<ov::op::v0::Convert>(mul3, target_precision_);
@ -307,9 +308,9 @@ public:
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape),
std::make_shared<ov::op::v0::Parameter>(net_precision_, input_shape)};
auto mul1 = ngraph::builder::makeEltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ngraph::builder::makeEltwise(input[2], input[3], ngraph::helpers::EltwiseTypes::ADD);
auto mul3 = ngraph::builder::makeEltwise(mul1, mul2, ngraph::helpers::EltwiseTypes::ADD);
auto mul1 = ov::test::utils::make_eltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto mul2 = ov::test::utils::make_eltwise(input[2], input[3], ngraph::helpers::EltwiseTypes::ADD);
auto mul3 = ov::test::utils::make_eltwise(mul1, mul2, ngraph::helpers::EltwiseTypes::ADD);
auto result1 = std::make_shared<ov::op::v0::Result>(mul1);
auto result2 = std::make_shared<ov::op::v0::Result>(mul2);
auto result3 = std::make_shared<ov::op::v0::Result>(mul3);

View File

@ -84,7 +84,7 @@ protected:
auto variadicSplitOp = std::make_shared<ov::op::v1::VariadicSplit>(params[0], axis, split_sizes);
variadicSplitOp->set_friendly_name("variadicSplit");
auto addOp = ov::test::utils::makeEltwise(params[1], variadicSplitOp->output(1), ov::test::utils::EltwiseTypes::ADD);
auto addOp = ov::test::utils::make_eltwise(params[1], variadicSplitOp->output(1), ov::test::utils::EltwiseTypes::ADD);
addOp->set_friendly_name("add");
ov::ResultVector results = {std::make_shared<ov::op::v0::Result>(addOp)};

View File

@ -83,7 +83,7 @@ protected:
for (auto&& shape : {inShapeShapeOf, inShapeElt})
params.push_back(std::make_shared<ov::op::v0::Parameter>(netType, shape));
auto addOp1 = ov::test::utils::makeEltwise(params[1], params[1], ov::test::utils::EltwiseTypes::ADD);
auto addOp1 = ov::test::utils::make_eltwise(params[1], params[1], ov::test::utils::EltwiseTypes::ADD);
addOp1->set_friendly_name("add1");
auto shapeOfOp1 = std::make_shared<ov::op::v3::ShapeOf>(addOp1, ov::element::i64);
@ -104,7 +104,7 @@ protected:
auto reshapeOp1 = std::make_shared<ov::op::v1::Reshape>(addOp1, concatOp1, false);
reshapeOp1->set_friendly_name("reshapeOp1");
auto addOp2 = ov::test::utils::makeEltwise(params[1], params[1], ov::test::utils::EltwiseTypes::ADD);
auto addOp2 = ov::test::utils::make_eltwise(params[1], params[1], ov::test::utils::EltwiseTypes::ADD);
addOp2->set_friendly_name("add2");
auto shapeOfOp2 = std::make_shared<ov::op::v3::ShapeOf>(addOp2, ov::element::i64);
@ -122,7 +122,7 @@ protected:
auto reshapeOp2 = std::make_shared<ov::op::v1::Reshape>(addOp2, concatOp2, false);
reshapeOp2->set_friendly_name("reshapeOp2");
auto addOp3 = ov::test::utils::makeEltwise(reshapeOp1, reshapeOp2, ov::test::utils::EltwiseTypes::ADD);
auto addOp3 = ov::test::utils::make_eltwise(reshapeOp1, reshapeOp2, ov::test::utils::EltwiseTypes::ADD);
addOp3->set_friendly_name("add3");
auto shapeOf3 = std::make_shared<ov::op::v3::ShapeOf>(addOp3, ov::element::i64);

View File

@ -80,7 +80,7 @@ protected:
for (auto&& shape : inputDynamicShapes)
params.push_back(std::make_shared<ov::op::v0::Parameter>(model_type, shape));
auto addOp = ov::test::utils::makeEltwise(params[1], params[1], ov::test::utils::EltwiseTypes::ADD);
auto addOp = ov::test::utils::make_eltwise(params[1], params[1], ov::test::utils::EltwiseTypes::ADD);
addOp->set_friendly_name("add");
auto shapeOfOp1 = std::make_shared<ov::op::v3::ShapeOf>(params[0], ov::element::i64);

View File

@ -4,10 +4,10 @@
#include <vector>
#include "common_test_utils/node_builders/eltwise.hpp"
#include "behavior/ov_infer_request/infer_request_dynamic.hpp"
#include "ov_api_conformance_helpers.hpp"
namespace {
using namespace ov::test::behavior;
using namespace ov::test::conformance;
@ -21,7 +21,7 @@ std::shared_ptr<ov::Model> ovGetFunction1() {
params.front()->get_output_tensor(0).set_names({"input_tensor"});
auto in2add = ngraph::builder::makeConstant(ngPrc, {1, 4, 1, 1}, std::vector<float>{}, true);
auto add = ngraph::builder::makeEltwise(params[0], in2add, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(params[0], in2add, ov::test::utils::EltwiseTypes::ADD);
auto relu1 = std::make_shared<ov::op::v0::Relu>(add->output(0));
relu1->get_output_tensor(0).set_names({"relu1"});
auto relu2 = std::make_shared<ov::op::v0::Relu>(add->output(0));
@ -42,11 +42,11 @@ std::shared_ptr<ov::Model> ovGetFunction2() {
auto split = std::make_shared<ov::op::v1::Split>(params[0], splitAxisOp, 2);
auto in2add = ngraph::builder::makeConstant(ngPrc, {1, 2, 1, 1}, std::vector<float>{}, true);
auto add = ngraph::builder::makeEltwise(split->output(0), in2add, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(split->output(0), in2add, ov::test::utils::EltwiseTypes::ADD);
auto relu1 = std::make_shared<ov::op::v0::Relu>(add);
auto in2mult = ngraph::builder::makeConstant(ngPrc, {1, 2, 1, 1}, std::vector<float>{}, true);
auto mult = ngraph::builder::makeEltwise(split->output(1), in2mult, ov::test::utils::EltwiseTypes::MULTIPLY);
auto mult = ov::test::utils::make_eltwise(split->output(1), in2mult, ov::test::utils::EltwiseTypes::MULTIPLY);
auto relu2 = std::make_shared<ov::op::v0::Relu>(mult);
auto concat = std::make_shared<ov::op::v0::Concat>(ov::OutputVector{relu1->output(0), relu2->output(0)}, 3);

View File

@ -24,6 +24,7 @@
#include "openvino/runtime/infer_request.hpp"
#include "openvino/runtime/tensor.hpp"
#include "behavior/ov_infer_request/inference_chaining.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace ov {
namespace test {
@ -44,8 +45,8 @@ std::shared_ptr<ov::Model> OVInferenceChaining::getFirstStaticFunction(const ov:
params[1]->set_friendly_name("param_1");
params[2]->get_output_tensor(0).set_names({"input_tensor_2"});
params[2]->set_friendly_name("param_2");
auto eltwise = ngraph::builder::makeEltwise(params[0], params[1], ngraph::helpers::EltwiseTypes::ADD);
auto eltwise2 = ngraph::builder::makeEltwise(eltwise, params[2], ngraph::helpers::EltwiseTypes::ADD);
auto eltwise = ov::test::utils::make_eltwise(params[0], params[1], ngraph::helpers::EltwiseTypes::ADD);
auto eltwise2 = ov::test::utils::make_eltwise(eltwise, params[2], ngraph::helpers::EltwiseTypes::ADD);
eltwise2->get_output_tensor(0).set_names({"result_tensor_0"});
eltwise2->set_friendly_name("result_0");
@ -61,7 +62,7 @@ std::shared_ptr<ov::Model> OVInferenceChaining::getSecondStaticFunction(const ov
params[0]->set_friendly_name("param_0");
params[1]->get_output_tensor(0).set_names({"input_tensor_1"});
params[1]->set_friendly_name("param_1");
auto eltwise = ngraph::builder::makeEltwise(params[0], params[1], ngraph::helpers::EltwiseTypes::MULTIPLY);
auto eltwise = ov::test::utils::make_eltwise(params[0], params[1], ngraph::helpers::EltwiseTypes::MULTIPLY);
eltwise->get_output_tensor(0).set_names({"result_tensor_0"});
eltwise->set_friendly_name("result_0");
@ -81,9 +82,9 @@ std::shared_ptr<ov::Model> OVInferenceChaining::getThirdStaticFunction(const ov:
params[2]->set_friendly_name("param_2");
params[3]->get_output_tensor(0).set_names({"input_tensor_3"});
params[3]->set_friendly_name("param_3");
auto eltwise = ngraph::builder::makeEltwise(params[0], params[1], ngraph::helpers::EltwiseTypes::ADD);
auto eltwise2 = ngraph::builder::makeEltwise(eltwise, params[2], ngraph::helpers::EltwiseTypes::ADD);
auto eltwise3 = ngraph::builder::makeEltwise(eltwise2, params[3], ngraph::helpers::EltwiseTypes::MULTIPLY);
auto eltwise = ov::test::utils::make_eltwise(params[0], params[1], ngraph::helpers::EltwiseTypes::ADD);
auto eltwise2 = ov::test::utils::make_eltwise(eltwise, params[2], ngraph::helpers::EltwiseTypes::ADD);
auto eltwise3 = ov::test::utils::make_eltwise(eltwise2, params[3], ngraph::helpers::EltwiseTypes::MULTIPLY);
eltwise3->get_output_tensor(0).set_names({"result_tensor_0"});
eltwise3->set_friendly_name("result_0");

View File

@ -24,6 +24,7 @@
#include "openvino/runtime/infer_request.hpp"
#include "openvino/runtime/tensor.hpp"
#include "behavior/ov_infer_request/iteration_chaining.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace ov {
namespace test {
@ -40,7 +41,7 @@ std::shared_ptr<ov::Model> OVIterationChaining::getIterativeFunction() {
auto concat_const = ngraph::builder::makeConstant(element::Type_t::f32, {1, 16}, std::vector<float>{}, true);
auto concat = std::make_shared<ov::op::v0::Concat>(ov::NodeVector{params, concat_const}, 0 /*axis*/);
auto eltwise_const = ngraph::builder::makeConstant(element::Type_t::f32, {1, 16}, std::vector<float>{}, true);
auto eltwise = ngraph::builder::makeEltwise(concat, eltwise_const, ngraph::helpers::EltwiseTypes::ADD);
auto eltwise = ov::test::utils::make_eltwise(concat, eltwise_const, ngraph::helpers::EltwiseTypes::ADD);
concat->get_output_tensor(0).set_names({"result_tensor_0"});
concat->set_friendly_name("result_0");
eltwise->get_output_tensor(0).set_names({"result_tensor_1"});

View File

@ -12,6 +12,8 @@
#include "functional_test_utils/plugin_cache.hpp"
#include "shared_test_classes/base/layer_test_utils.hpp"
#include "common_test_utils/common_utils.hpp"
#include "common_test_utils/node_builders/group_convolution.hpp"
#include "common_test_utils/node_builders/binary_convolution.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "execution_graph_tests/num_inputs_fusing_bin_conv.hpp"
@ -33,10 +35,10 @@ void ExecGraphInputsFusingBinConv::SetUp() {
targetDevice = this->GetParam();
ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngraph::element::f32, ov::Shape(inputShapes))};
auto binConv = ngraph::builder::makeBinaryConvolution(params[0], binConvKernelSize, strides, padsBegin, padsEnd, dilations, paddingType, numOutChannels,
auto binConv = ov::test::utils::make_binary_convolution(params[0], binConvKernelSize, strides, padsBegin, padsEnd, dilations, paddingType, numOutChannels,
padValue);
auto conv = ngraph::builder::makeGroupConvolution(binConv, ngraph::element::f32, convKernelSize, strides, padsBegin, padsEnd, dilations, paddingType,
numOutChannels, numGroups);
auto conv = ov::test::utils::make_group_convolution(binConv, ngraph::element::f32, convKernelSize, strides, padsBegin, padsEnd, dilations, paddingType,
numOutChannels, numGroups);
auto biasNode = std::make_shared<ov::op::v0::Constant>(ngraph::element::f32, std::vector<size_t>{16, 1, 1});
auto add = std::make_shared<ov::op::v1::Add>(conv, biasNode);

View File

@ -18,6 +18,8 @@
#include "shared_test_classes/base/layer_test_utils.hpp"
#include "functional_test_utils/blob_utils.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "common_test_utils/node_builders/binary_convolution.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
#include "execution_graph_tests/runtime_precision.hpp"
@ -31,7 +33,7 @@ std::shared_ptr<ngraph::Function> makeEltwiseFunction(const std::vector<Inferenc
std::make_shared<ov::op::v0::Parameter>(FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(inputPrecisions[1]),
ov::Shape{1, 16, 5, 4})};
auto eltwise = ngraph::builder::makeEltwise(inputs[0], inputs[1], ngraph::helpers::EltwiseTypes::ADD);
auto eltwise = ov::test::utils::make_eltwise(inputs[0], inputs[1], ngraph::helpers::EltwiseTypes::ADD);
eltwise->set_friendly_name("Eltwise");
auto function = std::make_shared<ngraph::Function>(eltwise, inputs, "EltwiseWithTwoDynamicInputs");
@ -69,7 +71,7 @@ std::shared_ptr<ngraph::Function> makeFakeQuantizeBinaryConvolutionFunction(cons
auto fakeQuantize = std::make_shared<ov::op::v0::FakeQuantize>(inputs[0], inputLowNode, inputHighNode, outputLowNode, outputHighNode, 2);
fakeQuantize->set_friendly_name("FakeQuantize");
auto binConv = ngraph::builder::makeBinaryConvolution(fakeQuantize, {3, 3}, {1, 1}, {1, 1}, {1, 1}, {1, 1}, ov::op::PadType::EXPLICIT, 32, 0);
auto binConv = ov::test::utils::make_binary_convolution(fakeQuantize, {3, 3}, {1, 1}, {1, 1}, {1, 1}, {1, 1}, ov::op::PadType::EXPLICIT, 32, 0);
binConv->set_friendly_name("BinaryConvolution");
auto function = std::make_shared<ngraph::Function>(binConv, inputs, "FakeQuantizeBinaryConvolution");

View File

@ -3,6 +3,7 @@
//
#include "shared_test_classes/single_layer/activation.hpp"
#include "common_test_utils/node_builders/activation.hpp"
namespace LayerTestsDefinitions {
@ -48,7 +49,7 @@ void ActivationLayerTest::SetUp() {
std::iota(constantsValue.begin(), constantsValue.end(), -10);
}
auto activation = ngraph::builder::makeActivation(params[0], ngPrc, activationType, shapes.second, constantsValue);
auto activation = ov::test::utils::make_activation(params[0], ngPrc, activationType, shapes.second, constantsValue);
function = std::make_shared<ngraph::Function>(ngraph::NodeVector{activation}, params);
}
@ -217,7 +218,7 @@ void ActivationParamLayerTest::SetUp() {
params[0]->set_friendly_name("Input");
params.insert(params.end(), activationParams.begin(), activationParams.end());
auto activation = ngraph::builder::makeActivation(params, ngPrc, activationType);
auto activation = ov::test::utils::make_activation(params, ngPrc, activationType);
ngraph::ResultVector results{std::make_shared<ov::op::v0::Result>(activation)};
function = std::make_shared<ngraph::Function>(results, params);
}

View File

@ -5,6 +5,7 @@
#include "ov_models/builders.hpp"
#include <common_test_utils/ov_tensor_utils.hpp>
#include "shared_test_classes/single_layer/eltwise.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
#include "functional_test_utils/plugin_cache.hpp"
@ -124,7 +125,7 @@ void EltwiseLayerTest::SetUp() {
parameters[0]->set_friendly_name("param0");
secondaryInput->set_friendly_name("param1");
auto eltwise = ngraph::builder::makeEltwise(parameters[0], secondaryInput, eltwiseType);
auto eltwise = ov::test::utils::make_eltwise(parameters[0], secondaryInput, eltwiseType);
function = std::make_shared<ngraph::Function>(eltwise, parameters, "Eltwise");
}

View File

@ -36,8 +36,11 @@ namespace LayerTestsDefinitions {
std::tie(numSplits, axis, netPrecision, inPrc, outPrc, inLayout, outLayout, inputShape, targetDevice) = this->GetParam();
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto VariadicSplit = std::dynamic_pointer_cast<ov::op::v1::VariadicSplit>(ngraph::builder::makeVariadicSplit(params[0], numSplits,
axis));
auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, axis);
auto num_split = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{numSplits.size()}, numSplits);
auto VariadicSplit = std::make_shared<ov::op::v1::VariadicSplit>(params[0], split_axis_op, num_split);
ngraph::ResultVector results;
for (int i = 0; i < numSplits.size(); i++) {
results.push_back(std::make_shared<ov::op::v0::Result>(VariadicSplit->output(i)));

View File

@ -135,7 +135,7 @@ void EltwiseLayerTest::SetUp() {
parameters[0]->set_friendly_name("param0");
secondary_input->set_friendly_name("param1");
auto eltwise = ov::test::utils::makeEltwise(parameters[0], secondary_input, eltwise_type);
auto eltwise = ov::test::utils::make_eltwise(parameters[0], secondary_input, eltwise_type);
function = std::make_shared<ov::Model>(eltwise, parameters, "Eltwise");
}
} // namespace test

View File

@ -4,6 +4,7 @@
#include "shared_test_classes/single_op/embedding_bag_packed_sum.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/embedding_bag_packed_sum.hpp"
namespace ov {
namespace test {
@ -50,7 +51,7 @@ void EmbeddingBagPackedSumLayerTest::SetUp() {
auto param = std::make_shared<ov::op::v0::Parameter>(model_type, inputDynamicShapes.front());
auto embBag = ngraph::builder::makeEmbeddingBagPackedSum(model_type, ind_type, param, indices, with_weights);
auto embBag = ov::test::utils::make_embedding_bag_packed_sum(model_type, ind_type, param, indices, with_weights);
auto result = std::make_shared<ov::op::v0::Result>(embBag);
function = std::make_shared<ov::Model>(result, ov::ParameterVector{param}, "embeddingBagPackedSum");

View File

@ -4,6 +4,8 @@
#include "ov_models/builders.hpp"
#include "shared_test_classes/subgraph/activation_concats_eltwise.hpp"
#include "common_test_utils/node_builders/activation.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace SubgraphTestsDefinitions {
@ -40,7 +42,7 @@ void ActivationConcatsEltwise::SetUp() {
ov::ParameterVector input{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape{1, inputSize})};
auto relu = ngraph::builder::makeActivation(input[0], ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto relu = ov::test::utils::make_activation(input[0], ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto concat_vals_1 = ov::test::utils::generate_float_numbers(concatSize, 14, 14);
auto concat_vals_2 = ov::test::utils::generate_float_numbers(concatSize, 14, 14);
@ -50,7 +52,7 @@ void ActivationConcatsEltwise::SetUp() {
auto concat_1 = std::make_shared<ov::op::v0::Concat>(ov::NodeVector{concat_const_1, relu}, 1);
auto concat_2 = std::make_shared<ov::op::v0::Concat>(ov::NodeVector{concat_const_2, relu}, 1);
auto eltw = ngraph::builder::makeEltwise(concat_1, concat_2, ngraph::helpers::EltwiseTypes::ADD);
auto eltw = ov::test::utils::make_eltwise(concat_1, concat_2, ngraph::helpers::EltwiseTypes::ADD);
auto reshape_pattern = std::make_shared<ov::op::v0::Constant>(ngraph::element::i64, ngraph::Shape{2}, std::vector<size_t>({1, inputSize + concatSize}));
auto final_reshape = std::make_shared<ov::op::v1::Reshape>(eltw, reshape_pattern, false);

View File

@ -4,6 +4,7 @@
#include <ov_models/builders.hpp>
#include "shared_test_classes/subgraph/activation_fq.hpp"
#include "common_test_utils/node_builders/activation.hpp"
namespace SubgraphTestsDefinitions {
@ -64,7 +65,7 @@ namespace SubgraphTestsDefinitions {
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto act = ngraph::builder::makeActivation(params[0], ngPrc, activationType);
auto act = ov::test::utils::make_activation(params[0], ngPrc, activationType);
auto FQNode = ngraph::builder::makeFakeQuantize(act, ngraph::element::f32, levels[0], constShape[0],
{ inputDataMin }, { inputDataMax }, { inputDataMin }, { inputDataMax });

View File

@ -4,6 +4,8 @@
#include "shared_test_classes/subgraph/broadcast_power.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace SubgraphTestsDefinitions {
std::string BroadcastPowerTest::getTestCaseName(const testing::TestParamInfo<BroadCastPowerTuple>& obj) {
InferenceEngine::Precision netPrecision;
@ -34,7 +36,7 @@ void BroadcastPowerTest::SetUp() {
auto reshape = std::make_shared<ov::op::v1::Reshape>(params[0], reshape_pattern, false);
auto const_mult2 = ngraph::builder::makeConstant<float>(ngPrc, {}, {-1.0f});
auto sum = ngraph::builder::makeEltwise(reshape, const_mult2, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto sum = ov::test::utils::make_eltwise(reshape, const_mult2, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto reshape_pattern_2 = std::make_shared<ov::op::v0::Constant>(ngraph::element::i64, ngraph::Shape{inputs_shapes[0].size()},
inputs_shapes[0]);

View File

@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/concat_conv.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
@ -74,7 +75,7 @@ void ConcatConvTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.2f, 0.2f);
auto conv = ngraph::builder::makeConvolution(reshape1,
auto conv = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},

View File

@ -3,6 +3,7 @@
//
#include "shared_test_classes/subgraph/concat_multi_input.hpp"
#include "common_test_utils/node_builders/activation.hpp"
namespace SubgraphTestsDefinitions {
@ -92,7 +93,7 @@ void ConcatMultiInput::GenerateConstOnlyModel() {
}
if (i == 0) {
input_vector = ov::ParameterVector{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape{1, total_size})};
auto relu = ngraph::builder::makeActivation(input_vector[0], ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto relu = ov::test::utils::make_activation(input_vector[0], ngPrc, ngraph::helpers::ActivationTypes::Relu);
concatInputs.push_back(relu);
} else {
auto min_max = (i % 2 == 0) ? 2 : 30;

View File

@ -3,6 +3,7 @@
//
#include "shared_test_classes/subgraph/concat_quantization_during_memory_requantization.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace SubgraphTestsDefinitions {
std::string ConcatQuantDuringMemoryRequantTest::getTestCaseName(const testing::TestParamInfo<ConcatQuantDuringMemoryRequantTuple>& obj) {
@ -44,11 +45,13 @@ namespace SubgraphTestsDefinitions {
auto concat_1 = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{ mem_1_read, input[0] }, 1);
// Revert concat names to set the needed order of scale factors calculation
concat_1->set_friendly_name("concat2");
auto split_1 = ngraph::builder::makeVariadicSplit(concat_1, { inputSize, hiddenSize }, 1);
auto split_axis_op_1 = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{1});
auto num_split_1 = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2}, std::vector<size_t>{inputSize, hiddenSize});
auto split_1 = std::make_shared<ov::op::v1::VariadicSplit>(concat_1, split_axis_op_1, num_split_1);
auto mul_const = std::make_shared<ov::op::v0::Constant>(ngPrc, ngraph::Shape{ 1, hiddenSize },
ov::test::utils::generate_float_numbers(hiddenSize, -0.2f, 0.0f));
auto mul = ngraph::builder::makeEltwise(split_1->output(1), mul_const, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(split_1->output(1), mul_const, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto mem_1_write = std::make_shared<ov::op::v3::Assign>(mul, "memory_1");
auto mem_2_const = std::make_shared<ov::op::v0::Constant>(ngPrc, ngraph::Shape{ 1, hiddenSize }, memory_2_init);
@ -57,8 +60,8 @@ namespace SubgraphTestsDefinitions {
auto concat_2 = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{ mem_2_read, mul }, 1);
// Revert concat names to set the needed order of scale factors calculation
concat_2->set_friendly_name("concat1");
auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::Type_t::i64, ov::Shape{}, std::vector<int64_t>{1});
auto split_2 = std::make_shared<ov::op::v1::Split>(concat_2, split_axis_op, 2);
auto split_axis_op_2 = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{1});
auto split_2 = std::make_shared<ov::op::v1::Split>(concat_2, split_axis_op_2, 2);
auto mem_2_write = std::make_shared<ov::op::v3::Assign>(split_2->output(0), "memory_2");
auto sigm = std::make_shared<ov::op::v0::Sigmoid>(split_2->output(1));
@ -87,16 +90,19 @@ namespace SubgraphTestsDefinitions {
auto mem_1_const = std::make_shared<ov::op::v0::Constant>(ngPrc, ngraph::Shape{ 1, hiddenSize }, memory_1_init);
auto concat_1 = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{ mem_1_const, input[0] }, 1);
auto split_1 = ngraph::builder::makeVariadicSplit(concat_1, { inputSize, hiddenSize }, 1);
auto split_axis_op_1 = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{1});
auto num_split_1 = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2}, std::vector<size_t>{inputSize, hiddenSize});
auto split_1 = std::make_shared<ov::op::v1::VariadicSplit>(concat_1, split_axis_op_1, num_split_1);
auto mul_const = std::make_shared<ov::op::v0::Constant>(ngPrc, ngraph::Shape{ 1, hiddenSize },
ov::test::utils::generate_float_numbers(hiddenSize, -0.2f, 0.0f));
auto mul = ngraph::builder::makeEltwise(split_1->output(1), mul_const, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(split_1->output(1), mul_const, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto mem_2_const = std::make_shared<ov::op::v0::Constant>(ngPrc, ngraph::Shape{ 1, hiddenSize }, memory_2_init);
auto concat_2 = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{ mem_2_const, mul }, 1);
auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::Type_t::i64, ov::Shape{}, std::vector<int64_t>{1});
auto split_2 = std::make_shared<ov::op::v1::Split>(concat_2, split_axis_op, 2);
auto split_axis_op_2 = std::make_shared<ov::op::v0::Constant>(ov::element::Type_t::i64, ov::Shape{}, std::vector<int64_t>{1});
auto split_2 = std::make_shared<ov::op::v1::Split>(concat_2, split_axis_op_2, 2);
auto sigm = std::make_shared<ov::op::v0::Sigmoid>(split_2->output(1));

View File

@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/const_conv_concat.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
@ -69,7 +70,7 @@ void ConstConvConcatTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
0.0f, 0.1f);
auto conv = ngraph::builder::makeConvolution(reshape1,
auto conv = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},

View File

@ -3,6 +3,7 @@
//
#include "shared_test_classes/subgraph/convolution_relu_sequence.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
@ -70,7 +71,7 @@ void ConvolutionReluSequenceTest::SetUp() {
std::shared_ptr<ngraph::Node> conv =
std::dynamic_pointer_cast<ngraph::Node>(
ngraph::builder::makeConvolution(
ov::test::utils::make_convolution(
lastOutputs,
ngPrc, single.kernelSize, single.strides, single.padBegin, single.padEnd,
dilation, ov::op::PadType::EXPLICIT, single.numOutChannels, addBiases, filter_weights, biases));

View File

@ -77,10 +77,18 @@ namespace SubgraphTestsDefinitions {
auto mem_r = std::make_shared<ov::op::v3::ReadValue>(mem_c, "id");
auto concat = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{mem_r, input[0]}, 1);
auto split = ngraph::builder::makeVariadicSplit(concat, {3 * memory_size, memory_size}, 1);
auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{1});
auto num_split = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2}, std::vector<size_t>{3 * memory_size, memory_size});
auto split = std::make_shared<ov::op::v1::VariadicSplit>(concat, split_axis_op, num_split);
auto mem_w = std::make_shared<ov::op::v3::Assign>(split->output(1), "id");
auto VariadicSplit = ngraph::builder::makeVariadicSplit(concat, {memory_size / 2, 3 * memory_size + memory_size / 2}, 1);
auto split_axis_op_variadic = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{1});
auto num_split_variadic = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2},
std::vector<size_t>{memory_size / 2, 3 * memory_size + memory_size / 2});
auto VariadicSplit = std::make_shared<ov::op::v1::VariadicSplit>(concat, split_axis_op_variadic, num_split_variadic);
auto relu2 = std::make_shared<ov::op::v0::Sigmoid>(VariadicSplit->output(1));
mem_w->add_control_dependency(mem_r);
@ -103,9 +111,16 @@ namespace SubgraphTestsDefinitions {
auto mem_c = std::make_shared<ov::op::v0::Constant>(ngPrc, ngraph::Shape{1, memory_size}, memory_init);
auto concat = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{mem_c, input[0]}, 1);
auto split = ngraph::builder::makeVariadicSplit(concat, {3 * memory_size, memory_size}, 1);
auto VariadicSplit = ngraph::builder::makeVariadicSplit(concat, {memory_size / 2, 3 * memory_size + memory_size / 2}, 1);
auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{1});
auto num_split = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2}, std::vector<size_t>{3 * memory_size, memory_size});
auto split = std::make_shared<ov::op::v1::VariadicSplit>(concat, split_axis_op, num_split);
auto split_axis_op_variadic = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{1});
auto num_split_variadic = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2},
std::vector<size_t>{memory_size / 2, 3 * memory_size + memory_size / 2});
auto VariadicSplit = std::make_shared<ov::op::v1::VariadicSplit>(concat, split_axis_op_variadic, num_split_variadic);
auto relu2 = std::make_shared<ov::op::v0::Sigmoid>(VariadicSplit->output(1));
function = std::make_shared<ngraph::Function>(relu2, input, "delayed_copy_layer_nonmemory");

View File

@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/eltwise_conv_eltwise.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
@ -69,7 +70,7 @@ void EltwiseAfterConvTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.2f, 0.2f);
auto conv = ngraph::builder::makeConvolution(reshape1,
auto conv = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},
@ -161,7 +162,7 @@ void EltwiseBeforeConvTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.2f, 0.2f);
auto conv = ngraph::builder::makeConvolution(reshape1,
auto conv = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},
@ -240,7 +241,7 @@ void EltwiseWithTwoConvsAsInputsTest::SetUp() {
auto filterWeights1 = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.2f, 0.2f);
auto stride_h = kernelShape[0] > 1 ? stride : 1;
auto conv1 = ngraph::builder::makeConvolution(reshape1,
auto conv1 = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{stride_h, stride},
@ -257,7 +258,7 @@ void EltwiseWithTwoConvsAsInputsTest::SetUp() {
auto filterWeights2 = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.2f, 0.2f);
auto conv2 = ngraph::builder::makeConvolution(reshape3,
auto conv2 = ov::test::utils::make_convolution(reshape3,
ngPrc,
{kernelShape[0], kernelShape[1]},
{stride_h, stride},


@ -3,6 +3,8 @@
#include "ov_models/builders.hpp"
#include "shared_test_classes/subgraph/eltwise_reshape_activation.hpp"
#include "common_test_utils/node_builders/activation.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace SubgraphTestsDefinitions {
@ -37,12 +39,12 @@ void EltwiseReshapeActivation::SetUp() {
ov::ParameterVector input{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(shapes[0])),
std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(shapes[0]))};
auto eltw = ngraph::builder::makeEltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto eltw = ov::test::utils::make_eltwise(input[0], input[1], ngraph::helpers::EltwiseTypes::ADD);
auto reshape_pattern1 = std::make_shared<ov::op::v0::Constant>(ngraph::element::i64, ngraph::Shape{shapes[1].size()}, shapes[1]);
auto reshape1 = std::make_shared<ov::op::v1::Reshape>(eltw, reshape_pattern1, false);
auto relu = ngraph::builder::makeActivation(reshape1, ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto relu = ov::test::utils::make_activation(reshape1, ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto reshape_pattern2 = std::make_shared<ov::op::v0::Constant>(ngraph::element::i64, ngraph::Shape{shapes[0].size()}, shapes[0]);
auto reshape2 = std::make_shared<ov::op::v1::Reshape>(relu, reshape_pattern2, false);
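
The eltwise and activation helpers follow the same convention as the convolution one: identical arguments, snake_case names under ov::test::utils, headers in common_test_utils/node_builders. A small combined sketch (the f32 element type is illustrative):

#include "common_test_utils/node_builders/activation.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"

// Adds two outputs and applies ReLU, using the new test builders.
std::shared_ptr<ov::Node> add_then_relu(const ov::Output<ov::Node>& a, const ov::Output<ov::Node>& b) {
    auto sum = ov::test::utils::make_eltwise(a, b, ov::test::utils::EltwiseTypes::ADD);
    return ov::test::utils::make_activation(sum, ov::element::f32, ov::test::utils::ActivationTypes::Relu);
}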


@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/fc_conv_fc.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
@ -69,7 +70,7 @@ void FcAfterConvTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.1f, 0.1f);
auto conv = ngraph::builder::makeConvolution(reshape1,
auto conv = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},
@ -159,7 +160,7 @@ void FcBeforeConvTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.1f, 0.1f);
auto conv = ngraph::builder::makeConvolution(reshape1,
auto conv = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},
@ -236,7 +237,7 @@ void FcBetweenConvsTest::SetUp() {
auto filter1Weights = ov::test::utils::generate_float_numbers(outputChannels * conv1InputShape[1] * kernelShape[0] * kernelShape[1],
-0.2f, 0.2f);
auto conv1 = ngraph::builder::makeConvolution(reshape1,
auto conv1 = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},
@ -259,7 +260,7 @@ void FcBetweenConvsTest::SetUp() {
auto filter2Weights = ov::test::utils::generate_float_numbers(outputChannels * conv2InputShape[1],
-0.2f, 0.2f);
auto conv2 = ngraph::builder::makeConvolution(reshape3, ngPrc, { 1, 1 }, { 1, 1 }, { 0, 0 },
auto conv2 = ov::test::utils::make_convolution(reshape3, ngPrc, { 1, 1 }, { 1, 1 }, { 0, 0 },
{ 0, 0 }, { 1, 1 }, ov::op::PadType::VALID, outputChannels, false, filter2Weights);
std::vector<size_t> outFormShapes2 = {1, outputChannels * conv2InputShape[3]};


@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/fq_with_mixed_levels.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace SubgraphTestsDefinitions {
@ -42,7 +43,7 @@ void FqWithMixedLevelsTest::SetUp() {
auto fake2 = ngraph::builder::makeFakeQuantize(constant, ngPrc, level2, { 1 }, data2[0], data2[1], data2[2], data2[3]);
auto matmul = std::make_shared<ov::op::v0::MatMul>(fake1, fake2, false, true);
auto bias = ngraph::builder::makeConstant(ngPrc, std::vector<size_t>{shapes[0][0], shapes[1][0]}, std::vector<float>{ 1.0 });
auto add = ngraph::builder::makeEltwise(matmul, bias, ngraph::helpers::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(matmul, bias, ngraph::helpers::EltwiseTypes::ADD);
return ngraph::builder::makeFakeQuantize(add, ngPrc, level3, { 1 }, data3[0], data3[1], data3[2], data3[3]);
};


@ -6,6 +6,8 @@
#include "ov_models/builders.hpp"
#include "shared_test_classes/base/ov_subgraph.hpp"
#include "common_test_utils/node_builders/activation.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace ov {
namespace test {
@ -61,22 +63,22 @@ void OutputBeforeActivation::SetUp() {
std::shared_ptr<ngraph::Node> midLayer;
switch (outputType) {
case ov::test::midOutputType::Sum: {
midLayer = ngraph::builder::makeEltwise(input0, input1, ngraph::helpers::EltwiseTypes::ADD);
midLayer = ov::test::utils::make_eltwise(input0, input1, ngraph::helpers::EltwiseTypes::ADD);
break;
}
case ov::test::midOutputType::Sub: {
midLayer = ngraph::builder::makeEltwise(input0, input1, ngraph::helpers::EltwiseTypes::SUBTRACT);
midLayer = ov::test::utils::make_eltwise(input0, input1, ngraph::helpers::EltwiseTypes::SUBTRACT);
break;
}
case ov::test::midOutputType::Mul: {
midLayer = ngraph::builder::makeEltwise(input0, input1, ngraph::helpers::EltwiseTypes::MULTIPLY);
midLayer = ov::test::utils::make_eltwise(input0, input1, ngraph::helpers::EltwiseTypes::MULTIPLY);
break;
}
default:
GTEST_FAIL() << "Unknown midOutputType";
}
auto act = ngraph::builder::makeActivation(midLayer, element_type, ngraph::helpers::ActivationTypes::Tanh);
auto act = ov::test::utils::make_activation(midLayer, element_type, ngraph::helpers::ActivationTypes::Tanh);
outputs.insert(outputs.end(), {midLayer, act});
function = std::make_shared<ngraph::Function>(outputs, input_parameter, "output_before_activation");
}


@ -3,6 +3,7 @@
//
#include "shared_test_classes/subgraph/handling_orientation_conv.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
std::string HandlingOrientationClass::getTestCaseName(const testing::TestParamInfo<HandlingOrientationParams> &obj) {
@ -35,13 +36,13 @@ namespace SubgraphTestsDefinitions {
auto permute1 = std::make_shared<ov::op::v1::Transpose>(reshape1,
ov::op::v0::Constant::create(ngraph::element::i64, ngraph::Shape{ 4 }, { 0, 3, 1, 2 }));
auto conv1 = ngraph::builder::makeConvolution(permute1, ngPrc, { 1, 8 }, { 1, 1 }, { 0, 0 }, { 0, 0 }, { 1, 1 },
auto conv1 = ov::test::utils::make_convolution(permute1, ngPrc, { 1, 8 }, { 1, 1 }, { 0, 0 }, { 0, 0 }, { 1, 1 },
ov::op::PadType::VALID, 12);
auto permute2 = std::make_shared<ov::op::v1::Transpose>(conv1,
ov::op::v0::Constant::create(ngraph::element::i64, ngraph::Shape{ 4 }, { 0, 2, 3, 1 }));
auto conv2 = ngraph::builder::makeConvolution(reshape2, ngPrc, { 1, 1 }, { 1, 1 }, { 0, 0 }, { 0, 0 }, { 1, 1 },
auto conv2 = ov::test::utils::make_convolution(reshape2, ngPrc, { 1, 1 }, { 1, 1 }, { 0, 0 }, { 0, 0 }, { 1, 1 },
ov::op::PadType::VALID, 336);
std::vector<size_t> outFormShapes3 = { 1, 1932 };


@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/input_conv.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
@ -79,7 +80,7 @@ void InputConvTest::SetUp() {
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};
auto conv0 = ngraph::builder::makeConvolution(params[0],
auto conv0 = ov::test::utils::make_convolution(params[0],
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},


@ -62,10 +62,10 @@ void MemoryLSTMCellTest::SetUp() {
input_parameter[0]->set_friendly_name("Parameter_1");
auto input_add_const = ngraph::builder::makeConstant(element_type, input_dims, input_bias);
auto add = ov::test::utils::makeEltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto input_mul_const = ngraph::builder::makeConstant(element_type, input_dims, input_weights);
auto mul = ov::test::utils::makeEltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto unsqueeze_input_const = std::make_shared<ov::op::v0::Constant>(element::i64, Shape{1}, squeeze_axes);
auto unsqueeze_input = std::make_shared<ov::op::v0::Unsqueeze>(mul, unsqueeze_input_const);
@ -159,10 +159,10 @@ void MemoryLSTMCellTest::switch_to_friendly_model() {
input_parameter[0]->set_friendly_name("Parameter_1");
auto input_add_const = ngraph::builder::makeConstant(element_type, input_dims, input_bias);
auto add = ov::test::utils::makeEltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto input_mul_const = ngraph::builder::makeConstant(element_type, input_dims, input_weights);
auto mul = ov::test::utils::makeEltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto unsqueeze_input_const = std::make_shared<ov::op::v0::Constant>(element::i64, Shape{1}, squeeze_axes);
auto unsqueeze_input = std::make_shared<ov::op::v0::Unsqueeze>(mul, unsqueeze_input_const);
@ -215,10 +215,10 @@ void MemoryLSTMCellTest::create_pure_tensor_iterator_model() {
input_parameter[0]->set_friendly_name("Parameter_1");
auto input_add_const = ngraph::builder::makeConstant(element_type, input_dims, input_bias);
auto add = ov::test::utils::makeEltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto input_mul_const = ngraph::builder::makeConstant(element_type, input_dims, input_weights);
auto mul = ov::test::utils::makeEltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto unsqueeze_input_const = std::make_shared<ov::op::v0::Constant>(element::i64, Shape{1}, squeeze_axes);
auto unsqueeze_input = std::make_shared<ov::op::v0::Unsqueeze>(mul, unsqueeze_input_const);


@ -6,6 +6,7 @@
#include "ov_models/builders.hpp"
#include "shared_test_classes/subgraph/memory_eltwise_reshape_concat.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace SubgraphTestsDefinitions {
@ -58,7 +59,7 @@ void MemoryEltwiseReshapeConcatTest::initTestModel() {
auto memory_read = std::make_shared<ov::op::v3::ReadValue>(memory_constant, "memory");
memory_read->set_friendly_name("memory_read");
auto mul = ngraph::builder::makeEltwise(input_parameter[0], memory_read, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(input_parameter[0], memory_read, ngraph::helpers::EltwiseTypes::MULTIPLY);
mul->set_friendly_name("multiplication");
auto memory_write = std::make_shared<ov::op::v3::Assign>(mul, "memory");
@ -91,7 +92,7 @@ void MemoryEltwiseReshapeConcatTest::initNgraphFriendlyModel() {
auto memory_constant = ngraph::builder::makeConstant<float>(ngPrc, input_dims, memory_init);
memory_constant->set_friendly_name("memory_constant");
auto mul = ngraph::builder::makeEltwise(input_parameter[0], memory_constant, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(input_parameter[0], memory_constant, ngraph::helpers::EltwiseTypes::MULTIPLY);
mul->set_friendly_name("multiplication");
auto reshape_pattern = std::make_shared<ov::op::v0::Constant>(ngraph::element::i64, ngraph::Shape{3}, std::vector<size_t>({1, inputSize, concatSize}));


@ -4,6 +4,7 @@
#include "ov_models/builders.hpp"
#include "shared_test_classes/subgraph/multi_input_scale.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace SubgraphTestsDefinitions {
@ -44,7 +45,7 @@ void MultipleInputScaleTest::SetUp() {
auto fc1 = ngraph::builder::makeFullyConnected(input[0], ngPrc, inputSize, false, {inputSize, inputSize}, fc1_weights);
auto fc2 = ngraph::builder::makeFullyConnected(input[1], ngPrc, inputSize, false, {inputSize, inputSize}, fc2_weights);
auto add = ngraph::builder::makeEltwise(fc1, fc2, ngraph::helpers::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(fc1, fc2, ngraph::helpers::EltwiseTypes::ADD);
auto result = std::make_shared<ov::op::v0::Result>(add);
function = std::make_shared<ngraph::Function>(result, input, "multiple_input_scale");


@ -57,10 +57,10 @@ void MultipleLSTMCellTest::SetUp() {
input_parameter[0]->set_friendly_name("Parameter_1");
auto input_add_const = ngraph::builder::makeConstant(element_type, input_dims, input_bias);
auto add = ov::test::utils::makeEltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto input_mul_const = ngraph::builder::makeConstant(element_type, input_dims, input_weights);
auto mul = ov::test::utils::makeEltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto unsqueeze_input_const = std::make_shared<ov::op::v0::Constant>(element::i64, Shape{1}, squeeze_axes);
auto unsqueeze_input = std::make_shared<ov::op::v0::Unsqueeze>(mul, unsqueeze_input_const);
@ -230,10 +230,10 @@ void MultipleLSTMCellTest::switch_to_friendly_model() {
input_parameter[0]->set_friendly_name("Parameter_1");
auto input_add_const = ngraph::builder::makeConstant(element_type, input_dims, input_bias);
auto add = ov::test::utils::makeEltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto input_mul_const = ngraph::builder::makeConstant(element_type, input_dims, input_weights);
auto mul = ov::test::utils::makeEltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto unsqueeze_input_const = std::make_shared<ov::op::v0::Constant>(element::i64, Shape{1}, squeeze_axes);
auto unsqueeze_input = std::make_shared<ov::op::v0::Unsqueeze>(mul, unsqueeze_input_const);
@ -322,10 +322,10 @@ void MultipleLSTMCellTest::create_pure_tensor_iterator_model() {
input_parameter[0]->set_friendly_name("Parameter_1");
auto input_add_const = ngraph::builder::makeConstant(element_type, input_dims, input_bias);
auto add = ov::test::utils::makeEltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto add = ov::test::utils::make_eltwise(input_parameter[0], input_add_const, ov::test::utils::EltwiseTypes::ADD);
auto input_mul_const = ngraph::builder::makeConstant(element_type, input_dims, input_weights);
auto mul = ov::test::utils::makeEltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto mul = ov::test::utils::make_eltwise(add, input_mul_const, ov::test::utils::EltwiseTypes::MULTIPLY);
auto unsqueeze_input_const = std::make_shared<ov::op::v0::Constant>(element::i64, Shape{1}, squeeze_axes);
auto unsqueeze_input = std::make_shared<ov::op::v0::Unsqueeze>(mul, unsqueeze_input_const);


@ -4,6 +4,7 @@
#include "ov_models/builders.hpp"
#include "shared_test_classes/subgraph/multiple_concat.hpp"
#include "common_test_utils/node_builders/activation.hpp"
namespace SubgraphTestsDefinitions {
@ -49,7 +50,7 @@ void MultipleConcatTest::SetUp() {
auto const_2 = ngraph::builder::makeConstant(ngPrc, constant_dims, concat_1_vals);
auto concat_2 = std::make_shared<ov::op::v0::Concat>(ov::NodeVector{concat_1, const_2}, 1);
auto act = ngraph::builder::makeActivation(concat_2, ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto act = ov::test::utils::make_activation(concat_2, ngPrc, ngraph::helpers::ActivationTypes::Relu);
function = std::make_shared<ngraph::Function>(act, input_parameter, "multiple_concat");
}


@ -4,6 +4,7 @@
#include "ov_models/builders.hpp"
#include "shared_test_classes/subgraph/multiple_input_fq.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace SubgraphTestsDefinitions {
@ -38,23 +39,23 @@ void MultipleInputTest::SetUp() {
std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape{1, inputSize})};
auto fake1 = ngraph::builder::makeFakeQuantize(input[0], ngPrc, std::numeric_limits<uint16_t>::max(), { 1 },
{ minInput }, { maxInput }, { minInput }, { maxInput });
auto add1 = ngraph::builder::makeEltwise(input[0], fake1, ngraph::helpers::EltwiseTypes::ADD);
auto add1 = ov::test::utils::make_eltwise(input[0], fake1, ngraph::helpers::EltwiseTypes::ADD);
auto fake_add1 = ngraph::builder::makeFakeQuantize(add1, ngPrc, std::numeric_limits<uint16_t>::max(), { 1 },
{ 2 * minInput }, { 2 * maxInput }, { 2 * minInput }, { 2 * maxInput });
auto fake2 = ngraph::builder::makeFakeQuantize(input[1], ngPrc, std::numeric_limits<uint16_t>::max(), { 1 },
{ minInput }, { maxInput }, { minInput }, { maxInput });
auto add2 = ngraph::builder::makeEltwise(input[1], fake2, ngraph::helpers::EltwiseTypes::ADD);
auto add2 = ov::test::utils::make_eltwise(input[1], fake2, ngraph::helpers::EltwiseTypes::ADD);
auto fake_add2 = ngraph::builder::makeFakeQuantize(add2, ngPrc, std::numeric_limits<uint16_t>::max(), { 1 },
{ 2 * minInput }, { 2 * maxInput }, { 2 * minInput }, { 2 * maxInput });
auto add3 = ngraph::builder::makeEltwise(fake_add1, fake_add2, ngraph::helpers::EltwiseTypes::ADD);
auto add3 = ov::test::utils::make_eltwise(fake_add1, fake_add2, ngraph::helpers::EltwiseTypes::ADD);
auto fake_add3 = ngraph::builder::makeFakeQuantize(add3, ngPrc, std::numeric_limits<uint16_t>::max(), { 1 },
{ 4 * minInput }, { 4 * maxInput }, { 4 * minInput }, { 4 * maxInput });
auto fake3 = ngraph::builder::makeFakeQuantize(input[2], ngPrc, std::numeric_limits<uint16_t>::max(), { 1 },
{ minInput }, { maxInput }, { minInput }, { maxInput });
auto add4 = ngraph::builder::makeEltwise(fake3, fake_add3, ngraph::helpers::EltwiseTypes::ADD);
auto add4 = ov::test::utils::make_eltwise(fake3, fake_add3, ngraph::helpers::EltwiseTypes::ADD);
auto fake_add4 = ngraph::builder::makeFakeQuantize(add4, ngPrc, std::numeric_limits<uint16_t>::max(), { 1 },
{ 5 * minInput }, { 5 * maxInput }, { 5 * minInput }, { 5 * maxInput });


@ -41,7 +41,11 @@ namespace SubgraphTestsDefinitions {
// Use memory layer as the second input of 'concat' to get negative offset
auto concat = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{ input[0], mem_r }, 1);
auto split = ngraph::builder::makeVariadicSplit(concat, { hiddenSize, inputSize }, 1);
auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{1});
auto num_split = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2}, std::vector<size_t>{hiddenSize, inputSize});
auto split = std::make_shared<ov::op::v1::VariadicSplit>(concat, split_axis_op, num_split);
auto mem_w = std::make_shared<ov::op::v3::Assign>(split->output(0), "memory");
auto sigm = std::make_shared<ov::op::v0::Sigmoid>(split->output(1));
@ -60,7 +64,11 @@ namespace SubgraphTestsDefinitions {
ov::ParameterVector input{std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape{1, inputSize})};
auto mem_c = std::make_shared<ov::op::v0::Constant>(ngPrc, ngraph::Shape{ 1, hiddenSize }, memory_init);
auto concat = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{ input[0], mem_c }, 1);
auto split = ngraph::builder::makeVariadicSplit(concat, { hiddenSize, inputSize }, 1);
auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, std::vector<int64_t>{1});
auto num_split = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{2}, std::vector<size_t>{hiddenSize, inputSize});
auto split = std::make_shared<ov::op::v1::VariadicSplit>(concat, split_axis_op, num_split);
auto sigm = std::make_shared<ov::op::v0::Sigmoid>(split->output(1));
function = std::make_shared<ngraph::Function>(sigm, input, "negative_memory_layer_offset_nonmemory");


@ -7,6 +7,7 @@
#include "common_test_utils/data_utils.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace ov {
namespace test {
@ -63,7 +64,7 @@ void PermConvPermConcat::SetUp() {
auto conv_in_shape = permute_in->get_output_shape(0);
auto conv_weights_size = output_channels * (conv_in_shape[1]) * kernel_shape[0] * kernel_shape[1];
auto conv =
ngraph::builder::makeConvolution(permute_in,
ov::test::utils::make_convolution(permute_in,
element_type,
{kernel_shape[0], kernel_shape[1]},
{1, 1},


@ -3,6 +3,7 @@
//
#include "shared_test_classes/subgraph/quantized_convolution_backprop_data.hpp"
#include "common_test_utils/node_builders/convolution_backprop_data.hpp"
#include "ov_models/builders.hpp"
#include "ov_models/utils/ov_helpers.hpp"
@ -71,7 +72,7 @@ void QuantConvBackpropDataLayerTest::SetUp() {
auto weightsFq = ngraph::builder::makeFakeQuantize(weightsNode, element_type, quantLevels, weightsFqConstShapes);
auto convBackpropData = std::dynamic_pointer_cast<ov::op::v1::ConvolutionBackpropData>(
ngraph::builder::makeConvolutionBackpropData(dataFq, weightsFq, element_type, stride, padBegin, padEnd, dilation, padType));
ov::test::utils::make_convolution_backprop_data(dataFq, weightsFq, element_type, stride, padBegin, padEnd, dilation, padType));
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(convBackpropData)};
function = std::make_shared<ov::Model>(results, params, "QuantConvolutionBackpropData");
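
The backprop-data variant used here takes the data and an explicit weights node, followed by the element type and the usual spatial attributes; again only the namespace changes. A hedged sketch of that two-input form (the strides, pads and dilations below are placeholders):

#include "common_test_utils/node_builders/convolution_backprop_data.hpp"

// Transposed convolution over `data` with the given `weights` node.
std::shared_ptr<ov::Node> example_deconv(const ov::Output<ov::Node>& data, const ov::Output<ov::Node>& weights) {
    return ov::test::utils::make_convolution_backprop_data(data,
                                                           weights,
                                                           ov::element::f32,
                                                           {1, 1},   // strides
                                                           {0, 0},   // pads begin
                                                           {0, 0},   // pads end
                                                           {1, 1},   // dilations
                                                           ov::op::PadType::VALID);
}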


@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/quantized_group_convolution.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/group_convolution.hpp"
#include "ov_models/utils/ov_helpers.hpp"
namespace ov {
@ -85,7 +86,7 @@ void QuantGroupConvLayerTest::SetUp() {
}
auto groupConv = std::dynamic_pointer_cast<ov::op::v1::GroupConvolution>(
ngraph::builder::makeGroupConvolution(dataFq, weights, element_type, stride, padBegin, padEnd, dilation, padType));
ov::test::utils::make_group_convolution(dataFq, weights, element_type, stride, padBegin, padEnd, dilation, padType));
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(groupConv)};
function = std::make_shared<ov::Model>(results, params, "QuantGroupConvolution");
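
make_group_convolution has the same two-input form; the weights node is passed through to the resulting ov::op::v1::GroupConvolution, so it is assumed here to already use the grouped 5-D layout [groups, C_out/groups, C_in/groups, kH, kW]. A hedged sketch with illustrative shapes and attribute values:

#include "common_test_utils/node_builders/group_convolution.hpp"

// Group convolution of `data` (assumed NCHW with 8 input channels): 2 groups, 3x3 kernels, 8 output channels.
std::shared_ptr<ov::Node> example_group_conv(const ov::Output<ov::Node>& data) {
    auto weights = std::make_shared<ov::op::v0::Constant>(ov::element::f32,
                                                          ov::Shape{2, 4, 4, 3, 3},
                                                          std::vector<float>(2 * 4 * 4 * 3 * 3, 0.1f));
    return ov::test::utils::make_group_convolution(data,
                                                   weights,
                                                   ov::element::f32,
                                                   {1, 1},   // strides
                                                   {1, 1},   // pads begin
                                                   {1, 1},   // pads end
                                                   {1, 1},   // dilations
                                                   ov::op::PadType::EXPLICIT);
}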


@ -3,6 +3,7 @@
//
#include "shared_test_classes/subgraph/quantized_group_convolution_backprop_data.hpp"
#include "common_test_utils/node_builders/group_convolution_backprop_data.hpp"
#include "ov_models/builders.hpp"
#include "ov_models/utils/ov_helpers.hpp"
@ -77,7 +78,7 @@ void QuantGroupConvBackpropDataLayerTest::SetUp() {
auto weightsFq = ngraph::builder::makeFakeQuantize(weightsNode, element_type, quantLevels, weightsFqConstShapes);
auto groupConvBackpropData = std::dynamic_pointer_cast<ov::opset1::GroupConvolutionBackpropData>(
ngraph::builder::makeGroupConvolutionBackpropData(dataFq, weightsFq, element_type, stride, padBegin, padEnd, dilation, padType));
ov::test::utils::make_group_convolution_backprop_data(dataFq, weightsFq, element_type, stride, padBegin, padEnd, dilation, padType));
ov::ResultVector results{std::make_shared<ov::op::v0::Result>(groupConvBackpropData)};
function = std::make_shared<ov::Model>(results, params, "QuantGroupConvolutionBackpropData");


@ -5,6 +5,7 @@
#include "shared_test_classes/subgraph/range_add.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace ov {
namespace test {
@ -38,7 +39,7 @@ void RangeAddSubgraphTest::SetUp() {
auto range = std::make_shared<ov::op::v0::Range>(startConstant, stopConstant, stepConstant);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(element_type, range->get_shape())};
auto eltwise = ngraph::builder::makeEltwise(params.front(), range, ov::test::utils::EltwiseTypes::ADD);
auto eltwise = ov::test::utils::make_eltwise(params.front(), range, ov::test::utils::EltwiseTypes::ADD);
const ov::ResultVector results{std::make_shared<ov::op::v0::Result>(eltwise)};
function = std::make_shared<ov::Model>(results, params, "RangeEltwise");
}
@ -73,7 +74,7 @@ void RangeNumpyAddSubgraphTest::SetUp() {
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(element_type, range->get_shape())};
auto eltwise = ngraph::builder::makeEltwise(params.front(), range, ov::test::utils::EltwiseTypes::ADD);
auto eltwise = ov::test::utils::make_eltwise(params.front(), range, ov::test::utils::EltwiseTypes::ADD);
const ov::ResultVector results{std::make_shared<ov::op::v0::Result>(eltwise)};
function = std::make_shared<ov::Model>(results, params, "RangeEltwise");
}


@ -4,6 +4,7 @@
#include "ov_models/builders.hpp"
#include "shared_test_classes/subgraph/reduce_eltwise.hpp"
#include "common_test_utils/node_builders/eltwise.hpp"
namespace SubgraphTestsDefinitions {
std::string ReduceEltwiseTest::getTestCaseName(const testing::TestParamInfo<ReduceEltwiseParamsTuple> &obj) {
@ -58,7 +59,7 @@ void ReduceEltwiseTest::SetUp() {
ASSERT_GT(constShape.size(), 2);
constShape[2] = inputShape.back();
auto constant = ngraph::builder::makeConstant<float>(ngPrc, constShape, {}, true);
auto eltw = ngraph::builder::makeEltwise(reduce, constant, ngraph::helpers::EltwiseTypes::MULTIPLY);
auto eltw = ov::test::utils::make_eltwise(reduce, constant, ngraph::helpers::EltwiseTypes::MULTIPLY);
ngraph::ResultVector results{std::make_shared<ov::op::v0::Result>(eltw)};
function = std::make_shared<ngraph::Function>(results, params, "ReduceEltwise");
}


@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/reshape_permute_conv_permute_reshape_act.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace ov {
namespace test {
@ -61,7 +62,7 @@ void ConvReshapeAct::SetUp() {
ov::Shape{permute_in_order});
auto permute_in = std::make_shared<ov::op::v1::Transpose>(reshape_in, permute_in_params);
auto conv = ngraph::builder::makeConvolution(permute_in, model_type, {kernel_shape[0], kernel_shape[1]}, {1, 1}, {0, 0}, {0, 0}, {1, 1},
auto conv = ov::test::utils::make_convolution(permute_in, model_type, {kernel_shape[0], kernel_shape[1]}, {1, 1}, {0, 0}, {0, 0}, {1, 1},
ov::op::PadType::VALID, output_channels);
auto permute_out_params = std::make_shared<ov::op::v0::Constant>(ov::element::i64,


@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/scaleshift_conv_scaleshift.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
@ -69,7 +70,7 @@ void ScaleShiftAfterConvTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.2f, 0.2f);
auto conv = ngraph::builder::makeConvolution(reshape1,
auto conv = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},
@ -169,7 +170,7 @@ void ScaleShiftBeforeConvTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.1f, 0.1f);
auto conv = ngraph::builder::makeConvolution(reshape2,
auto conv = ov::test::utils::make_convolution(reshape2,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},


@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/split_conv.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
@ -76,7 +77,7 @@ void SplitConvTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * convInputShape[1] * kernelShape[0] * kernelShape[1],
-0.2f, 0.2f);
auto conv = ngraph::builder::makeConvolution(reshape1,
auto conv = ov::test::utils::make_convolution(reshape1,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},


@ -9,6 +9,7 @@
#include "ov_models/builders.hpp"
#include "shared_test_classes/base/layer_test_utils.hpp"
#include "shared_test_classes/base/ov_subgraph.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace ov {
namespace test {
@ -46,7 +47,7 @@ void SplitConvConcatBase::configure_test(const ov::test::BasicParams& param) {
filterWeights1 = ov::test::utils::generate_float_numbers(8 * inputShape[1] / 2 * 3, -0.2f, 0.2f);
filterWeights2 = ov::test::utils::generate_float_numbers(8 * inputShape[1] / 2 * 3, -0.2f, 0.2f);
}
auto conv1 = ngraph::builder::makeConvolution(split->output(0),
auto conv1 = ov::test::utils::make_convolution(split->output(0),
element_type,
{1, 3},
{1, 1},
@ -59,7 +60,7 @@ void SplitConvConcatBase::configure_test(const ov::test::BasicParams& param) {
filterWeights1);
auto relu1 = std::make_shared<ov::op::v0::Relu>(conv1);
auto conv2 = ngraph::builder::makeConvolution(split->output(1),
auto conv2 = ov::test::utils::make_convolution(split->output(1),
element_type,
{1, 3},
{1, 1},


@ -3,6 +3,7 @@
//
#include "shared_test_classes/subgraph/split_trivial_permute_concat.hpp"
#include "common_test_utils/node_builders/activation.hpp"
namespace SubgraphTestsDefinitions {
std::string SplitTrivialPermuteConcatTest::getTestCaseName(const testing::TestParamInfo<SplitTrivialPermuteConcatTuple>& obj) {
@ -45,7 +46,7 @@ namespace SubgraphTestsDefinitions {
auto permute_1 = std::make_shared<ov::op::v1::Transpose>(split->output(1), permute_in_params);
auto concat = std::make_shared<ov::op::v0::Concat>(ngraph::OutputVector{ permute_0, permute_1 }, concatAxis);
auto act = ngraph::builder::makeActivation(concat, ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto act = ov::test::utils::make_activation(concat, ngPrc, ngraph::helpers::ActivationTypes::Relu);
function = std::make_shared<ngraph::Function>(act, input, "split_trivial_permute_concat");
}
} // namespace SubgraphTestsDefinitions


@ -4,6 +4,7 @@
#include "shared_test_classes/subgraph/stridedslice_conv.hpp"
#include "ov_models/builders.hpp"
#include "common_test_utils/node_builders/convolution.hpp"
namespace SubgraphTestsDefinitions {
@ -76,7 +77,7 @@ void SliceConvTest::SetUp() {
auto filterWeights = ov::test::utils::generate_float_numbers(outputChannels * inputShape[1] * kernelShape[0] * kernelShape[1],
-0.2f, 0.2f);
auto conv = ngraph::builder::makeConvolution(ss,
auto conv = ov::test::utils::make_convolution(ss,
ngPrc,
{kernelShape[0], kernelShape[1]},
{kernelShape[0] > 1 ? stride : 1, stride},


@ -3,6 +3,7 @@
//
#include "shared_test_classes/subgraph/trivial_concat.hpp"
#include "common_test_utils/node_builders/activation.hpp"
namespace SubgraphTestsDefinitions {
@ -30,7 +31,7 @@ void TrivialConcatLayerTest::SetUp() {
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape{1, total_size})};
auto input_relu = ngraph::builder::makeActivation(params[0], ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto input_relu = ov::test::utils::make_activation(params[0], ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto input_reshape_pattern = std::make_shared<ov::op::v0::Constant>(ngraph::element::i64,
ngraph::Shape{inputShape.size()}, std::vector<size_t>(inputShape));
@ -47,7 +48,7 @@ void TrivialConcatLayerTest::SetUp() {
ngraph::Shape{2}, std::vector<size_t>({1, 2 * total_size}));
auto final_reshape = std::make_shared<ov::op::v1::Reshape>(concat, final_reshape_pattern, false);
auto act = ngraph::builder::makeActivation(final_reshape, ngPrc, ngraph::helpers::ActivationTypes::Relu);
auto act = ov::test::utils::make_activation(final_reshape, ngPrc, ngraph::helpers::ActivationTypes::Relu);
ngraph::ResultVector results{std::make_shared<ov::op::v0::Result>(act)};
function = std::make_shared<ngraph::Function>(results, params, "trivial_concat");


@ -43,9 +43,12 @@ void VariadicSplitPad::SetUp() {
std::tie(input_shape, axis, numSplits, connectIndexes, padBegin, padEnd, padMode, element_type, targetDevice) =
this->GetParam();
ov::ParameterVector input{std::make_shared<ov::op::v0::Parameter>(element_type, ov::Shape(input_shape))};
auto split = ngraph::builder::makeVariadicSplit(input[0], numSplits, axis);
ov::ResultVector results;
auto split_axis_op = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{}, axis);
auto num_split = std::make_shared<ov::op::v0::Constant>(ov::element::u64, ov::Shape{numSplits.size()}, numSplits);
auto split = std::make_shared<ov::op::v1::VariadicSplit>(input[0], split_axis_op, num_split);
ov::ResultVector results;
for (size_t i : connectIndexes) {
auto pads_begin = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{padBegin.size()}, padBegin.data());
auto pads_end = std::make_shared<ov::op::v0::Constant>(ov::element::i64, ov::Shape{padEnd.size()}, padEnd.data());


@ -253,10 +253,6 @@ std::shared_ptr<ov::Node> makeSplit(const ov::Output<Node>& in,
size_t numSplits,
int64_t axis);
std::shared_ptr<ov::Node> makeVariadicSplit(const ov::Output<Node>& in,
const std::vector<size_t> numSplits,
int64_t axis);
std::shared_ptr<ov::Node> makeActivation(const ov::Output<Node>& in,
const element::Type& type,
ov::test::utils::ActivationTypes activationType,
@ -377,12 +373,6 @@ std::shared_ptr<ov::Node> makeEmbeddingBagOffsetsSum(const element::Type& dataTy
bool with_weights,
bool with_default_index);
std::shared_ptr<ov::Node> makeEmbeddingBagPackedSum(const element::Type& dataType,
const element::Type& indicesType,
const ov::Output<Node>& emb_table_node,
const std::vector<std::vector<size_t>>& indices,
bool with_weights);
std::shared_ptr<ov::Node> makeEmbeddingSegmentsSum(const element::Type& dataType,
const element::Type& indicesType,
const ov::Output<Node>& emb_table_node,
@ -461,14 +451,6 @@ std::shared_ptr<ov::Node> makeGRU(
ov::op::RecurrentSequenceDirection direction = ov::op::RecurrentSequenceDirection::FORWARD,
ov::test::utils::SequenceTestsMode mode = ov::test::utils::SequenceTestsMode::PURE_SEQ);
std::shared_ptr<ov::Node> makeAUGRU(
const OutputVector& in,
const std::vector<ov::Shape>& constants,
std::size_t hidden_size,
bool make_sequence = false,
ov::op::RecurrentSequenceDirection direction = ov::op::RecurrentSequenceDirection::FORWARD,
ov::test::utils::SequenceTestsMode mode = ov::test::utils::SequenceTestsMode::PURE_SEQ);
std::shared_ptr<ov::Node> makeRNN(
const OutputVector& in,
const std::vector<ov::Shape>& constants,


@ -1,90 +0,0 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "ov_ops/augru_cell.hpp"
#include <memory>
#include <vector>
#include "ov_models/builders.hpp"
#include "ov_ops/augru_sequence.hpp"
namespace ngraph {
namespace builder {
/**
* There are 2 options to paramter "in" when "make_sequence" is true.
* 0 1 2 3
* X init_hidden_state attention seq_length
* or,
* 0 1 2
* X init_hidden_state attention
*
*/
std::shared_ptr<ov::Node> makeAUGRU(const OutputVector& in,
const std::vector<ov::Shape>& constants,
std::size_t hidden_size,
bool make_sequence,
ov::op::RecurrentSequenceDirection direction,
ov::test::utils::SequenceTestsMode mode) {
std::vector<float> empty;
auto W = ngraph::builder::makeConstant(in[0].get_element_type(), constants[0], empty, true);
W->set_friendly_name("augru_w");
auto R = ngraph::builder::makeConstant(in[0].get_element_type(), constants[1], empty, true);
R->set_friendly_name("augru_r");
auto B = ngraph::builder::makeConstant(in[0].get_element_type(), constants[2], empty, true);
B->set_friendly_name("augru_b");
if (!make_sequence) {
return std::make_shared<ov::op::internal::AUGRUCell>(in[0], in[1], W, R, B, in[2], hidden_size);
} else {
if (in.size() > 3 && in[3].get_partial_shape().is_dynamic()) {
return std::make_shared<ov::op::internal::AUGRUSequence>(in[0], in[1], in[3], W, R, B, in[2], hidden_size);
} else {
std::shared_ptr<Node> seq_lengths;
switch (mode) {
case ov::test::utils::SequenceTestsMode::PURE_SEQ:
case ov::test::utils::SequenceTestsMode::CONVERT_TO_TI_MAX_SEQ_LEN_CONST: {
std::vector<float> lengths(in[0].get_partial_shape()[0].get_min_length(),
in[0].get_partial_shape()[1].get_min_length());
seq_lengths = ngraph::builder::makeConstant(element::i64, constants[3], lengths, false);
break;
}
case ov::test::utils::SequenceTestsMode::CONVERT_TO_TI_RAND_SEQ_LEN_CONST:
case ov::test::utils::SequenceTestsMode::PURE_SEQ_RAND_SEQ_LEN_CONST: {
for (size_t i = 0; i <= in[0].get_shape().at(0); ++i) {
std::vector<float> lengths;
seq_lengths = ngraph::builder::makeConstant(element::i64,
constants[3],
lengths,
true,
static_cast<float>(in[0].get_shape()[1]),
0.f);
}
break;
}
case ov::test::utils::SequenceTestsMode::CONVERT_TO_TI_RAND_SEQ_LEN_PARAM:
case ov::test::utils::SequenceTestsMode::CONVERT_TO_TI_MAX_SEQ_LEN_PARAM:
case ov::test::utils::SequenceTestsMode::PURE_SEQ_RAND_SEQ_LEN_PARAM: {
// Seq_lengths should be as a Parameter node for these two modes
if (in.size() < 4)
throw std::runtime_error("Incorrect number of inputs for creation of Sequence operation");
seq_lengths = in.at(3).get_node_shared_ptr();
break;
}
default:
throw std::runtime_error("Incorrect mode for creation of Sequence operation");
}
return std::make_shared<ov::op::internal::AUGRUSequence>(in[0],
in[1],
seq_lengths,
W,
R,
B,
in[2],
hidden_size);
}
}
}
} // namespace builder
} // namespace ngraph
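
With makeAUGRU removed, a test that still needs the internal AUGRU ops builds them directly. A minimal sketch of the cell case, mirroring the input order of the removed helper (X, initial hidden state, W, R, B, attention, hidden_size); the W/R/B shapes use the usual 3*hidden_size GRU gate layout, which is an assumption here rather than something stated in this diff:

#include "ov_ops/augru_cell.hpp"

// X: [batch, input_size], H: [batch, hidden_size], attention: [batch, 1] (assumed shapes).
std::shared_ptr<ov::Node> example_augru_cell(const ov::Output<ov::Node>& X,
                                             const ov::Output<ov::Node>& H,
                                             const ov::Output<ov::Node>& attention,
                                             size_t input_size,
                                             size_t hidden_size) {
    const auto& type = X.get_element_type();
    auto W = std::make_shared<ov::op::v0::Constant>(type, ov::Shape{3 * hidden_size, input_size},
                                                    std::vector<float>(3 * hidden_size * input_size, 0.01f));
    auto R = std::make_shared<ov::op::v0::Constant>(type, ov::Shape{3 * hidden_size, hidden_size},
                                                    std::vector<float>(3 * hidden_size * hidden_size, 0.01f));
    auto B = std::make_shared<ov::op::v0::Constant>(type, ov::Shape{3 * hidden_size},
                                                    std::vector<float>(3 * hidden_size, 0.f));
    return std::make_shared<ov::op::internal::AUGRUCell>(X, H, W, R, B, attention, hidden_size);
}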


@ -48,4 +48,4 @@ std::shared_ptr<ov::Node> makeEltwise(const ov::Output<Node>& in0,
}
} // namespace builder
} // namespace ngraph
} // namespace ngraph


@ -1,38 +0,0 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <memory>
#include <vector>
#include "openvino/op/embeddingbag_packedsum.hpp"
#include "ov_models/builders.hpp"
namespace ngraph {
namespace builder {
std::shared_ptr<Node> makeEmbeddingBagPackedSum(const element::Type& dataType,
const element::Type& indicesType,
const ov::Output<Node>& embTableNode,
const std::vector<std::vector<size_t>>& indices,
bool with_weights) {
std::vector<size_t> i_shape({indices.size(), indices[0].size()});
size_t i_size = ov::shape_size(i_shape);
std::vector<size_t> i_values(i_size);
for (int i = 0; i < indices.size(); i++)
memcpy(i_values.data() + indices[0].size() * i, indices[i].data(), indices[0].size() * sizeof(size_t));
auto indicesNode = std::make_shared<ov::op::v0::Constant>(indicesType, i_shape, i_values);
std::shared_ptr<Node> embBag;
if (with_weights) {
auto weightsNode = makeConstant<float>(dataType, i_shape, {}, true);
embBag = std::make_shared<ov::op::v3::EmbeddingBagPackedSum>(embTableNode, indicesNode, weightsNode);
} else {
embBag = std::make_shared<ov::op::v3::EmbeddingBagPackedSum>(embTableNode, indicesNode);
}
return embBag;
}
} // namespace builder
} // namespace ngraph
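
The packed embedding-bag builder goes the same way: callers move to the new make_embedding_bag_packed_sum helper, while the op itself can also be built directly, as the removed code above shows. A small sketch of the direct form without per-sample weights (the table and index values are illustrative):

#include "openvino/op/constant.hpp"
#include "openvino/op/embeddingbag_packedsum.hpp"

// Sums embeddings for two bags of two indices each from `emb_table` ([num_rows, emb_dim]).
std::shared_ptr<ov::Node> example_emb_bag(const ov::Output<ov::Node>& emb_table) {
    auto indices = std::make_shared<ov::op::v0::Constant>(ov::element::i64,
                                                          ov::Shape{2, 2},
                                                          std::vector<int64_t>{0, 2, 1, 2});
    return std::make_shared<ov::op::v3::EmbeddingBagPackedSum>(emb_table, indices);
}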


@ -1,23 +0,0 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "openvino/op/variadic_split.hpp"
#include <memory>
#include <vector>
#include "ov_models/builders.hpp"
namespace ngraph {
namespace builder {
std::shared_ptr<ov::Node> makeVariadicSplit(const ov::Output<Node>& in,
const std::vector<size_t> numSplits,
int64_t axis) {
auto splitAxisOp = std::make_shared<ov::op::v0::Constant>(element::i64, ov::Shape{}, std::vector<int64_t>{axis});
auto numSplit = std::make_shared<ov::op::v0::Constant>(element::u64, ov::Shape{numSplits.size()}, numSplits);
auto VariadicSplitNode = std::make_shared<ov::op::v1::VariadicSplit>(in, splitAxisOp, numSplit);
return VariadicSplitNode;
}
} // namespace builder
} // namespace ngraph


@ -11,9 +11,9 @@
namespace ov {
namespace test {
namespace utils {
std::shared_ptr<ov::Node> makeEltwise(const ov::Output<Node>& in0,
const ov::Output<Node>& in1,
ov::test::utils::EltwiseTypes eltwise_type);
std::shared_ptr<ov::Node> make_eltwise(const ov::Output<Node>& in0,
const ov::Output<Node>& in1,
ov::test::utils::EltwiseTypes eltwise_type);
} // namespace utils
} // namespace test
} // namespace ov


@ -21,9 +21,9 @@
namespace ov {
namespace test {
namespace utils {
std::shared_ptr<ov::Node> makeEltwise(const ov::Output<Node>& in0,
const ov::Output<Node>& in1,
ov::test::utils::EltwiseTypes eltwiseType) {
std::shared_ptr<ov::Node> make_eltwise(const ov::Output<Node>& in0,
const ov::Output<Node>& in1,
ov::test::utils::EltwiseTypes eltwiseType) {
switch (eltwiseType) {
case ov::test::utils::EltwiseTypes::ADD:
return std::make_shared<ov::op::v1::Add>(in0, in1);