parent 973b194776
commit 30260e3c7c
@@ -5,144 +5,129 @@
 #include <vector>
 
+#include "common_test_utils/test_enums.hpp"
-#include "single_layer_tests/activation.hpp"
+#include "single_op_tests/activation.hpp"
 #include "common_test_utils/test_constants.hpp"
 
-using namespace LayerTestsDefinitions;
-using namespace ov::test::utils;
 namespace {
-// Common params
-const std::vector<InferenceEngine::Precision> inputPrecisions = {
-        InferenceEngine::Precision::FP32
-        // TODO: Fix Issue-27390
-        // InferenceEngine::Precision::I16,
-        // InferenceEngine::Precision::U8
+using ov::test::ActivationLayerTest;
+using ov::test::ActivationParamLayerTest;
+using ov::test::utils::ActivationTypes;
+
+const std::vector<ov::element::Type> model_types = {
+        ov::element::f32,
+        ov::element::f16
 };
 
-const std::vector<InferenceEngine::Precision> netPrecisions = {
-        InferenceEngine::Precision::FP32,
-        InferenceEngine::Precision::FP16
-};
-
-const std::vector<InferenceEngine::Precision> intPrecisions = {
-        InferenceEngine::Precision::I32,
-};
-
-const std::map<ov::test::utils::ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
-        {Sigmoid, {}},
-        {Tan, {}},
-        {Tanh, {}},
-        {Relu, {}},
-        {Exp, {}},
-        {Log, {}},
-        {Sign, {}},
-        {Abs, {}},
-        {Clamp, {{-2.0f, 2.0f}}},
-        {Negative, {}},
-        {Acos, {}},
-        {Acosh, {}},
-        {Asin, {}},
-        {Asinh, {}},
-        {Atan, {}},
-        {Atanh, {}},
-        {Cos, {}},
-        {Cosh, {}},
-        {Floor, {}},
-        {Sin, {}},
-        {Sinh, {}},
-        {Sqrt, {}},
-        {Elu, {{0.1f}}},
-        {Erf, {}},
-        {HardSigmoid, {{0.2f, 0.5f}}},
-        {Selu, {{1.6732f, 1.0507f}}},
-        {Ceiling, {}},
-        {Mish, {}},
-        {HSwish, {}},
-        {SoftPlus, {}},
-        {HSigmoid, {}},
-        {RoundHalfToEven, {}},
-        {RoundHalfAwayFromZero, {}},
-        {GeluErf, {}},
-        {GeluTanh, {}},
-        {Swish, {{0.4f}}}
+const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
+        {ActivationTypes::Sigmoid, {}},
+        {ActivationTypes::Tan, {}},
+        {ActivationTypes::Tanh, {}},
+        {ActivationTypes::Relu, {}},
+        {ActivationTypes::Exp, {}},
+        {ActivationTypes::Log, {}},
+        {ActivationTypes::Sign, {}},
+        {ActivationTypes::Abs, {}},
+        {ActivationTypes::Clamp, {{-2.0f, 2.0f}}},
+        {ActivationTypes::Negative, {}},
+        {ActivationTypes::Acos, {}},
+        {ActivationTypes::Acosh, {}},
+        {ActivationTypes::Asin, {}},
+        {ActivationTypes::Asinh, {}},
+        {ActivationTypes::Atan, {}},
+        {ActivationTypes::Atanh, {}},
+        {ActivationTypes::Cos, {}},
+        {ActivationTypes::Cosh, {}},
+        {ActivationTypes::Floor, {}},
+        {ActivationTypes::Sin, {}},
+        {ActivationTypes::Sinh, {}},
+        {ActivationTypes::Sqrt, {}},
+        {ActivationTypes::Elu, {{0.1f}}},
+        {ActivationTypes::Erf, {}},
+        {ActivationTypes::HardSigmoid, {{0.2f, 0.5f}}},
+        {ActivationTypes::Selu, {{1.6732f, 1.0507f}}},
+        {ActivationTypes::Ceiling, {}},
+        {ActivationTypes::Mish, {}},
+        {ActivationTypes::HSwish, {}},
+        {ActivationTypes::SoftPlus, {}},
+        {ActivationTypes::HSigmoid, {}},
+        {ActivationTypes::RoundHalfToEven, {}},
+        {ActivationTypes::RoundHalfAwayFromZero, {}},
+        {ActivationTypes::GeluErf, {}},
+        {ActivationTypes::GeluTanh, {}},
+        {ActivationTypes::Swish, {{0.4f}}}
 };
 
 // List of operations that should be tested also with integer precision
 const std::map<ActivationTypes, std::vector<std::vector<float>>> intActivationTypes = {
-        {Acosh, {}},
-        {Asinh, {}},
-        {Atan, {}},
-        {Negative, {}},
-        {Ceiling, {}},
-        {Cos, {}},
-        {Cosh, {}},
-        {Sign, {}},
-        {Sinh, {}},
-        {Sqrt, {}},
-        {Tan, {}},
-        {Tanh, {}},
+        {ActivationTypes::Acosh, {}},
+        {ActivationTypes::Asinh, {}},
+        {ActivationTypes::Atan, {}},
+        {ActivationTypes::Negative, {}},
+        {ActivationTypes::Ceiling, {}},
+        {ActivationTypes::Cos, {}},
+        {ActivationTypes::Cosh, {}},
+        {ActivationTypes::Sign, {}},
+        {ActivationTypes::Sinh, {}},
+        {ActivationTypes::Sqrt, {}},
+        {ActivationTypes::Tan, {}},
+        {ActivationTypes::Tanh, {}},
 };
 
 const std::map<ActivationTypes, std::vector<std::vector<float>>> activationParamTypes = {
-        {PReLu, {{}}}, // Slope will be filled with increasing values from -10 to match slope input shape
-        {LeakyRelu, {{0.01f}}}
+        {ActivationTypes::PReLu, {{}}}, // Slope will be filled with increasing values from -10 to match slope input shape
+        {ActivationTypes::LeakyRelu, {{0.01f}}}
 };
 
-std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
-        {{1, 50}, {{}}},
-        {{5, 128}, {{}}},
-        {{2, 2, 2, 2, 2, 2, 2, 2}, {{}}},
+std::map<std::vector<ov::Shape>, std::vector<ov::Shape>> basic_input_shapes_static = {
+        {{{1, 50}}, {}},
+        {{{5, 128}}, {}},
+        {{{2, 2, 2, 2, 2, 2, 2, 2}}, {}},
 };
 
-std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> preluBasic = {
-        {{1, 50}, {{1}, {50}}},
-        {{1, 128}, {{1}, {128}}},
+std::map<std::vector<ov::Shape>, std::vector<ov::Shape>> prelu_basic_input_shapes_static = {
+        {{{1, 50}}, {{1}, {50}}},
+        {{{1, 128}}, {{1}, {128}}},
 
         // Broadcast check
-        {{3, 2}, {{1}, {2}, {3, 2}}},
-        {{3, 2, 5}, {{1}, {2}, {5}, {2, 5}, {3, 1, 5}, {1, 2, 1}, {1, 1, 5}, {3, 1, 1}, {3, 2, 5}}},
-        {{2, 1, 2}, {{2}, {2, 1, 1}}},
-        {{3, 2, 5, 7}, {{1}, {7}, {2}, {5, 7}, {2, 5, 7}, {2, 1, 1}, {1, 2, 1, 1}, {3, 2, 1, 1}, {3, 2, 5, 7}}},
-        {{2, 2, 2, 2, 2, 2, 2, 2}, {{2}, {2, 2}, {2, 1, 1, 2}}},
+        {{{3, 2}}, {{1}, {2}, {3, 2}}},
+        {{{3, 2, 5}}, {{1}, {2}, {5}, {2, 5}, {3, 1, 5}, {1, 2, 1}, {1, 1, 5}, {3, 1, 1}, {3, 2, 5}}},
+        {{{2, 1, 2}}, {{2}, {2, 1, 1}}},
+        {{{3, 2, 5, 7}}, {{1}, {7}, {2}, {5, 7}, {2, 5, 7}, {2, 1, 1}, {1, 2, 1, 1}, {3, 2, 1, 1}, {3, 2, 5, 7}}},
+        {{{2, 2, 2, 2, 2, 2, 2, 2}}, {{2}, {2, 2}, {2, 1, 1, 2}}},
 };
 
+auto static_shapes_param_transform = [](const std::vector<std::pair<std::vector<ov::Shape>, ov::Shape>>& original_shapes) {
+    std::vector<std::pair<std::vector<ov::test::InputShape>, ov::Shape>> new_shapes;
+    for (const auto& shape_element : original_shapes) {
+        new_shapes.emplace_back(ov::test::static_shapes_to_test_representation(shape_element.first), shape_element.second);
+    }
+    return new_shapes;
+};
+
-const auto basicCases = ::testing::Combine(
+const auto basic_case_params = ::testing::Combine(
         ::testing::ValuesIn(ov::test::utils::combineParams(activationTypes)),
-        ::testing::ValuesIn(netPrecisions),
-        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        ::testing::Values(InferenceEngine::Layout::ANY),
-        ::testing::Values(InferenceEngine::Layout::ANY),
-        ::testing::ValuesIn(ov::test::utils::combineParams(basic)),
+        ::testing::ValuesIn(model_types),
+        ::testing::ValuesIn(static_shapes_param_transform(ov::test::utils::combineParams(basic_input_shapes_static))),
         ::testing::Values(ov::test::utils::DEVICE_CPU)
 );
 
-const auto basicPreluCases = ::testing::Combine(
+const auto basic_prelu_cases_params = ::testing::Combine(
         ::testing::ValuesIn(ov::test::utils::combineParams(activationParamTypes)),
-        ::testing::ValuesIn(netPrecisions),
-        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        ::testing::Values(InferenceEngine::Layout::ANY),
-        ::testing::Values(InferenceEngine::Layout::ANY),
-        ::testing::ValuesIn(ov::test::utils::combineParams(preluBasic)),
+        ::testing::ValuesIn(model_types),
+        ::testing::ValuesIn(static_shapes_param_transform(ov::test::utils::combineParams(prelu_basic_input_shapes_static))),
         ::testing::Values(ov::test::utils::DEVICE_CPU)
 );
 
-const auto basicIntegerOperations = ::testing::Combine(
+const auto basic_integer_operations_params = ::testing::Combine(
         ::testing::ValuesIn(ov::test::utils::combineParams(intActivationTypes)),
-        ::testing::ValuesIn(intPrecisions),
-        ::testing::ValuesIn(intPrecisions),
-        ::testing::ValuesIn(intPrecisions),
-        ::testing::Values(InferenceEngine::Layout::ANY),
-        ::testing::Values(InferenceEngine::Layout::ANY),
-        ::testing::ValuesIn(ov::test::utils::combineParams(basic)),
+        ::testing::Values(ov::element::i32),
+        ::testing::ValuesIn(static_shapes_param_transform(ov::test::utils::combineParams(basic_input_shapes_static))),
         ::testing::Values(ov::test::utils::DEVICE_CPU)
 );
 
-INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic, ActivationLayerTest, basicCases, ActivationLayerTest::getTestCaseName);
-INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic, ActivationDynamicLayerTest, basicCases, ActivationLayerTest::getTestCaseName);
-INSTANTIATE_TEST_SUITE_P(smoke_Integer_Activation_Basic, ActivationLayerTest, basicIntegerOperations, ActivationLayerTest::getTestCaseName);
+INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic, ActivationLayerTest, basic_case_params, ActivationLayerTest::getTestCaseName);
+INSTANTIATE_TEST_SUITE_P(smoke_Integer_Activation_Basic, ActivationLayerTest, basic_integer_operations_params, ActivationLayerTest::getTestCaseName);
 
-INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic_Prelu_Const, ActivationLayerTest, basicPreluCases, ActivationLayerTest::getTestCaseName);
-INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic_Prelu_Param, ActivationParamLayerTest, basicPreluCases, ActivationLayerTest::getTestCaseName);
+INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic_Prelu_Const, ActivationLayerTest, basic_prelu_cases_params, ActivationLayerTest::getTestCaseName);
+INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic_Prelu_Param, ActivationParamLayerTest, basic_prelu_cases_params, ActivationLayerTest::getTestCaseName);
 
 } // namespace
@@ -187,6 +187,8 @@ std::vector<std::string> disabledTestPatterns() {
         R"(smoke_GroupConvBackpropData.*paddingDefined/GroupConvBackpropLayerTest.Inference.*f32.*)",
         // Issue: 122177
         R"(smoke_LSTMSequenceCommon.*LSTMSequenceTest.Inference.*CONVERT_TO_TI.*)",
+        // Issue: 122081
+        R"(smoke_Activation_Basic_Prelu_Const/ActivationLayerTest.Inference/.*_TS=\(3.2.5.7\).*)",
         // Issue: 122094
         R"(smoke_Interpolate_Basic_Down_Sample_Tail/InterpolateLayerTest.Inference.*(asymmetric|align_corners).*f16.*)",
         // Need to generate sequence exactly in the i64 data type. Enable in scope of i64 enabling.
@@ -0,0 +1,29 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include "shared_test_classes/single_op/activation.hpp"
+
+namespace ov {
+namespace test {
+
+TEST_P(ActivationLayerTest, Inference) {
+    run();
+}
+
+TEST_P(ActivationParamLayerTest, Inference) {
+    run();
+}
+
+TEST_P(ActivationLayerTest, QueryModel) {
+    query_model();
+}
+
+TEST_P(ActivationParamLayerTest, QueryModel) {
+    query_model();
+}
+
+} // namespace test
+} // namespace ov
@@ -4,6 +4,9 @@
 
 #pragma once
 
+#include <map>
+#include <vector>
+
 #include "ngraph/node.hpp"
 #include "ngraph/op/proposal.hpp"
 #include "ngraph/op/power.hpp"
@@ -26,9 +29,44 @@
 #include "openvino/op/logical_or.hpp"
 #include "openvino/op/logical_xor.hpp"
 #include "openvino/op/logical_not.hpp"
 
-#include <map>
-#include <vector>
+#include "openvino/op/abs.hpp"
+#include "openvino/op/acos.hpp"
+#include "openvino/op/acosh.hpp"
+#include "openvino/op/asin.hpp"
+#include "openvino/op/asinh.hpp"
+#include "openvino/op/atan.hpp"
+#include "openvino/op/atanh.hpp"
+#include "openvino/op/ceiling.hpp"
+#include "openvino/op/clamp.hpp"
+#include "openvino/op/constant.hpp"
+#include "openvino/op/cos.hpp"
+#include "openvino/op/cosh.hpp"
+#include "openvino/op/elu.hpp"
+#include "openvino/op/erf.hpp"
+#include "openvino/op/exp.hpp"
+#include "openvino/op/floor.hpp"
+#include "openvino/op/gelu.hpp"
+#include "openvino/op/hard_sigmoid.hpp"
+#include "openvino/op/hsigmoid.hpp"
+#include "openvino/op/hswish.hpp"
+#include "openvino/op/log.hpp"
+#include "openvino/op/mish.hpp"
+#include "openvino/op/negative.hpp"
+#include "openvino/op/parameter.hpp"
+#include "openvino/op/prelu.hpp"
+#include "openvino/op/relu.hpp"
+#include "openvino/op/round.hpp"
+#include "openvino/op/selu.hpp"
+#include "openvino/op/sigmoid.hpp"
+#include "openvino/op/sign.hpp"
+#include "openvino/op/sin.hpp"
+#include "openvino/op/sinh.hpp"
+#include "openvino/op/softplus.hpp"
+#include "openvino/op/softsign.hpp"
+#include "openvino/op/sqrt.hpp"
+#include "openvino/op/swish.hpp"
+#include "openvino/op/tan.hpp"
+#include "openvino/op/tanh.hpp"
 
 namespace ov {
 namespace test {
@@ -94,6 +132,51 @@ static std::map<ov::NodeTypeInfo, std::vector<std::vector<InputGenerateData>>> i
     { ov::op::v1::LogicalOr::get_type_info_static(), {{{0, 2}}, {{0, 2, 1}}} },
     { ov::op::v1::LogicalNot::get_type_info_static(), {{{0, 2}}, {{0, 2, 1}}} },
     { ov::op::v1::LogicalXor::get_type_info_static(), {{{0, 2}}, {{0, 2, 1}}} },
     { ov::op::v7::IDFT::get_type_info_static(), {{{0, 1}}, {{0, 1, 1000000}}} },
+    { ov::op::v0::Sigmoid::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Tanh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Relu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::PRelu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Exp::get_type_info_static(), {{{0, 15}}, {{-10, 20, 32768}}} },
+    { ov::op::v0::Log::get_type_info_static(), {{{0, 15}}, {{1, 20, 32768}}} },
+    { ov::op::v0::Sign::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Abs::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Clamp::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Negative::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Acos::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v3::Acosh::get_type_info_static(), {{{1, 15}}, {{1, 200, 32768}}} },
+    { ov::op::v0::Asin::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v3::Asinh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Atan::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v3::Atanh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Cos::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Cosh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Floor::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Sin::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Sinh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Sqrt::get_type_info_static(), {{{0, 15}}, {{1, 20, 32768}}} },
+    { ov::op::v0::Tan::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Elu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Erf::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::HardSigmoid::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Selu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Sigmoid::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Tanh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Relu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Exp::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Log::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Sign::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Abs::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Gelu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v0::Ceiling::get_type_info_static(), {{{0, 15}}, {{-1000, 2000, 32768}}} },
+    { ov::op::v4::Mish::get_type_info_static(), {{{0, 15}}, {{-10, 60, 32768}}} },
+    { ov::op::v4::HSwish::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v4::SoftPlus::get_type_info_static(), {{{0, 15}}, {{-100, 200, 32768}}} },
+    { ov::op::v4::Swish::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v5::HSigmoid::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v5::Round::get_type_info_static(), {{{0, 15}}, {{-10, 20, 4}}} },
+    { ov::op::v7::Gelu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
+    { ov::op::v9::SoftSign::get_type_info_static(), {{{0, 15}}, {{-100, 200, 32768}}} },
 };
 
 } // namespace utils
@@ -0,0 +1,93 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include <vector>
+#include <tuple>
+#include <string>
+#include <map>
+#include <set>
+
+
+#include "shared_test_classes/base/ov_subgraph.hpp"
+
+#include "common_test_utils/test_enums.hpp"
+
+namespace ov {
+namespace test {
+using ov::test::utils::ActivationTypes;
+
+static std::map<ActivationTypes, std::string> activationNames = {
+        {ActivationTypes::Sigmoid, "Sigmoid"},
+        {ActivationTypes::Tanh, "Tanh"},
+        {ActivationTypes::Relu, "Relu"},
+        {ActivationTypes::LeakyRelu, "LeakyRelu"},
+        {ActivationTypes::Exp, "Exp"},
+        {ActivationTypes::Log, "Log"},
+        {ActivationTypes::Sign, "Sign"},
+        {ActivationTypes::Abs, "Abs"},
+        {ActivationTypes::Clamp, "Clamp"},
+        {ActivationTypes::Negative, "Negative"},
+        {ActivationTypes::Acos, "Acos"},
+        {ActivationTypes::Acosh, "Acosh"},
+        {ActivationTypes::Asin, "Asin"},
+        {ActivationTypes::Asinh, "Asinh"},
+        {ActivationTypes::Atan, "Atan"},
+        {ActivationTypes::Atanh, "Atanh"},
+        {ActivationTypes::Cos, "Cos"},
+        {ActivationTypes::Cosh, "Cosh"},
+        {ActivationTypes::Floor, "Floor"},
+        {ActivationTypes::Sin, "Sin"},
+        {ActivationTypes::Sinh, "Sinh"},
+        {ActivationTypes::Sqrt, "Sqrt"},
+        {ActivationTypes::Tan, "Tan"},
+        {ActivationTypes::Elu, "Elu"},
+        {ActivationTypes::Erf, "Erf"},
+        {ActivationTypes::HardSigmoid, "HardSigmoid"},
+        {ActivationTypes::Selu, "Selu"},
+        {ActivationTypes::Sigmoid, "Sigmoid"},
+        {ActivationTypes::Tanh, "Tanh"},
+        {ActivationTypes::Relu, "Relu"},
+        {ActivationTypes::Exp, "Exp"},
+        {ActivationTypes::Log, "Log"},
+        {ActivationTypes::Sign, "Sign"},
+        {ActivationTypes::Abs, "Abs"},
+        {ActivationTypes::Gelu, "Gelu"},
+        {ActivationTypes::Ceiling, "Ceiling"},
+        {ActivationTypes::PReLu, "PReLu"},
+        {ActivationTypes::Mish, "Mish"},
+        {ActivationTypes::HSwish, "HSwish"},
+        {ActivationTypes::SoftPlus, "SoftPlus"},
+        {ActivationTypes::Swish, "Swish"},
+        {ActivationTypes::HSigmoid, "HSigmoid"},
+        {ActivationTypes::RoundHalfToEven, "RoundHalfToEven"},
+        {ActivationTypes::RoundHalfAwayFromZero, "RoundHalfAwayFromZero"},
+        {ActivationTypes::GeluErf, "GeluErf"},
+        {ActivationTypes::GeluTanh, "GeluTanh"},
+        {ActivationTypes::SoftSign, "SoftSign"},
+};
+
+typedef std::tuple<
+        std::pair<ActivationTypes, std::vector<float>>, // Activation type and constant value
+        ov::element::Type,                              // Model type
+        std::pair<std::vector<InputShape>,              // Input shapes
+        ov::Shape>,                                     // 2nd input const shape
+        std::string> activationParams;
+
+class ActivationLayerTest : public testing::WithParamInterface<activationParams>,
+                            virtual public ov::test::SubgraphBaseTest {
+public:
+    static std::string getTestCaseName(const testing::TestParamInfo<activationParams> &obj);
+
+protected:
+    void SetUp() override;
+};
+
+class ActivationParamLayerTest : public ActivationLayerTest {
+protected:
+    void SetUp() override;
+};
+} // namespace test
+} // namespace ov
@@ -83,60 +83,63 @@ ov::runtime::Tensor generate(const ov::element::Type& elemType,
 }
 } // namespace Activation
 
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Abs>& node,
+ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::HardSigmoid>& node,
                              size_t port,
                              const ov::element::Type& elemType,
                              const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
+    switch (port) {
+        case 1: {
+            return ov::test::utils::create_and_fill_tensor(elemType, targetShape, 0, 0.2f);
+        }
+        case 2: {
+            return ov::test::utils::create_and_fill_tensor(elemType, targetShape, 0, 0.5f);
+        }
+        default: {
+            return Activation::generate(elemType, targetShape);
+        }
+    }
 }
 
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Acos>& node,
+ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::PRelu>& node,
                              size_t port,
                              const ov::element::Type& elemType,
                              const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape, InputGenerateData(-1, 2, 32768, 1));
+    switch (port) {
+        case 1: {
+            auto name = node->input(1).get_node()->get_friendly_name();
+            if (0 == name.compare("leakySlope")) {
+                return ov::test::utils::create_and_fill_tensor(elemType, targetShape, 0, 0.01f, 100);
+            } else if (0 == name.compare("negativeSlope")) {
+                return ov::test::utils::create_and_fill_tensor(elemType, targetShape, 0, -0.01f, 100);
+            } else {
+                return Activation::generate(elemType, targetShape);
+            }
+        }
+        default: {
+            return Activation::generate(elemType, targetShape);
+        }
+    }
 }
 
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Asin>& node,
+ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Selu>& node,
                              size_t port,
                              const ov::element::Type& elemType,
                              const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape, InputGenerateData(-1, 2, 32768, 1));
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Atan>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape, InputGenerateData(-1, 2, 32768, 1));
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Ceiling>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape, InputGenerateData(-1000, 2000, 32768, 1));
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Clamp>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Cos>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Cosh>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
+    switch (port) {
+        case 1: {
+            std::vector<float> alpha(node->get_input_shape(1).size(), 1.6732f);
+            return ov::test::utils::create_tensor<float>(elemType, targetShape, alpha, alpha.size());
+        }
+        case 2: {
+            std::vector<float> lambda(node->get_input_shape(2).size(), 1.0507f);
+            return ov::test::utils::create_tensor<float>(elemType, targetShape, lambda, lambda.size());
+        }
+        default: {
+            return Activation::generate(elemType, targetShape);
+        }
+    }
 }
 
 ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::DetectionOutput>& node,
@@ -166,56 +169,6 @@ ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::DetectionOutp
     return ov::test::utils::create_and_fill_tensor(elemType, targetShape, inGenData.range, inGenData.start_from, inGenData.resolution, inGenData.seed);
 }
 
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Elu>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Exp>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape, InputGenerateData(-10, 20, 32768, 1));
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Floor>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Gelu>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::HardSigmoid>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    switch (port) {
-        case 1: {
-            std::vector<float> alpha(node->get_input_shape(1).size(), 0.2f);
-            return ov::test::utils::create_tensor<float>(elemType, targetShape, alpha, alpha.size());
-        }
-        case 2: {
-            std::vector<float> beta(node->get_input_shape(2).size(), 0.5f);
-            return ov::test::utils::create_tensor<float>(elemType, targetShape, beta, beta.size());
-        }
-        default: {
-            return Activation::generate(elemType, targetShape);
-        }
-    }
-
-    return Activation::generate(elemType, targetShape);
-}
 
 ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::FakeQuantize>& node,
                              size_t port,
                              const ov::element::Type& elemType,
@@ -279,35 +232,6 @@ ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::FakeQuantize>
     }
 }
 
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Log>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape, InputGenerateData(1, 20, 32768, 1));
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Negative>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::PRelu>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    switch (port) {
-        case 1: {
-            std::vector<float> negativeSlope(node->get_input_shape(1).size(), -0.01f);
-            return ov::test::utils::create_tensor<float>(elemType, targetShape, negativeSlope, negativeSlope.size());
-        }
-        default: {
-            return Activation::generate(elemType, targetShape);
-        }
-    }
-}
 
 ov::runtime::Tensor generate(const std::shared_ptr<ov::op::v0::PSROIPooling>& node,
                              size_t port,
                              const ov::element::Type& elemType,
@@ -371,73 +295,6 @@ ov::runtime::Tensor generate(const std::shared_ptr<ov::op::v0::ROIPooling>& node
     return generate(std::dynamic_pointer_cast<ov::Node>(node), port, elemType, targetShape);
 }
 
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Selu>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    switch (port) {
-        case 1: {
-            std::vector<float> alpha(node->get_input_shape(1).size(), 1.6732f);
-            return ov::test::utils::create_tensor<float>(elemType, targetShape, alpha, alpha.size());
-        }
-        case 2: {
-            std::vector<float> lambda(node->get_input_shape(2).size(), 1.0507f);
-            return ov::test::utils::create_tensor<float>(elemType, targetShape, lambda, lambda.size());
-        }
-        default: {
-            return Activation::generate(elemType, targetShape);
-        }
-    }
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Sigmoid>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Sign>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Sin>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Sinh>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Sqrt>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape, InputGenerateData(1, 20, 32768, 1));
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Tan>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v0::Tanh>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
 
 ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v1::GatherTree>& node,
                              size_t port,
@@ -559,20 +416,6 @@ ov::runtime::Tensor generate(const std::shared_ptr<ov::op::v3::ROIAlign>& node,
     }
 }
 
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v4::HSwish>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v4::Mish>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
 
 ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v4::Proposal>& node,
                              size_t port,
                              const ov::element::Type& elemType,
@@ -593,20 +436,6 @@ ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v4::Proposal>& no
     return generate(std::dynamic_pointer_cast<ov::Node>(node), port, elemType, targetShape);
 }
 
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v4::SoftPlus>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
-
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v4::Swish>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
 
 ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v5::BatchNormInference>& node,
                              size_t port,
                              const ov::element::Type& elemType,
@@ -625,13 +454,6 @@ ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v5::GRUSequence>&
     return generate(std::dynamic_pointer_cast<ov::Node>(node), port, elemType, targetShape);
 }
 
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v5::HSigmoid>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape);
-}
 
 ov::runtime::Tensor generate(const std::shared_ptr<ov::op::v5::LSTMSequence>& node,
                              size_t port,
                              const ov::element::Type& elemType,
@@ -833,13 +655,6 @@ ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v5::RNNSequence>&
     return generate(std::dynamic_pointer_cast<ov::Node>(node), port, elemType, targetShape);
 }
 
-ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v5::Round>& node,
-                             size_t port,
-                             const ov::element::Type& elemType,
-                             const ov::Shape& targetShape) {
-    return Activation::generate(elemType, targetShape, InputGenerateData(-10, 20, 4));
-}
 
 ov::runtime::Tensor generate(const std::shared_ptr<ngraph::op::v8::Softmax>& node,
                              size_t port,
                              const ov::element::Type& elemType,
@@ -0,0 +1,139 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "shared_test_classes/single_op/activation.hpp"
+
+#include "ov_models/builders.hpp"
+#include "openvino/op/parameter.hpp"
+#include "openvino/op/constant.hpp"
+#include "openvino/op/result.hpp"
+
+namespace ov {
+namespace test {
+using ov::test::utils::ActivationTypes;
+
+std::string ActivationLayerTest::getTestCaseName(const testing::TestParamInfo<activationParams> &obj) {
+    ov::element::Type model_type;
+    std::pair<std::vector<InputShape>, ov::Shape> input_shapes;
+    std::string target_device;
+    std::pair<ActivationTypes, std::vector<float>> activationDecl;
+    std::tie(activationDecl, model_type, input_shapes, target_device) = obj.param;
+
+    auto shapes = input_shapes.first;
+    auto const_shape = input_shapes.second;
+
+    std::ostringstream result;
+    const char separator = '_';
+    result << "IS=(";
+    for (size_t i = 0lu; i < shapes.size(); i++) {
+        result << ov::test::utils::partialShape2str({shapes[i].first}) << (i < shapes.size() - 1lu ? "_" : "");
+    }
+    result << ")_TS=";
+    for (size_t i = 0lu; i < shapes.front().second.size(); i++) {
+        result << "{";
+        for (size_t j = 0lu; j < shapes.size(); j++) {
+            result << ov::test::utils::vec2str(shapes[j].second[i]) << (j < shapes.size() - 1lu ? "_" : "");
+        }
+        result << "}_";
+    }
+    result << "TS=" << ov::test::utils::vec2str(const_shape) << separator;
+    result << activationNames[activationDecl.first] << separator;
+    result << "constants_value=" << ov::test::utils::vec2str(activationDecl.second) << separator;
+    result << "netPRC=" << model_type.get_type_name() << separator;
+    result << "trgDev=" << target_device;
+    return result.str();
+}
+
+void ActivationLayerTest::SetUp() {
+    ov::element::Type model_type;
+    std::pair<std::vector<InputShape>, ov::Shape> input_shapes;
+    std::pair<ActivationTypes, std::vector<float>> activationDecl;
+    std::tie(activationDecl, model_type, input_shapes, targetDevice) = GetParam();
+    init_input_shapes(input_shapes.first);
+    auto const_shape = input_shapes.second;
+
+    auto activationType = activationDecl.first;
+    auto constants_value = activationDecl.second;
+
+    auto param = std::make_shared<ov::op::v0::Parameter>(model_type, inputDynamicShapes.front());
+    param->set_friendly_name("Input");
+
+    if (activationType == ActivationTypes::PReLu && constants_value.empty()) {
+        auto elemnts_count = ov::shape_size(const_shape);
+        constants_value.resize(elemnts_count);
+        std::iota(constants_value.begin(), constants_value.end(), -10);
+    }
+
+    auto activation = ngraph::builder::makeActivation(param, model_type, activationType, const_shape, constants_value);
+
+    auto result = std::make_shared<ov::op::v0::Result>(activation);
+
+    function = std::make_shared<ov::Model>(result, ov::ParameterVector{param}, "Activation");
+}
+
+void ActivationParamLayerTest::SetUp() {
+    ov::element::Type model_type;
+    std::pair<std::vector<InputShape>, ov::Shape> input_shapes;
+    std::pair<ActivationTypes, std::vector<float>> activationDecl;
+    std::tie(activationDecl, model_type, input_shapes, targetDevice) = GetParam();
+    auto shapes = input_shapes.first;
+    auto const_shape = input_shapes.second;
+
+    auto activationType = activationDecl.first;
+    auto constants_value = activationDecl.second;
+
+    switch (activationType) {
+        case ActivationTypes::PReLu:
+        case ActivationTypes::LeakyRelu: {
+            shapes.push_back(ov::test::static_shapes_to_test_representation({const_shape}).front());
+            break;
+        }
+        case ActivationTypes::HardSigmoid:
+        case ActivationTypes::Selu: {
+            shapes.push_back(ov::test::static_shapes_to_test_representation({const_shape}).front());
+            shapes.push_back(ov::test::static_shapes_to_test_representation({const_shape}).front());
+            break;
+        }
+        default:
+            OPENVINO_THROW("Unsupported activation type for Params test type");
+    }
+
+    init_input_shapes(shapes);
+
+    ov::ParameterVector params;
+    for (const auto& shape : inputDynamicShapes) {
+        params.push_back(std::make_shared<ov::op::v0::Parameter>(model_type, shape));
+    }
+
+    switch (activationType) {
+        case ActivationTypes::PReLu: {
+            params[1]->set_friendly_name("negativeSlope");
+            break;
+        }
+        case ActivationTypes::LeakyRelu: {
+            params[1]->set_friendly_name("leakySlope");
+            break;
+        }
+        case ActivationTypes::HardSigmoid: {
+            params[1]->set_friendly_name("alpha");
+            params[2]->set_friendly_name("beta");
+            break;
+        }
+        case ActivationTypes::Selu: {
+            params[1]->set_friendly_name("alpha");
+            params[2]->set_friendly_name("lambda");
+            break;
+        }
+        default:
+            OPENVINO_THROW("Unsupported activation type for Params test type");
+    }
+
+    params[0]->set_friendly_name("Input");
+
+    auto activation = ngraph::builder::makeActivation(params, model_type, activationType);
+    auto result = std::make_shared<ov::op::v0::Result>(activation);
+    function = std::make_shared<ov::Model>(result, params);
+}
+} // namespace test
+} // namespace ov