[GPU] Add unit tests for activation functions for blocked layouts (#11948)
committed by GitHub · parent e2080cd8a4 · commit 10ccf50ee0
@@ -1499,14 +1499,12 @@ TEST(activation_f32_fw_gpu, b_fs_yx_fsv16_prelu) {
     }
 }
 
-struct activation_random_test_params {
-    data_types input_type;
-    format::type input_format;
-    tensor input_size;
-    activation_func func_type;
-    activation_additional_params additional_params;
-    padding padd;
-};
+using activation_random_test_params = std::tuple<data_types,
+                                                 format::type,                  // input_format
+                                                 tensor,                        // input_size
+                                                 activation_func,               // func_type
+                                                 activation_additional_params,  // additional_params
+                                                 padding>;
 
 struct activation_random_test : testing::TestWithParam<activation_random_test_params>
 {
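The struct → tuple change matters because gtest's testing::Combine, used by the new fp_activation_blocked_tests suite at the bottom of this diff, emits its parameter sets as std::tuple values. A minimal standalone sketch of the unpacking pattern (hypothetical stand-in types, not the cldnn ones):

```cpp
#include <string>
#include <tuple>

// Hypothetical stand-in for activation_random_test_params.
using example_params = std::tuple<int, std::string>;

void unpack(const example_params& params) {
    int size;
    std::string name;
    std::tie(size, name) = params;  // pre-C++17 unpack, as this diff does
    // C++17 alternative: const auto& [size2, name2] = params;
    (void)size;
    (void)name;
}

int main() { unpack({5, "relu"}); }
```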
@@ -1598,15 +1596,23 @@ struct activation_random_test : testing::TestWithParam<activation_random_test_pa
     void execute_compare(const activation_random_test_params& params, bool check_result) {
         auto& engine = get_test_engine();
 
-        auto in_layout = layout(params.input_type, format::bfyx, params.input_size);
+        data_types input_type;
+        format::type input_format;
+        tensor input_size;
+        activation_func func_type;
+        activation_additional_params additional_params;
+        padding padd;
+        std::tie(input_type, input_format, input_size, func_type, additional_params, padd) = params;
+        auto in_layout = layout(input_type, format::bfyx, input_size);
 
         auto in_mem = engine.allocate_memory(in_layout);
         fill_random(in_mem);
 
         /// bfyx
         cldnn::topology topo;
         topo.add(input_layout("in", in_layout));
-        auto prim = activation("activation", "in", params.func_type);
-        prim.additional_params = params.additional_params;
+        auto prim = activation("activation", "in", func_type);
+        prim.additional_params = additional_params;
         topo.add(prim);
 
         auto build_opts = build_options();
@@ -1621,18 +1627,19 @@ struct activation_random_test : testing::TestWithParam<activation_random_test_pa
 
         cldnn::topology topo_opt;
         topo_opt.add(input_layout("in", in_layout));
-        topo_opt.add(reorder("in_to_input_type", "in", params.input_format, params.input_type));
-        auto prim_opt = activation("activation_blocked", "in_to_input_type", params.func_type);
-        prim_opt.additional_params = params.additional_params;
+        topo_opt.add(reorder("in_to_input_type", "in", input_format, input_type));
+        auto prim_opt = activation("activation_blocked", "in_to_input_type", func_type);
+        prim_opt.additional_params = additional_params;
         topo_opt.add(prim_opt);
         // force output format to input format.
-        topo_opt.add(reorder("res_to_input_format", "activation_blocked", params.input_format, params.input_type));
+        topo_opt.add(reorder("res_to_input_format", "activation_blocked", input_format, input_type));
 
         auto build_opts_opt = build_options();
         build_opts_opt.set_option(build_option::outputs({"activation_blocked", "res_to_input_format"}));
         auto activation_impl_desc = implementation_desc();
-        activation_impl_desc.output_format = params.input_format;
-        build_opts_opt.set_option(build_option::force_implementations({{"activation_blocked", {params.input_format, "activation_ref"} }}));
+        activation_impl_desc.output_format = input_format;
+        build_opts_opt.set_option(
+            build_option::force_implementations({{"activation_blocked", {input_format, "activation_ref"}}}));
 
         network net_opt(engine, topo_opt, build_opts_opt);
 
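The blocked topology above wraps the activation between two reorders and pins the "activation_blocked" node to the activation_ref kernel in the requested blocked format, so the comparison exercises the reference kernel on a blocked layout against the plain bfyx run. A self-contained sketch of that two-pipeline pattern (stand-in code, not the cldnn API):

```cpp
#include <cstddef>
#include <vector>
#include <gtest/gtest.h>

// Reference path: plain relu over a flat buffer.
static std::vector<float> reference_relu(const std::vector<float>& in) {
    std::vector<float> out(in.size());
    for (std::size_t i = 0; i < in.size(); ++i)
        out[i] = in[i] > 0.0f ? in[i] : 0.0f;
    return out;
}

// Stand-in for the blocked path (reorder in, run the op, reorder out);
// an identity "reorder" keeps the sketch self-contained.
static std::vector<float> blocked_relu(const std::vector<float>& in) {
    return reference_relu(in);
}

TEST(activation_pattern_demo, reference_vs_blocked) {
    const std::vector<float> input = {-2.0f, -0.5f, 0.0f, 1.5f};
    const auto ref = reference_relu(input);
    const auto opt = blocked_relu(input);
    ASSERT_EQ(ref.size(), opt.size());
    for (std::size_t i = 0; i < ref.size(); ++i)
        EXPECT_FLOAT_EQ(ref[i], opt[i]) << "mismatch at index " << i;
}
```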
@@ -1645,16 +1652,16 @@ struct activation_random_test : testing::TestWithParam<activation_random_test_pa
 
         if (check_result == true) {
             // Check data_types
-            if (params.input_type == data_types::f32) {
+            if (input_type == data_types::f32) {
                 compare_outputs<float>(output, output_opt);
-            } else if (params.input_type == data_types::f16) {
+            } else if (input_type == data_types::f16) {
                 compare_outputs<FLOAT16>(output, output_opt);
-            } else if (params.input_type == data_types::i8) {
+            } else if (input_type == data_types::i8) {
                 compare_outputs<int8_t>(output, output_opt);
-            } else if (params.input_type == data_types::u8) {
+            } else if (input_type == data_types::u8) {
                 compare_outputs<uint8_t>(output, output_opt);
             } else {
-                FAIL() << "Not supported data type: " << static_cast<size_t>(params.input_type);
+                FAIL() << "Not supported data type: " << static_cast<size_t>(input_type);
             }
         }
     }
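compare_outputs (defined earlier in this file, not shown in the diff) is a template, so the chain above dispatches on the runtime data_types value to pick the element type. A hedged sketch of that dispatch with stand-in types:

```cpp
#include <cstddef>
#include <cstdint>

// Hypothetical stand-ins for cldnn::data_types and the typed comparison.
enum class data_types { f32, f16, i8, u8 };

template <typename T>
bool buffers_equal(const T* ref, const T* opt, std::size_t count) {
    for (std::size_t i = 0; i < count; ++i)
        if (ref[i] != opt[i]) return false;
    return true;
}

bool compare_by_type(data_types dt, const void* ref, const void* opt, std::size_t n) {
    switch (dt) {
    case data_types::f32:
        return buffers_equal(static_cast<const float*>(ref), static_cast<const float*>(opt), n);
    case data_types::i8:
        return buffers_equal(static_cast<const std::int8_t*>(ref), static_cast<const std::int8_t*>(opt), n);
    case data_types::u8:
        return buffers_equal(static_cast<const std::uint8_t*>(ref), static_cast<const std::uint8_t*>(opt), n);
    default:
        return false;  // f16 would need a half type (FLOAT16 in the real file)
    }
}
```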
@@ -1665,14 +1672,86 @@ TEST_P(activation_random_test, random) {
     execute_compare(param, true);
 }
 
-INSTANTIATE_TEST_SUITE_P(activation_blocked_tests,
-                         activation_random_test,
-                         testing::ValuesIn(
-                             std::vector<activation_random_test_params>{
-                                 { data_types::i8, format::b_fs_yx_fsv32, { 1, 32, 5, 5}, activation_func::relu, {}, {}},
-                                 { data_types::i8, format::bs_fs_yx_bsv32_fsv32, {32, 32, 5, 5}, activation_func::relu, {}, {}},
-                                 { data_types::f16, format::bs_fs_yx_bsv32_fsv16, {32, 32, 5, 5}, activation_func::relu, {}, {}},
-                                 { data_types::i8, format::bs_fs_yx_bsv32_fsv32, {16, 16, 5, 5}, activation_func::relu, {}, {}},
-                                 { data_types::f16, format::bs_fs_yx_bsv32_fsv16, {16, 16, 5, 5}, activation_func::relu, {}, {}},
-                             }
-                         ));
+const auto reluParams = testing::ValuesIn(std::vector<activation_random_test_params>{
+    {data_types::i8, format::b_fs_yx_fsv32, {1, 32, 5, 5}, activation_func::relu, {}, {}},
+    {data_types::i8, format::bs_fs_yx_bsv32_fsv32, {32, 32, 5, 5}, activation_func::relu, {}, {}},
+    {data_types::f16, format::bs_fs_yx_bsv32_fsv16, {32, 32, 5, 5}, activation_func::relu, {}, {}},
+    {data_types::i8, format::bs_fs_yx_bsv32_fsv32, {16, 16, 5, 5}, activation_func::relu, {}, {}},
+    {data_types::f16, format::bs_fs_yx_bsv32_fsv16, {16, 16, 5, 5}, activation_func::relu, {}, {}},
+});
+
+INSTANTIATE_TEST_SUITE_P(relu_activation_blocked_tests, activation_random_test, reluParams);
+
+const std::vector<data_types> dataTypes = {data_types::f16, data_types::f32};
+const std::vector<format::type> types = {format::bfyx,
+                                         format::bfzyx,
+                                         format::yxfb,
+                                         format::byxf,
+                                         format::fyxb,
+                                         format::b_fs_yx_fsv2,
+                                         format::b_fs_zyx_fsv2,
+                                         format::bs_fs_yx_bsv32_fsv32,
+                                         format::bs_fs_yx_bsv32_fsv16};
+
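On the format names: my reading (an assumption, not stated in this diff) is that b_fs_yx_fsvN keeps bfyx order but splits the feature axis into blocks of N, and bs_fs_yx_bsvM_fsvN additionally blocks the batch axis by M. A hedged index sketch under that assumption:

```cpp
#include <cstddef>

// Hedged sketch: flat offset in an assumed b_fs_yx_fsvN layout, with the
// feature count padded up to a multiple of N. Traversal order is
// b, f_block, y, x, f_inner.
std::size_t offset_b_fs_yx_fsv(std::size_t b, std::size_t f,
                               std::size_t y, std::size_t x,
                               std::size_t F, std::size_t Y, std::size_t X,
                               std::size_t N) {
    const std::size_t f_blocks = (F + N - 1) / N;  // ceil(F / N)
    return (((b * f_blocks + f / N) * Y + y) * X + x) * N + (f % N);
}
```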
+// TODO: need to investigate input for commented activation functions
+const std::vector<activation_func> activationFunctions = {activation_func::none,
+                                                          activation_func::logistic,
+                                                          activation_func::gelu,
+                                                          activation_func::hyperbolic_tan,
+                                                          activation_func::relu,
+                                                          activation_func::relu_negative_slope,
+                                                          activation_func::clamp,
+                                                          activation_func::softrelu,
+                                                          activation_func::abs,
+                                                          activation_func::linear,
+                                                          activation_func::square,
+                                                          // activation_func::sqrt,
+                                                          activation_func::elu,
+                                                          activation_func::sin,
+                                                          // activation_func::asin,
+                                                          activation_func::sinh,
+                                                          // activation_func::asinh,
+                                                          activation_func::cos,
+                                                          // activation_func::acos,
+                                                          activation_func::cosh,
+                                                          // activation_func::acosh,
+                                                          // activation_func::log,
+                                                          // activation_func::log2,
+                                                          activation_func::exp,
+                                                          activation_func::tan,
+                                                          activation_func::atan,
+                                                          // activation_func::atanh,
+                                                          activation_func::floor,
+                                                          activation_func::ceil,
+                                                          activation_func::negative,
+                                                          activation_func::negation,
+                                                          activation_func::pow,
+                                                          activation_func::reciprocal,
+                                                          activation_func::erf,
+                                                          activation_func::hard_sigmoid,
+                                                          activation_func::hsigmoid,
+                                                          activation_func::selu,
+                                                          activation_func::sign,
+                                                          activation_func::softplus,
+                                                          activation_func::swish,
+                                                          activation_func::hswish,
+                                                          activation_func::mish,
+                                                          activation_func::round_half_to_even,
+                                                          activation_func::round_half_away_from_zero,
+                                                          activation_func::gelu_tanh,
+                                                          activation_func::softsign};
+
+const std::vector<tensor> inputShapes = {
+    {1, 32, 5, 5},
+    {32, 32, 5, 5},
+    {16, 16, 5, 5},
+};
+
+const auto fpFunctionsParams = ::testing::Combine(::testing::ValuesIn(dataTypes),
+                                                  ::testing::ValuesIn(types),
+                                                  ::testing::ValuesIn(inputShapes),
+                                                  ::testing::ValuesIn(activationFunctions),
+                                                  ::testing::Values(activation_additional_params{}),
+                                                  ::testing::Values(padding{}));
+
+INSTANTIATE_TEST_SUITE_P(fp_activation_blocked_tests, activation_random_test, fpFunctionsParams);
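testing::Combine yields the Cartesian product of its generators as std::tuple values, which is why the typedef at the top of this diff had to become a tuple. By my count the suite above instantiates 2 data types × 9 formats × 3 shapes × 38 uncommented functions = 2052 cases. A minimal self-contained illustration of the mechanism:

```cpp
#include <tuple>
#include <gtest/gtest.h>

// Illustration only (not from the diff): Combine's cross product.
using combo_params = std::tuple<int, char>;

class combo_test : public testing::TestWithParam<combo_params> {};

TEST_P(combo_test, runs) {
    int n;
    char c;
    std::tie(n, c) = GetParam();
    SUCCEED() << n << c;
}

// Instantiates 2 * 2 = 4 cases: (1,'a') (1,'b') (2,'a') (2,'b').
INSTANTIATE_TEST_SUITE_P(demo, combo_test,
                         testing::Combine(testing::Values(1, 2),
                                          testing::Values('a', 'b')));
```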