From bc6402fb2732a5d85569511b2cf8e11938715cd5 Mon Sep 17 00:00:00 2001
From: PuQing
Date: Thu, 4 May 2023 16:18:44 +0800
Subject: [PATCH] 【PaddlePaddle Hackathon 4】add paddle softshrink op (#15845)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* add softshrink

* Modify the implementation

* fix input unsigned

* fix name typo

* fix unsigned

* fix select

* add input dtype check

---------

Co-authored-by: Bo Liu
---
 src/frontends/paddle/src/op/softshrink.cpp    | 46 +++++++++++++++
 src/frontends/paddle/src/op_table.cpp         |  2 +
 src/frontends/paddle/tests/op_fuzzy.cpp       |  2 +
 .../gen_scripts/generate_softshrink.py        | 56 +++++++++++++++++++
 4 files changed, 106 insertions(+)
 create mode 100644 src/frontends/paddle/src/op/softshrink.cpp
 create mode 100644 src/frontends/paddle/tests/test_models/gen_scripts/generate_softshrink.py

diff --git a/src/frontends/paddle/src/op/softshrink.cpp b/src/frontends/paddle/src/op/softshrink.cpp
new file mode 100644
index 00000000000..2feda391336
--- /dev/null
+++ b/src/frontends/paddle/src/op/softshrink.cpp
@@ -0,0 +1,46 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "default_opset.hpp"
+#include "openvino/frontend/paddle/node_context.hpp"
+
+namespace ov {
+namespace frontend {
+namespace paddle {
+namespace op {
+NamedOutputs softshrink(const NodeContext& node) {
+    auto data = node.get_input("X");
+    const float lambda = node.get_attribute<float>("lambda", 0.5f);
+    const auto input_element_type = data.get_element_type();
+    PADDLE_OP_CHECK(node, lambda >= 0, "Softshrink op lambda must be non-negative.");
+    PADDLE_OP_CHECK(node, input_element_type.is_signed(), "Softshrink op input must be signed type.");
+
+    std::shared_ptr<Node> output;
+    const auto positive_lambda = default_opset::Constant::create(input_element_type, Shape{}, {lambda});
+    const auto negative_lambda = default_opset::Constant::create(input_element_type, Shape{}, {-lambda});
+    std::shared_ptr<Node> negative_node = std::make_shared<default_opset::Subtract>(data, positive_lambda);
+    std::shared_ptr<Node> positive_node = std::make_shared<default_opset::Add>(data, positive_lambda);
+
+    std::shared_ptr<Node> zero_node = default_opset::Constant::create(input_element_type, Shape{}, {0});
+
+    // Create masks for values below negative lambda and above positive lambda
+    std::shared_ptr<Node> values_below_neg_lambda =
+        std::make_shared<default_opset::Less>(data, negative_lambda);
+    std::shared_ptr<Node> values_above_pos_lambda =
+        std::make_shared<default_opset::Greater>(data, positive_lambda);
+
+    output = std::make_shared<default_opset::Select>(values_above_pos_lambda, negative_node, data);
+    output = std::make_shared<default_opset::Select>(values_below_neg_lambda, positive_node, output);
+
+    std::shared_ptr<Node> zero_mask =
+        std::make_shared<default_opset::LogicalOr>(values_below_neg_lambda, values_above_pos_lambda);
+
+    output = std::make_shared<default_opset::Select>(zero_mask, output, zero_node);
+
+    return node.default_single_output_mapping({output}, {"Out"});
+}
+}  // namespace op
+}  // namespace paddle
+}  // namespace frontend
+}  // namespace ov
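For context (editor's note, not part of the patch body): the converter above implements the piecewise definition softshrink(x) = x - lambda for x > lambda, x + lambda for x < -lambda, and 0 otherwise, by building Greater/Less masks and chaining three Select nodes. A minimal NumPy sketch of the same decomposition, with illustrative names, looks like this:

# Illustrative only: a NumPy reference that mirrors the converter's
# mask-and-select decomposition (Greater/Less -> two Selects,
# LogicalOr -> final Select against zero).
import numpy as np

def softshrink_reference(x: np.ndarray, lambd: float = 0.5) -> np.ndarray:
    assert lambd >= 0, "lambda must be non-negative"
    above = x > lambd                         # values_above_pos_lambda (Greater)
    below = x < -lambd                        # values_below_neg_lambda (Less)
    out = np.where(above, x - lambd, x)       # first Select: shift values above +lambda down
    out = np.where(below, x + lambd, out)     # second Select: shift values below -lambda up
    return np.where(above | below, out, 0.0)  # zero_mask Select: [-lambda, lambda] maps to 0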
{"softshrink", op::softshrink}, {"sigmoid", op::sigmoid}, {"split", op::split}, {"sqrt", op::sqrt}, diff --git a/src/frontends/paddle/tests/op_fuzzy.cpp b/src/frontends/paddle/tests/op_fuzzy.cpp index f5d6479c9dd..a967278ec1e 100644 --- a/src/frontends/paddle/tests/op_fuzzy.cpp +++ b/src/frontends/paddle/tests/op_fuzzy.cpp @@ -454,6 +454,8 @@ static const std::vector models{ std::string("softmax"), std::string("softmax_minus"), std::string("softplus_default_params"), + std::string("softshrink_default_params"), + std::string("softshrink_threshold_0.6"), std::string("split_test1"), std::string("split_test2"), std::string("split_test3"), diff --git a/src/frontends/paddle/tests/test_models/gen_scripts/generate_softshrink.py b/src/frontends/paddle/tests/test_models/gen_scripts/generate_softshrink.py new file mode 100644 index 00000000000..80ebe94e670 --- /dev/null +++ b/src/frontends/paddle/tests/test_models/gen_scripts/generate_softshrink.py @@ -0,0 +1,56 @@ +# Copyright (C) 2018-2023 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# +# softshrink paddle model generator +# +import numpy as np +import sys +from save_model import saveModel + + +def softshrink(name: str, x, threshold): + import paddle + + paddle.enable_static() + + node_x = paddle.static.data(name="x", shape=x.shape, dtype="float32") + if threshold == None: + out = paddle.nn.functional.softshrink(node_x) + else: + out = paddle.nn.functional.softshrink(node_x, threshold) + + cpu = paddle.static.cpu_places(1) + exe = paddle.static.Executor(cpu[0]) + # startup program will call initializer to initialize the parameters. + exe.run(paddle.static.default_startup_program()) + + outs = exe.run(feed={"x": x}, fetch_list=[out]) + + saveModel( + name, + exe, + feedkeys=["x"], + fetchlist=[out], + inputs=[x], + outputs=[outs[0]], + target_dir=sys.argv[1], + ) + + return outs[0] + + +def main(): + data = np.array( + [ + [[2.0, 3.0, 4.0, 5.0], [0.0, 4.0, -5.0, 6.0], [7.0, -8.0, 8.0, 9.0]], + [[-1.0, 2.0, 3.0, 4.0], [-5.0, 6.0, 7.0, 8.0], [6.0, 7.0, 8.0, 9.0]], + ] + ).astype(np.float32) + + softshrink("softshrink_default_params", data, threshold=None) + softshrink("softshrink_threshold_0.6", data, threshold=0.6) + + +if __name__ == "__main__": + main()