【PaddlePaddle Hackathon 4】add paddle softshrink op (#15845)

* add softshrink

* Modify the implementation

* fix input unsigned

* fix name typo

* fix unsigned

* fix select

* add input dtype check

---------

Co-authored-by: Bo Liu <bo4.liu@intel.com>
This commit is contained in:
PuQing 2023-05-04 16:18:44 +08:00 committed by GitHub
parent c1933fcaf0
commit bc6402fb27
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 106 additions and 0 deletions

View File

@ -0,0 +1,46 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "default_opset.hpp"
#include "openvino/frontend/paddle/node_context.hpp"
namespace ov {
namespace frontend {
namespace paddle {
namespace op {
NamedOutputs softshrink(const NodeContext& node) {
    // softshrink(x) = x - lambda when x > lambda; x + lambda when x < -lambda; 0 otherwise.
    auto data = node.get_input("X");
    const float lambda = node.get_attribute<float>("lambda", 0.5f);
    const auto input_element_type = data.get_element_type();
    PADDLE_OP_CHECK(node, lambda >= 0, "Softshrink op lambda must be non-negative.");
    PADDLE_OP_CHECK(node, input_element_type.is_signed(), "Softshrink op input must be signed type.");

    // Scalar constants for the two thresholds and the in-band replacement value.
    const auto upper_threshold = default_opset::Constant::create(input_element_type, Shape{}, {lambda});
    const auto lower_threshold = default_opset::Constant::create(input_element_type, Shape{}, {-lambda});
    const auto zero_value = default_opset::Constant::create(input_element_type, Shape{}, {0});

    // Region masks: strictly below -lambda and strictly above +lambda.
    const auto below_lower = std::make_shared<default_opset::Less>(data, lower_threshold);
    const auto above_upper = std::make_shared<default_opset::Greater>(data, upper_threshold);

    // Values applied inside each shrink region.
    const auto shifted_down = std::make_shared<default_opset::Subtract>(data, upper_threshold);
    const auto shifted_up = std::make_shared<default_opset::Add>(data, upper_threshold);

    // Apply the upper region, then the lower region, then zero out everything in [-lambda, lambda].
    std::shared_ptr<ngraph::Node> result = std::make_shared<default_opset::Select>(above_upper, shifted_down, data);
    result = std::make_shared<default_opset::Select>(below_lower, shifted_up, result);
    const auto outside_band = std::make_shared<default_opset::LogicalOr>(below_lower, above_upper);
    result = std::make_shared<default_opset::Select>(outside_band, result, zero_value);

    return node.default_single_output_mapping({result}, {"Out"});
}
} // namespace op
} // namespace paddle
} // namespace frontend
} // namespace ov

View File

@ -93,6 +93,7 @@ OP_CONVERTER(shape);
OP_CONVERTER(slice);
OP_CONVERTER(softmax);
OP_CONVERTER(softplus);
OP_CONVERTER(softshrink);
OP_CONVERTER(sigmoid);
OP_CONVERTER(split);
OP_CONVERTER(sqrt);
@ -209,6 +210,7 @@ std::map<std::string, CreatorFunction> get_supported_ops() {
{"slice", op::slice},
{"softmax", op::softmax},
{"softplus", op::softplus},
{"softshrink", op::softshrink},
{"sigmoid", op::sigmoid},
{"split", op::split},
{"sqrt", op::sqrt},

View File

@ -454,6 +454,8 @@ static const std::vector<std::string> models{
std::string("softmax"),
std::string("softmax_minus"),
std::string("softplus_default_params"),
std::string("softshrink_default_params"),
std::string("softshrink_threshold_0.6"),
std::string("split_test1"),
std::string("split_test2"),
std::string("split_test3"),

View File

@ -0,0 +1,56 @@
# Copyright (C) 2018-2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
# softshrink paddle model generator
#
import numpy as np
import sys
from save_model import saveModel
def softshrink(name: str, x, threshold):
    """Build, run, and save a paddle softshrink model for frontend tests.

    Args:
        name: Model name; used as the directory name when saving.
        x: Input ndarray (expected float32, any shape).
        threshold: Softshrink lambda. ``None`` uses paddle's default (0.5).

    Returns:
        The computed output ndarray (first fetched result).
    """
    import paddle

    paddle.enable_static()

    node_x = paddle.static.data(name="x", shape=x.shape, dtype="float32")
    # Use identity comparison for None (PEP 8); `== None` can misbehave for
    # objects that override __eq__.
    if threshold is None:
        out = paddle.nn.functional.softshrink(node_x)
    else:
        out = paddle.nn.functional.softshrink(node_x, threshold)

    cpu = paddle.static.cpu_places(1)
    exe = paddle.static.Executor(cpu[0])
    # startup program will call initializer to initialize the parameters.
    exe.run(paddle.static.default_startup_program())
    outs = exe.run(feed={"x": x}, fetch_list=[out])
    saveModel(
        name,
        exe,
        feedkeys=["x"],
        fetchlist=[out],
        inputs=[x],
        outputs=[outs[0]],
        target_dir=sys.argv[1],
    )
    return outs[0]
def main():
    """Generate the two softshrink test models: default lambda and lambda=0.6."""
    sample = np.array(
        [
            [[2.0, 3.0, 4.0, 5.0], [0.0, 4.0, -5.0, 6.0], [7.0, -8.0, 8.0, 9.0]],
            [[-1.0, 2.0, 3.0, 4.0], [-5.0, 6.0, 7.0, 8.0], [6.0, 7.0, 8.0, 9.0]],
        ],
        dtype=np.float32,
    )
    softshrink("softshrink_default_params", sample, threshold=None)
    softshrink("softshrink_threshold_0.6", sample, threshold=0.6)


if __name__ == "__main__":
    main()