From 36b9de1f257ab2ef8710838e4e67079fb903edaf Mon Sep 17 00:00:00 2001
From: Xiuchuan Zhai
Date: Wed, 30 Aug 2023 08:55:57 +0800
Subject: [PATCH] enable sin/cos && fix top_k_v2 (#17525)

---
 src/frontends/paddle/src/op/cos.cpp          | 19 +++++++++
 src/frontends/paddle/src/op/sin.cpp          | 19 +++++++++
 src/frontends/paddle/src/op/top_k_v2.cpp     |  2 +-
 src/frontends/paddle/src/op_table.cpp        |  4 ++
 .../paddle/tests/convert_unsupported.cpp     |  3 +-
 src/frontends/paddle/tests/op_fuzzy.cpp      |  2 +
 .../test_models/gen_scripts/generate_cos.py  | 40 +++++++++++++++++++
 .../test_models/gen_scripts/generate_sin.py  | 40 +++++++++++++++++++
 8 files changed, 127 insertions(+), 2 deletions(-)
 create mode 100644 src/frontends/paddle/src/op/cos.cpp
 create mode 100644 src/frontends/paddle/src/op/sin.cpp
 create mode 100644 src/frontends/paddle/tests/test_models/gen_scripts/generate_cos.py
 create mode 100644 src/frontends/paddle/tests/test_models/gen_scripts/generate_sin.py

diff --git a/src/frontends/paddle/src/op/cos.cpp b/src/frontends/paddle/src/op/cos.cpp
new file mode 100644
index 00000000000..5989be9cdfc
--- /dev/null
+++ b/src/frontends/paddle/src/op/cos.cpp
@@ -0,0 +1,19 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "default_opset.hpp"
+#include "openvino/frontend/paddle/node_context.hpp"
+
+namespace ov {
+namespace frontend {
+namespace paddle {
+namespace op {
+NamedOutputs cos(const NodeContext& node) {
+    return node.default_single_output_mapping({std::make_shared<default_opset::Cos>(node.get_input("X"))}, {"Out"});
+}
+
+}  // namespace op
+}  // namespace paddle
+}  // namespace frontend
+}  // namespace ov
diff --git a/src/frontends/paddle/src/op/sin.cpp b/src/frontends/paddle/src/op/sin.cpp
new file mode 100644
index 00000000000..b129c5cce82
--- /dev/null
+++ b/src/frontends/paddle/src/op/sin.cpp
@@ -0,0 +1,19 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "default_opset.hpp"
+#include "openvino/frontend/paddle/node_context.hpp"
+
+namespace ov {
+namespace frontend {
+namespace paddle {
+namespace op {
+NamedOutputs sin(const NodeContext& node) {
+    return node.default_single_output_mapping({std::make_shared<default_opset::Sin>(node.get_input("X"))}, {"Out"});
+}
+
+}  // namespace op
+}  // namespace paddle
+}  // namespace frontend
+}  // namespace ov
diff --git a/src/frontends/paddle/src/op/top_k_v2.cpp b/src/frontends/paddle/src/op/top_k_v2.cpp
index ba9bfff9b15..8f51920f05d 100644
--- a/src/frontends/paddle/src/op/top_k_v2.cpp
+++ b/src/frontends/paddle/src/op/top_k_v2.cpp
@@ -27,7 +27,7 @@ NamedOutputs top_k_v2(const NodeContext& node) {
     std::string sort_type = sorted ? "value" : "none";
     std::string mode = largest ? "max" : "min";
"max" : "min"; - auto node_topk = std::make_shared(x, k_expected_node, axis, mode, sort_type); + auto node_topk = std::make_shared(x, k_expected_node, axis, mode, sort_type, element::i64); NamedOutputs named_outputs; named_outputs["Out"] = OutputVector{node_topk->output(0)}; diff --git a/src/frontends/paddle/src/op_table.cpp b/src/frontends/paddle/src/op_table.cpp index bf8fecd14c8..e3e39ebf2e4 100644 --- a/src/frontends/paddle/src/op_table.cpp +++ b/src/frontends/paddle/src/op_table.cpp @@ -22,6 +22,7 @@ OP_CONVERTER(concat); OP_CONVERTER(conditional_block); OP_CONVERTER(conv2d); OP_CONVERTER(conv2d_transpose); +OP_CONVERTER(cos); OP_CONVERTER(cumsum); OP_CONVERTER(deformable_conv); OP_CONVERTER(dequantize_linear); @@ -100,6 +101,7 @@ OP_CONVERTER(shape); OP_CONVERTER(share_data); OP_CONVERTER(sigmoid); OP_CONVERTER(silu); +OP_CONVERTER(sin); OP_CONVERTER(slice); OP_CONVERTER(softmax); OP_CONVERTER(softplus); @@ -142,6 +144,7 @@ std::map get_supported_ops() { {"conditional_block", op::conditional_block}, {"conv2d", op::conv2d}, {"conv2d_transpose", op::conv2d_transpose}, + {"cos", op::cos}, {"cumsum", op::cumsum}, {"deformable_conv", op::deformable_conv}, {"deformable_conv_v1", op::deformable_conv}, @@ -226,6 +229,7 @@ std::map get_supported_ops() { {"share_data", op::share_data}, {"sigmoid", op::sigmoid}, {"silu", op::silu}, + {"sin", op::sin}, {"slice", op::slice}, {"softmax", op::softmax}, {"softplus", op::softplus}, diff --git a/src/frontends/paddle/tests/convert_unsupported.cpp b/src/frontends/paddle/tests/convert_unsupported.cpp index d764167250e..e0a6c01667d 100644 --- a/src/frontends/paddle/tests/convert_unsupported.cpp +++ b/src/frontends/paddle/tests/convert_unsupported.cpp @@ -8,6 +8,7 @@ #include "common_test_utils/ngraph_test_utils.hpp" #include "paddle_utils.hpp" #include "utils.hpp" +#include "openvino/opsets/opset6.hpp" using namespace ngraph; using namespace ov::frontend; @@ -32,7 +33,7 @@ TEST(FrontEndConvertModelTest, test_unsupported_op) { for (auto& node : function->get_ordered_ops()) { if (node->get_friendly_name() == "rxyz_0.tmp_0") { - function->replace_node(node, std::make_shared(node->input(0).get_source_output())); + function->replace_node(node, std::make_shared(node->input(0).get_source_output())); } } ASSERT_NO_THROW(frontEnd->convert(function)); diff --git a/src/frontends/paddle/tests/op_fuzzy.cpp b/src/frontends/paddle/tests/op_fuzzy.cpp index 475ff4a41f1..98c8c1af597 100644 --- a/src/frontends/paddle/tests/op_fuzzy.cpp +++ b/src/frontends/paddle/tests/op_fuzzy.cpp @@ -111,6 +111,7 @@ static const std::vector models{ std::string("conv2d_transpose_strides_padding/conv2d_transpose_strides_padding.pdmodel"), std::string("conv2d_transpose_VALID_padding/conv2d_transpose_VALID_padding.pdmodel"), std::string("conv2d_VALID_padding/conv2d_VALID_padding.pdmodel"), + std::string("cos"), std::string("cumsum"), std::string("cumsum_i32"), std::string("cumsum_i64"), @@ -493,6 +494,7 @@ static const std::vector models{ std::string("silu_dynamic_test2"), std::string("silu_dynamic_test3"), std::string("silu_dynamic_test4"), + std::string("sin"), std::string("slice"), std::string("slice_1d"), std::string("slice_decrease_axis/slice_decrease_axis.pdmodel"), diff --git a/src/frontends/paddle/tests/test_models/gen_scripts/generate_cos.py b/src/frontends/paddle/tests/test_models/gen_scripts/generate_cos.py new file mode 100644 index 00000000000..41b9bae9abf --- /dev/null +++ b/src/frontends/paddle/tests/test_models/gen_scripts/generate_cos.py @@ -0,0 +1,40 @@ +# Copyright (C) 
+# SPDX-License-Identifier: Apache-2.0
+
+#
+# cos paddle model generator
+#
+import numpy as np
+from save_model import saveModel
+import paddle
+import sys
+
+data_type = 'float32'
+
+def cos(name:str, x):
+    paddle.enable_static()
+
+    with paddle.static.program_guard(paddle.static.Program(), paddle.static.Program()):
+        data = paddle.static.data(name='x', shape=x.shape, dtype = data_type)
+        out = paddle.cos(data)
+
+        cpu = paddle.static.cpu_places(1)
+        exe = paddle.static.Executor(cpu[0])
+        # startup program will call initializer to initialize the parameters.
+        exe.run(paddle.static.default_startup_program())
+
+        outs = exe.run(
+            feed={'x': x},
+            fetch_list=[out])
+
+        saveModel(name, exe, feedkeys=['x'], fetchlist=[out], inputs=[x], outputs=[outs[0]], target_dir=sys.argv[1])
+
+    return outs[0]
+
+def main():
+    x = np.random.uniform(-1000,1000, (8, 24, 32)).astype(data_type)
+
+    cos("cos", x)
+
+if __name__ == "__main__":
+    main()
diff --git a/src/frontends/paddle/tests/test_models/gen_scripts/generate_sin.py b/src/frontends/paddle/tests/test_models/gen_scripts/generate_sin.py
new file mode 100644
index 00000000000..e83405a6c79
--- /dev/null
+++ b/src/frontends/paddle/tests/test_models/gen_scripts/generate_sin.py
@@ -0,0 +1,40 @@
+# Copyright (C) 2018-2023 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+#
+# sin paddle model generator
+#
+import numpy as np
+from save_model import saveModel
+import paddle
+import sys
+
+data_type = 'float32'
+
+def sin(name:str, x):
+    paddle.enable_static()
+
+    with paddle.static.program_guard(paddle.static.Program(), paddle.static.Program()):
+        data = paddle.static.data(name='x', shape=x.shape, dtype = data_type)
+        out = paddle.sin(data)
+
+        cpu = paddle.static.cpu_places(1)
+        exe = paddle.static.Executor(cpu[0])
+        # startup program will call initializer to initialize the parameters.
+        exe.run(paddle.static.default_startup_program())
+
+        outs = exe.run(
+            feed={'x': x},
+            fetch_list=[out])
+
+        saveModel(name, exe, feedkeys=['x'], fetchlist=[out], inputs=[x], outputs=[outs[0]], target_dir=sys.argv[1])
+
+    return outs[0]
+
+def main():
+    x = np.random.uniform(-1000,1000, (8, 24, 32)).astype(data_type)
+
+    sin("sin", x)
+
+if __name__ == "__main__":
+    main()
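
As a quick local sanity check of the converters enabled by this patch, a generated test model can be read back through the Paddle frontend and compared against NumPy. The snippet below is a minimal sketch, not part of the patch: it assumes the generator above was run with a target_dir that leaves the model at cos/cos.pdmodel, and that the openvino Python package is installed.

# Sketch: load the generated "cos" Paddle model with OpenVINO and compare against NumPy.
# Assumptions (not defined by this patch): model path cos/cos.pdmodel, installed openvino package.
import numpy as np
from openvino.runtime import Core

core = Core()
model = core.read_model("cos/cos.pdmodel")   # Paddle frontend is selected from the file extension
compiled = core.compile_model(model, "CPU")

x = np.random.uniform(-10, 10, (8, 24, 32)).astype(np.float32)
result = compiled([x])[compiled.output(0)]   # single output, mapped from Paddle's "Out"

assert np.allclose(result, np.cos(x), atol=1e-4)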