diff --git a/ngraph/frontend/paddlepaddle/src/frontend.cpp b/ngraph/frontend/paddlepaddle/src/frontend.cpp
index 0ef30903455..08e9d2c7adf 100644
--- a/ngraph/frontend/paddlepaddle/src/frontend.cpp
+++ b/ngraph/frontend/paddlepaddle/src/frontend.cpp
@@ -49,8 +49,15 @@ NamedOutputs make_ng_node(const std::map>& nodes,
             named_inputs[input_port.parameter()].push_back(node_it->second);
         }
     }
+    NamedOutputs outputs;
+    // In case the conversion function throws exception
+    try {
+        outputs = creator_it->second(NodeContext(DecoderPDPDProto(op_place), named_inputs));
+    } catch (std::exception& ex) {
+        FRONT_END_OP_CONVERSION_CHECK(false, "Fail to convert " + op_desc.type() + " Exception " + ex.what());
+    }
 
-    return creator_it->second(NodeContext(DecoderPDPDProto(op_place), named_inputs));
+    return outputs;
 }
 
 NamedOutputs make_framework_node(const std::map>& nodes,
diff --git a/ngraph/frontend/paddlepaddle/src/model.cpp b/ngraph/frontend/paddlepaddle/src/model.cpp
index 22a6c330469..4ba7aaa7888 100644
--- a/ngraph/frontend/paddlepaddle/src/model.cpp
+++ b/ngraph/frontend/paddlepaddle/src/model.cpp
@@ -289,7 +289,14 @@ InputModelPDPD::InputModelPDPDImpl::InputModelPDPDImpl(const std::basic_string
     FRONT_END_GENERAL_CHECK(m_fw_ptr->ParseFromIstream(&pb_stream), "Model can't be parsed");
-
+    // According to Paddle, the saved model has the framework version
+    // For example Paddle 2.1.0 is encoded as 2001000. 0 means the latest framework.
+    // https://github.com/PaddlePaddle/Paddle/blob/develop/cmake/version.cmake
+    // https://github.com/PaddlePaddle/Paddle/blob/2100816c5190693cc7dee181e96af72e9f0fbd1d/paddle/fluid/framework/program_desc.cc#L52
+    int64_t version = m_fw_ptr->version().version();
+    FRONT_END_GENERAL_CHECK(
+        version >= 2000000 || version == 0,
+        "[Frontend]Only Support Paddle greater than 2.0.0, current version " + std::to_string(version));
     loadPlaces();
     if (weights_stream && weights_stream.is_open()) {
         loadConsts(std::basic_string{}, &weights_stream);
@@ -307,7 +314,10 @@ InputModelPDPD::InputModelPDPDImpl::InputModelPDPDImpl(const std::vector
     FRONT_END_GENERAL_CHECK(m_fw_ptr->ParseFromIstream(streams[0]), "Model can't be parsed");
-
+    int64_t version = m_fw_ptr->version().version();
+    FRONT_END_GENERAL_CHECK(
+        version >= 2000000 || version == 0,
+        "[Frontend]Only Support Paddle greater than 2.0.0, current version " + std::to_string(version));
     loadPlaces();
     if (streams.size() > 1)
         loadConsts(std::string(), streams[1]);
diff --git a/ngraph/test/frontend/paddlepaddle/test_models/gen_scripts/generate_lower_version.py b/ngraph/test/frontend/paddlepaddle/test_models/gen_scripts/generate_lower_version.py
new file mode 100644
index 00000000000..414eec0bd9b
--- /dev/null
+++ b/ngraph/test/frontend/paddlepaddle/test_models/gen_scripts/generate_lower_version.py
@@ -0,0 +1,39 @@
+import paddle
+import numpy as np
+import os
+import sys
+from paddle.fluid.proto import framework_pb2
+
+paddle.enable_static()
+
+inp_blob = np.random.randn(1, 3, 4, 4).astype(np.float32)
+print(sys.path)
+main_program = paddle.static.Program()
+startup_program = paddle.static.Program()
+
+with paddle.static.program_guard(main_program, startup_program):
+    x = paddle.static.data(name='x', shape=[1, 3, 4, 4], dtype='float32')
+    test_layer = paddle.static.nn.conv2d(input=x, num_filters=5, filter_size=(1, 1), stride=(1, 1), padding=(1, 1),
+                                         dilation=(1, 1), groups=1, bias_attr=False)
+
+    cpu = paddle.static.cpu_places(1)
+    exe = paddle.static.Executor(cpu[0])
+    exe.run(startup_program)
+    inp_dict = {'x': inp_blob}
+    var = [test_layer]
+    res_paddle = exe.run(paddle.static.default_main_program(),
+                         fetch_list=var, feed=inp_dict)
+    paddle.static.save_inference_model(os.path.join(sys.argv[1], "lower_version/", "lower_version"), [x], [test_layer], exe, program=main_program)
+
+
+fw_model = framework_pb2.ProgramDesc()
+with open(os.path.join(sys.argv[1], "lower_version", "lower_version.pdmodel"), mode='rb') as file:
+    fw_model.ParseFromString(file.read())
+
+fw_model.version.version = 1800000
+print(fw_model.version.version)
+with open(os.path.join(sys.argv[1], "lower_version", "lower_version.pdmodel"), "wb") as f:
+    f.write(fw_model.SerializeToString())
+
+
+
diff --git a/ngraph/test/frontend/paddlepaddle/test_models/gen_scripts/generate_pad3d.py b/ngraph/test/frontend/paddlepaddle/test_models/gen_scripts/generate_pad3d.py
index 51b3a81f0e9..5a711f6f83d 100644
--- a/ngraph/test/frontend/paddlepaddle/test_models/gen_scripts/generate_pad3d.py
+++ b/ngraph/test/frontend/paddlepaddle/test_models/gen_scripts/generate_pad3d.py
@@ -57,6 +57,12 @@ def main():
     input_data = np.random.rand(*input_shape).astype(np.float32)
     pad3d("pad3d_test3", input_data, in_dtype, pad, data_format, mode)
 
+    input_shape = (2, 3, 4, 5, 6)
+    pad = [1, 2, 1, 1, 1, 2]
+    mode = "circular"
+    data_format = 'NDHWC'
+    input_data = np.random.rand(*input_shape).astype(np.float32)
+    pad3d("throw_in_conversion", input_data, in_dtype, pad, data_format, mode)
     # padding of type int feature only supported by PaddlePaddle 'develop' version(>=2.1.0)
     # input_shape = (1, 2, 3, 4, 5)
     # pad_int = 1
diff --git a/ngraph/test/frontend/paddlepaddle/throw_in_conversion.cpp b/ngraph/test/frontend/paddlepaddle/throw_in_conversion.cpp
new file mode 100644
index 00000000000..3032c272202
--- /dev/null
+++ b/ngraph/test/frontend/paddlepaddle/throw_in_conversion.cpp
@@ -0,0 +1,39 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include
+#include
+
+#include "common_test_utils/ngraph_test_utils.hpp"
+#include "paddle_utils.hpp"
+#include "utils.hpp"
+
+using namespace ngraph;
+using namespace ngraph::frontend;
+
+TEST(FrontEndConvertModelTest, throw_in_conversion) {
+    FrontEndManager fem;
+    FrontEnd::Ptr frontEnd;
+    InputModel::Ptr inputModel;
+    ASSERT_NO_THROW(frontEnd = fem.load_by_framework(PADDLE_FE));
+    ASSERT_NE(frontEnd, nullptr);
+    auto model_filename = FrontEndTestUtils::make_model_path(
+        std::string(TEST_PADDLE_MODELS_DIRNAME) + std::string("throw_in_conversion/throw_in_conversion.pdmodel"));
+    ASSERT_NO_THROW(inputModel = frontEnd->load(model_filename));
+    ASSERT_NE(inputModel, nullptr);
+    std::shared_ptr function;
+    ASSERT_THROW(function = frontEnd->convert(inputModel), OpConversionFailure);
+}
+
+TEST(FrontEndConvertModelTest, unsupported_version) {
+    FrontEndManager fem;
+    FrontEnd::Ptr frontEnd;
+    InputModel::Ptr inputModel;
+    ASSERT_NO_THROW(frontEnd = fem.load_by_framework(PADDLE_FE));
+    ASSERT_NE(frontEnd, nullptr);
+    auto model_filename = FrontEndTestUtils::make_model_path(std::string(TEST_PADDLE_MODELS_DIRNAME) +
+                                                             std::string("lower_version/lower_version.pdmodel"));
+
+    ASSERT_THROW(inputModel = frontEnd->load(model_filename), GeneralFailure);
+}
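
Note on the version check added in model.cpp: the short sketch below is not part of the patch; it spells out the integer encoding that the new FRONT_END_GENERAL_CHECK relies on, following the version.cmake and program_desc.cc links cited in the diff comments. The helper names are hypothetical and shown only for illustration.

    def encode_paddle_version(major, minor, patch):
        # Per Paddle's version.cmake: MAJOR * 1000000 + MINOR * 1000 + PATCH,
        # e.g. Paddle 2.1.0 -> 2001000, matching the comment in the patch.
        return major * 1000000 + minor * 1000 + patch

    def is_supported(version):
        # Mirrors the check added above: accept anything >= 2.0.0, or 0,
        # which Paddle uses to mean "latest framework".
        return version >= 2000000 or version == 0

    assert encode_paddle_version(2, 1, 0) == 2001000
    assert is_supported(2001000)      # Paddle 2.1.0 is accepted
    assert is_supported(0)            # 0 ("latest") is accepted
    assert not is_supported(1800000)  # 1.8.0, the value written by generate_lower_version.py, is rejected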