Move paddle reader tests from ieFuncTests to paddle_tests (#14166)

* Move paddle reader tests from ieFuncTests to paddle_tests

Fix code style
Modify Paddle reader test to use generated model

* Enable paddle tests in Azure CI

Co-authored-by: Ilya Churaev <ilya.churaev@intel.com>
This commit is contained in:
Oleg Pipikin
2022-12-22 11:58:48 +01:00
committed by GitHub
parent b9749a984d
commit eac0be0f82
8 changed files with 103 additions and 94 deletions

View File

@@ -380,6 +380,10 @@ jobs:
displayName: 'Conditional Compilation Tests'
continueOnError: false
- script: $(RUN_PREFIX) $(INSTALL_TEST_DIR)/paddle_tests --gtest_print_time=1 --gtest_output=xml:$(INSTALL_TEST_DIR)/TEST-PaddleTests.xml
displayName: 'Paddle Tests'
continueOnError: false
- script: $(RUN_PREFIX) $(INSTALL_TEST_DIR)/ov_ir_frontend_tests --gtest_print_time=1 --gtest_output=xml:$(INSTALL_TEST_DIR)/TEST-IRFrontend.xml
displayName: 'IR Frontend Tests'
continueOnError: false

View File

@@ -278,6 +278,10 @@ jobs:
displayName: 'Conditional Compilation Tests'
continueOnError: false
- script: call $(SETUPVARS) && $(INSTALL_TEST_DIR)\paddle_tests --gtest_print_time=1 --gtest_output=xml:$(INSTALL_TEST_DIR)\TEST-PaddleTests.xml
displayName: 'Paddle Tests'
continueOnError: false
- script: call $(SETUPVARS) && $(INSTALL_TEST_DIR)\ov_ir_frontend_tests --gtest_print_time=1 --gtest_output=xml:$(INSTALL_TEST_DIR)\TEST-IRFrontend.xml
displayName: 'IR Frontend Tests'
continueOnError: false

View File

@@ -12,11 +12,12 @@ ov_add_test_target(
openvino_paddle_frontend
paddle_fe_standalone_build_test
LINK_LIBRARIES
cnpy
cnpy
frontend_shared_test_classes
openvino_paddle_frontend
openvino::runtime
openvino_paddle_frontend
openvino::runtime
gtest_main_manifest
funcTestUtils
ADD_CLANG_FORMAT
LABELS
OV

View File

@@ -1,7 +1,7 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "lib_close.hpp"
#include "functional_test_utils/lib_close.hpp"
#include <gtest/gtest.h>

View File

@@ -0,0 +1,73 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <file_utils.h>
#include <gtest/gtest.h>
#include <ie_blob.h>
#include <fstream>
#include <ie_core.hpp>
#include <set>
#include <string>
#include "common_test_utils/ngraph_test_utils.hpp"
#include "common_test_utils/unicode_utils.hpp"
#include "frontend/shared/include/utils.hpp"
#include "openvino/openvino.hpp"
#include "openvino/opsets/opset1.hpp"
#include "transformations/serialize.hpp"
// Verify that ov::Core can import a basic Paddle model (a single ReLU)
// and that the resulting ov::Model matches a hand-built reference graph,
// including friendly names and tensor names.
TEST(Paddle_Reader_Tests, ImportBasicModelToCore) {
    const auto model_path =
        FrontEndTestUtils::make_model_path(std::string(TEST_PADDLE_MODELS_DIRNAME) + "relu/relu.pdmodel");
    ov::Core core;
    const auto imported = core.read_model(model_path);

    // Reference graph: Parameter("x") -> Relu("relu_0.tmp_0") -> Result.
    const auto param = std::make_shared<ov::opset1::Parameter>(ov::element::f32, ov::Shape{3});
    param->set_friendly_name("x");
    param->output(0).get_tensor().add_names({"x"});
    const auto activation = std::make_shared<ov::opset1::Relu>(param->output(0));
    activation->set_friendly_name("relu_0.tmp_0");
    activation->output(0).get_tensor().add_names({"relu_0.tmp_0"});
    const auto out = std::make_shared<ov::opset1::Result>(activation->output(0));
    out->set_friendly_name("relu_0.tmp_0/Result");
    const auto reference = std::make_shared<ov::Model>(ov::NodeVector{out}, ov::ParameterVector{param}, "Model0");

    // Compare topology plus node/tensor names against the reference.
    const auto comparator = FunctionsComparator::with_default().enable(FunctionsComparator::NAMES);
    const auto cmp_result = comparator(imported, reference);
    ASSERT_TRUE(cmp_result.valid) << cmp_result.message;
}
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
// Same check as ImportBasicModelToCore, but the model is read through a
// wide-character (unicode) path on Windows: the model file is copied to a
// path with a unicode postfix, imported, and the copy is removed afterwards.
TEST(Paddle_Reader_Tests, ImportBasicModelToCoreWstring) {
    std::string win_dir_path{TEST_PADDLE_MODELS_DIRNAME "relu/relu.pdmodel"};
    win_dir_path = FrontEndTestUtils::make_model_path(win_dir_path);
    const std::wstring wmodel =
        CommonTestUtils::addUnicodePostfixToPath(win_dir_path, CommonTestUtils::test_unicode_postfix_vector[0]);
    if (!CommonTestUtils::copyFile(win_dir_path, wmodel)) {
        FAIL() << "Unable to copy from '" << win_dir_path << "' to '" << ov::util::wstring_to_string(wmodel) << "'";
    }
    ov::Core core;
    const auto imported = core.read_model(wmodel);
    // The unicode copy is only needed for read_model; clean it up right away.
    CommonTestUtils::removeFile(wmodel);

    // Reference graph: Parameter("x") -> Relu("relu_0.tmp_0") -> Result.
    const auto param = std::make_shared<ov::opset1::Parameter>(ov::element::f32, ov::Shape{3});
    param->set_friendly_name("x");
    param->output(0).get_tensor().add_names({"x"});
    const auto activation = std::make_shared<ov::opset1::Relu>(param->output(0));
    activation->set_friendly_name("relu_0.tmp_0");
    activation->output(0).get_tensor().add_names({"relu_0.tmp_0"});
    const auto out = std::make_shared<ov::opset1::Result>(activation->output(0));
    out->set_friendly_name("relu_0.tmp_0/Result");
    const auto reference = std::make_shared<ov::Model>(ov::NodeVector{out}, ov::ParameterVector{param}, "Model0");

    // Compare topology plus node/tensor names against the reference.
    const auto comparator = FunctionsComparator::with_default().enable(FunctionsComparator::NAMES);
    const auto cmp_result = comparator(imported, reference);
    ASSERT_TRUE(cmp_result.valid) << cmp_result.message;
}
#endif

View File

@@ -0,0 +1,17 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "functional_test_utils/skip_tests_config.hpp"
#include <string>
#include <vector>
// Returns gtest name patterns for tests that must be skipped in this build.
// For static builds (BUILD_SHARED_LIBS undefined) the frontend library
// cannot be unloaded, so the library-close tests are disabled.
std::vector<std::string> disabledTestPatterns() {
    std::vector<std::string> patterns;
#ifndef BUILD_SHARED_LIBS
    // Disable tests for static libraries
    patterns.emplace_back(".*FrontendLibCloseTest.*");
#endif
    return patterns;
}

View File

@@ -1,90 +0,0 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <gtest/gtest.h>
#include <set>
#include <string>
#include <fstream>
#include <ie_blob.h>
#include <ie_core.hpp>
#include <file_utils.h>
#include <ngraph/ngraph.hpp>
#include <ngraph/opsets/opset8.hpp>
#include "common_test_utils/ngraph_test_utils.hpp"
#include "common_test_utils/unicode_utils.hpp"
// (Removed by this commit) Legacy reader test: imports the Paddle ReLU model
// through the InferenceEngine Core API (ie.ReadNetwork) and compares the
// resulting function with a hand-built nGraph reference that includes the
// scale/bias (Multiply/Add) nodes the legacy reader appended.
TEST(Paddle_Reader_Tests, ImportBasicModelToCore) {
auto model = std::string(PADDLE_TEST_MODELS) + "relu.pdmodel";
InferenceEngine::Core ie;
auto cnnNetwork = ie.ReadNetwork(model);
auto function = cnnNetwork.getFunction();
// Reference graph: Parameter("x") -> Relu -> Multiply(scale=1) -> Add(bias=0) -> Result.
const auto inputType = ngraph::element::f32;
const auto inputShape = ngraph::Shape{ 3 };
const auto data = std::make_shared<ngraph::opset8::Parameter>(inputType, inputShape);
data->set_friendly_name("x");
data->output(0).get_tensor().add_names({ "x" });
const auto relu = std::make_shared<ngraph::opset8::Relu>(data->output(0));
relu->set_friendly_name("relu_0.tmp_0");
relu->output(0).get_tensor().add_names({ "relu_0.tmp_0" });
// Identity scale (x*1 + 0) mirrors the save_infer_model scale node emitted by Paddle export.
const auto scale = std::make_shared<ngraph::opset8::Constant>(ngraph::element::f32, ngraph::Shape{ 1 }, std::vector<float>{1});
const auto bias = std::make_shared<ngraph::opset8::Constant>(ngraph::element::f32, ngraph::Shape{ 1 }, std::vector<float>{0});
const auto node_multiply = std::make_shared<ngraph::opset8::Multiply>(relu->output(0), scale);
const auto node_add = std::make_shared<ngraph::opset8::Add>(node_multiply, bias);
node_add->set_friendly_name("save_infer_model/scale_0.tmp_1");
node_add->output(0).get_tensor().add_names({ "save_infer_model/scale_0.tmp_1" });
const auto result = std::make_shared<ngraph::opset8::Result>(node_add->output(0));
result->set_friendly_name("save_infer_model/scale_0.tmp_1/Result");
const auto reference = std::make_shared<ngraph::Function>(
ngraph::NodeVector{ result },
ngraph::ParameterVector{ data },
"RefPaddleFunction");
// NAMES comparison checks friendly names / tensor names in addition to topology.
const FunctionsComparator func_comparator = FunctionsComparator::with_default().enable(FunctionsComparator::NAMES);
const FunctionsComparator::Result res = func_comparator(function, reference);
ASSERT_TRUE(res.valid) << res.message;
}
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
// (Removed by this commit) Legacy unicode-path variant: copies the model to a
// path with a unicode postfix, reads it via ie.ReadNetwork(std::wstring), and
// compares against the same scale/bias reference graph as the test above.
TEST(Paddle_Reader_Tests, ImportBasicModelToCoreWstring) {
std::string win_dir_path{ PADDLE_TEST_MODELS "relu.pdmodel" };
std::wstring wmodel = CommonTestUtils::addUnicodePostfixToPath(win_dir_path,
CommonTestUtils::test_unicode_postfix_vector[0]);
bool is_copy_successfully = CommonTestUtils::copyFile(win_dir_path, wmodel);
if (!is_copy_successfully) {
FAIL() << "Unable to copy from '" << win_dir_path << "' to '"
<< ov::util::wstring_to_string(wmodel) << "'";
}
InferenceEngine::Core ie;
auto cnnNetwork = ie.ReadNetwork(wmodel);
// The unicode copy is only needed for ReadNetwork; removed immediately after.
CommonTestUtils::removeFile(wmodel);
auto function = cnnNetwork.getFunction();
// Reference graph: Parameter("x") -> Relu -> Multiply(scale=1) -> Add(bias=0) -> Result.
const auto inputType = ngraph::element::f32;
const auto inputShape = ngraph::Shape{ 3 };
const auto data = std::make_shared<ngraph::opset8::Parameter>(inputType, inputShape);
data->set_friendly_name("x");
data->output(0).get_tensor().add_names({ "x" });
const auto relu = std::make_shared<ngraph::opset8::Relu>(data->output(0));
relu->set_friendly_name("relu_0.tmp_0");
relu->output(0).get_tensor().add_names({ "relu_0.tmp_0" });
const auto scale = std::make_shared<ngraph::opset8::Constant>(ngraph::element::f32, ngraph::Shape{ 1 }, std::vector<float>{1});
const auto bias = std::make_shared<ngraph::opset8::Constant>(ngraph::element::f32, ngraph::Shape{ 1 }, std::vector<float>{0});
const auto node_multiply = std::make_shared<ngraph::opset8::Multiply>(relu->output(0), scale);
const auto node_add = std::make_shared<ngraph::opset8::Add>(node_multiply, bias);
node_add->set_friendly_name("save_infer_model/scale_0.tmp_1");
node_add->output(0).get_tensor().add_names({ "save_infer_model/scale_0.tmp_1" });
const auto result = std::make_shared<ngraph::opset8::Result>(node_add->output(0));
result->set_friendly_name("save_infer_model/scale_0.tmp_1/Result");
const auto reference = std::make_shared<ngraph::Function>(
ngraph::NodeVector{ result },
ngraph::ParameterVector{ data },
"RefPaddleFunction");
// NAMES comparison checks friendly names / tensor names in addition to topology.
const FunctionsComparator func_comparator = FunctionsComparator::with_default().enable(FunctionsComparator::NAMES);
const FunctionsComparator::Result res = func_comparator(function, reference);
ASSERT_TRUE(res.valid) << res.message;
}
#endif