Gracefully fail if test models weren't generated (#19705)

* Gracefully fail if test models weren't generated

* Add assert instead of return `nullptr`
This commit is contained in:
Vitaliy Urusovskij 2023-09-11 15:18:45 +04:00 committed by GitHub
parent 3d872f14e4
commit 9f4e918ee2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 9 additions and 6 deletions

View File

@ -8,7 +8,7 @@ if(ENABLE_OV_ONNX_FRONTEND)
# if requirements are not installed automatically, we need to checks whether they are here
ov_check_pip_packages(REQUIREMENTS_FILE "${OpenVINO_SOURCE_DIR}/src/frontends/onnx/tests/requirements.txt"
RESULT_VAR onnx_FOUND
WARNING_MESSAGE "ONNX frontend tests will be skipped"
WARNING_MESSAGE "ONNX testing models weren't generated, some tests will fail due to .onnx models not being found"
MESSAGE_MODE WARNING)
endif()

View File

@ -7,11 +7,13 @@
#include "openvino/core/except.hpp"
#include "openvino/core/model.hpp"
#include "openvino/frontend/manager.hpp"
#include "openvino/util/file_util.hpp"
namespace ov {
namespace test {
inline std::shared_ptr<ov::Model> readModel(const std::string& model_path, const std::string& weights_path) {
OPENVINO_ASSERT(ov::util::file_exists(model_path), "Model ", model_path, " not found");
static ov::frontend::FrontEndManager manager;
ov::frontend::FrontEnd::Ptr FE;
ov::frontend::InputModel::Ptr inputModel;
@ -27,7 +29,7 @@ inline std::shared_ptr<ov::Model> readModel(const std::string& model_path, const
if (inputModel)
return FE->convert(inputModel);
OPENVINO_ASSERT(false, "Failed to read the model");
OPENVINO_ASSERT(false, "Failed to read the model ", model_path);
}
inline std::shared_ptr<ov::Model> readModel(const std::string& model) {
@ -46,7 +48,7 @@ inline std::shared_ptr<ov::Model> readModel(const std::string& model) {
if (inputModel)
return FE->convert(inputModel);
return nullptr;
OPENVINO_ASSERT(false, "Failed to read the model");
}
} // namespace test

View File

@ -36,7 +36,8 @@ endif()
if(NOT EXIT_CODE EQUAL 0)
set(paddlepaddle_FOUND OFF)
message(WARNING "Python requirement file ${PADDLE_REQ} is not installed, PaddlePaddle frontend unit tests will be skipped")
message(WARNING "Python requirement file ${PADDLE_REQ} is not installed, PaddlePaddle testing models weren't generated,
some tests will fail due to models not being found")
else()
set(paddlepaddle_FOUND ON)
endif()

View File

@ -30,7 +30,7 @@ ov_add_test_target(
ov_check_pip_packages(REQUIREMENTS_FILE "${CMAKE_CURRENT_SOURCE_DIR}/requirements.txt"
MESSAGE_MODE WARNING
WARNING_MESSAGE "TensorFlow frontend unit tests will be skipped"
WARNING_MESSAGE "TensorFlow testing models weren't generated, some tests will fail due to models not being found"
RESULT_VAR tensorflow_FOUND)
set(TEST_TENSORFLOW_MODELS_DIRNAME test_model_zoo/tensorflow_test_models)

View File

@ -24,7 +24,7 @@ ov_add_test_target(
ov_check_pip_packages(REQUIREMENTS_FILE "${CMAKE_CURRENT_SOURCE_DIR}/requirements.txt"
MESSAGE_MODE WARNING
WARNING_MESSAGE "TensorFlow Lite frontend unit tests will be skipped"
WARNING_MESSAGE "TensorFlow Lite testing models weren't generated, some tests will fail due to models not being found"
RESULT_VAR tensorflow_FOUND)
set(TEST_TENSORFLOW_LITE_MODELS_DIRNAME test_model_zoo/tensorflow_lite_test_models)