[IE TESTS] ImportExport reporting + extension of base class (#20765)

* [IE TESTS] ImportExport reporting + extension of base class

* reuse dynamic check
Irina Efode 2023-10-31 19:00:36 +04:00 committed by GitHub
parent 8d6f56dd12
commit 8eee1b52ed
9 changed files with 164 additions and 150 deletions

View File

@@ -0,0 +1,43 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
#pragma once
#include "openvino/core/node.hpp"
#include "openvino/core/model.hpp"
namespace ov {
namespace tools {
namespace subgraph_dumper {
inline bool is_dynamic_node(const std::shared_ptr<ov::Node>& node) {
for (size_t i = 0; i < node->get_input_size(); ++i) {
if (node->get_input_partial_shape(i).is_dynamic()) {
return true;
}
}
for (size_t i = 0; i < node->get_output_size(); ++i) {
if (node->get_output_partial_shape(i).is_dynamic()) {
return true;
}
}
return false;
}
inline bool is_dynamic_model(const std::shared_ptr<ov::Model>& model) {
for (const auto& parameter : model->get_parameters()) {
if (is_dynamic_node(parameter)) {
return true;
}
}
for (const auto& result : model->get_results()) {
if (is_dynamic_node(result)) {
return true;
}
}
return false;
}
} // namespace subgraph_dumper
} // namespace tools
} // namespace ov
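
The dynamism helpers above are header-only, so both the subgraphs dumper and the conformance test code can reuse them without extra linkage. Below is a minimal sketch of their behaviour, assuming a standalone translation unit and that the header is reachable as "utils/dynamism.hpp" on the include path; the model itself is illustrative and not taken from this commit.

// Illustrative sketch only; the include path and model are assumptions.
#include "utils/dynamism.hpp"

#include <memory>

#include "openvino/op/parameter.hpp"
#include "openvino/op/relu.hpp"

int main() {
    // A parameter with a dynamic batch dimension makes the whole model dynamic.
    auto param = std::make_shared<ov::op::v0::Parameter>(
        ov::element::f32, ov::PartialShape{ov::Dimension::dynamic(), 3, 224, 224});
    auto relu = std::make_shared<ov::op::v0::Relu>(param);
    auto model = std::make_shared<ov::Model>(ov::OutputVector{relu}, ov::ParameterVector{param});

    bool node_dynamic = ov::tools::subgraph_dumper::is_dynamic_node(param);    // true
    bool model_dynamic = ov::tools::subgraph_dumper::is_dynamic_model(model);  // true
    return (node_dynamic && model_dynamic) ? 0 : 1;
}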

View File

@@ -18,6 +18,7 @@
#include "cache/cache.hpp"
#include "utils/node.hpp"
#include "utils/dynamism.hpp"
namespace ov {
namespace tools {
@@ -75,7 +76,6 @@ std::map<ModelCacheStatus, std::vector<std::string>> cache_models(
void save_model_status_to_file(const std::map<ModelCacheStatus, std::vector<std::string>>& caching_status,
const std::string& output_dir);
bool is_dynamic_model(const std::shared_ptr<ov::Model>& model);
std::string get_model_type(const std::shared_ptr<ov::Model>& model);
std::map<std::string, InputInfo>

View File

@@ -6,8 +6,11 @@
#include <memory>
#include "cache/meta/input_info.hpp"
#include "utils/dynamism.hpp"
#include "functional_test_utils/node_utils.hpp"
#include "functional_test_utils/summary/op_info.hpp"
#include "openvino/openvino.hpp"
#include "openvino/pass/manager.hpp"
#include "openvino/pass/constant_folding.hpp"
@@ -45,20 +48,6 @@ std::shared_ptr<ov::op::v0::Parameter> convert_const_to_param(const std::shared_
// all inputs are defined as parameters and contain detailed info in meta
std::shared_ptr<ov::Model> generate_model_by_node(const std::shared_ptr<ov::Node>& node);
inline bool is_dynamic_node(const std::shared_ptr<ov::Node>& node) {
for (size_t i = 0; i < node->get_input_size(); ++i) {
if (node->get_input_partial_shape(i).is_dynamic()) {
return true;
}
}
for (size_t i = 0; i < node->get_output_size(); ++i) {
if (node->get_output_partial_shape(i).is_dynamic()) {
return true;
}
}
return false;
}
inline std::string get_node_type(const std::shared_ptr<ov::Node>& node) {
if (is_dynamic_node(node)) {
return "dynamic";

View File

@@ -68,20 +68,6 @@ find_models(const std::vector<std::string> &dirs, const std::string& regexp) {
return { models, { ModelCacheStatus::NOT_READ, not_read_model } };
}
bool is_dynamic_model(const std::shared_ptr<ov::Model>& model) {
for (const auto& parameter : model->get_parameters()) {
if (is_dynamic_node(parameter)) {
return true;
}
}
for (const auto& result : model->get_results()) {
if (is_dynamic_node(result)) {
return true;
}
}
return false;
}
std::string get_model_type(const std::shared_ptr<ov::Model>& model) {
if (is_dynamic_model(model)) {
return "dynamic";

View File

@@ -13,6 +13,7 @@ ov_add_test_target(
PRIVATE
"${CMAKE_CURRENT_SOURCE_DIR}/include"
"${OpenVINO_SOURCE_DIR}/src/tests/functional/plugin/conformance/subgraphs_dumper/include/cache/meta/"
"${OpenVINO_SOURCE_DIR}/src/tests/functional/plugin/conformance/subgraphs_dumper/include/utils/"
ADD_CPPLINT
LINK_LIBRARIES
PUBLIC

View File

@@ -27,8 +27,6 @@ class ReadIRTest : public testing::WithParamInterface<ReadIRParams>,
virtual public ov::test::SubgraphBaseTest {
public:
static std::string getTestCaseName(const testing::TestParamInfo<ReadIRParams> &obj);
void query_model() override;
void import_export();
std::vector<ov::Tensor> calculate_refs() override;
protected:

View File

@@ -21,6 +21,7 @@
#include "functional_test_utils/summary/op_info.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "dynamism.hpp"
#include "input_info.hpp"
#include "conformance.hpp"
#include "read_ir_test/read_ir.hpp"
@@ -116,99 +117,6 @@ std::string ReadIRTest::getTestCaseName(const testing::TestParamInfo<ReadIRParam
return result.str();
}
void ReadIRTest::query_model() {
// in case of a crash, a jump will be made and work will continue
auto crashHandler = std::unique_ptr<ov::test::utils::CrashHandler>(new ov::test::utils::CrashHandler());
auto &s = ov::test::utils::OpSummary::getInstance();
// place to jump in case of a crash
int jmpRes = 0;
#ifdef _WIN32
jmpRes = setjmp(ov::test::utils::env);
#else
jmpRes = sigsetjmp(ov::test::utils::env, 1);
#endif
if (jmpRes == ov::test::utils::JMP_STATUS::ok) {
crashHandler->StartTimer();
if (functionRefs == nullptr) {
functionRefs = ngraph::clone_function(*function);
functionRefs->set_friendly_name("refFunction");
}
s.setDeviceName(targetDevice);
if (ov::test::utils::current_test_is_disabled()) {
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::SKIPPED, rel_influence_coef);
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
} else {
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::CRASHED, rel_influence_coef);
}
try {
SubgraphBaseTest::query_model();
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::PASSED, rel_influence_coef);
} catch (std::exception& err) {
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED, rel_influence_coef);
GTEST_FAIL() << err.what();
} catch (...) {
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::FAILED, rel_influence_coef);
GTEST_FAIL() << "Something is wrong in Query model! Please check";
}
} else if (jmpRes == ov::test::utils::JMP_STATUS::alarmErr) {
s.updateOPsStats(functionRefs, ov::test::utils::PassRate::Statuses::HANGED, rel_influence_coef);
IE_THROW() << "Crash happens";
} else if (jmpRes == ov::test::utils::JMP_STATUS::anyError) {
IE_THROW() << "Crash happens";
}
}
void ReadIRTest::import_export() {
// in case of a crash, a jump will be made and work will continue
auto crashHandler = std::unique_ptr<ov::test::utils::CrashHandler>(new ov::test::utils::CrashHandler());
auto &summary = ov::test::utils::OpSummary::getInstance();
// place to jump in case of a crash
int jmpRes = 0;
#ifdef _WIN32
jmpRes = setjmp(ov::test::utils::env);
#else
jmpRes = sigsetjmp(ov::test::utils::env, 1);
#endif
if (jmpRes == ov::test::utils::JMP_STATUS::ok) {
crashHandler->StartTimer();
summary.setDeviceName(targetDevice);
try {
ov::CompiledModel model = core->compile_model(function, targetDevice, configuration);
std::stringstream strm;
model.export_model(strm);
ov::CompiledModel importedModel = core->import_model(strm, targetDevice, configuration);
auto comparator = FunctionsComparator::with_default()
.enable(FunctionsComparator::ATTRIBUTES)
.enable(FunctionsComparator::NAMES)
.enable(FunctionsComparator::CONST_VALUES);
auto importedFunction = importedModel.get_runtime_model()->clone();
auto res = comparator.compare(importedFunction, function);
EXPECT_TRUE(res.valid) << res.message;
summary.updateOPsImplStatus(function, true);
} catch (const std::exception &e) {
summary.updateOPsImplStatus(function, false);
GTEST_FAIL() << "Exception in the Core::compile_model() method call: " << e.what();
} catch (...) {
summary.updateOPsImplStatus(function, false);
GTEST_FAIL() << "Error in the Core::query_model() method call!";
}
} else if (jmpRes == ov::test::utils::JMP_STATUS::anyError) {
summary.updateOPsImplStatus(function, false);
GTEST_FAIL() << "Crash happens";
} else if (jmpRes == ov::test::utils::JMP_STATUS::alarmErr) {
summary.updateOPsImplStatus(function, false);
GTEST_FAIL() << "Hang happens";
}
}
uint64_t clip(uint64_t n, uint64_t lower, uint64_t upper) {
return std::max(lower, std::min(n, upper));
}
@@ -267,21 +175,7 @@ void ReadIRTest::SetUp() {
}
}
bool hasDynamic = false;
for (const auto& param : function->get_parameters()) {
if (param->get_partial_shape().is_dynamic()) {
hasDynamic = true;
break;
}
}
if (!hasDynamic) {
for (const auto& result : function->get_results()) {
if (result->get_output_partial_shape(0).is_dynamic()) {
hasDynamic = true;
break;
}
}
}
bool hasDynamic = tools::subgraph_dumper::is_dynamic_model(function);
#ifdef ENABLE_CONFORMANCE_PGQL
// Updating data in runtime. Should be set before possible call of a first GTEST status

View File

@@ -31,6 +31,7 @@ public:
virtual void run();
virtual void serialize();
virtual void query_model();
virtual void import_export();
protected:
virtual void compare(const std::vector<ov::Tensor>& expected, const std::vector<ov::Tensor>& actual);

View File

@@ -128,20 +128,122 @@ void SubgraphBaseTest::serialize() {
}
void SubgraphBaseTest::query_model() {
SKIP_IF_CURRENT_TEST_IS_DISABLED();
bool isCurrentTestDisabled = ov::test::utils::current_test_is_disabled();
auto queryNetworkResult = core->query_model(function, targetDevice);
std::set<std::string> expected;
for (auto&& node : function->get_ops()) {
expected.insert(node->get_friendly_name());
}
ov::test::utils::PassRate::Statuses status = isCurrentTestDisabled ?
ov::test::utils::PassRate::Statuses::SKIPPED :
ov::test::utils::PassRate::Statuses::CRASHED;
summary.setDeviceName(targetDevice);
summary.updateOPsStats(function, status, rel_influence_coef);
std::set<std::string> actual;
for (auto&& res : queryNetworkResult) {
actual.insert(res.first);
if (isCurrentTestDisabled)
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
// in case of a crash, a jump will be made and work will continue
auto crashHandler = std::unique_ptr<ov::test::utils::CrashHandler>(new ov::test::utils::CrashHandler());
// place to jump in case of a crash
int jmpRes = 0;
#ifdef _WIN32
jmpRes = setjmp(ov::test::utils::env);
#else
jmpRes = sigsetjmp(ov::test::utils::env, 1);
#endif
if (jmpRes == ov::test::utils::JMP_STATUS::ok) {
crashHandler->StartTimer();
std::string errorMessage;
try {
auto queryNetworkResult = core->query_model(function, targetDevice);
std::set<std::string> expected;
for (auto&& node : function->get_ops()) {
expected.insert(node->get_friendly_name());
}
std::set<std::string> actual;
for (auto&& res : queryNetworkResult) {
actual.insert(res.first);
}
if (expected != actual) {
IE_THROW() << "Expected and actual are different";
}
status = ov::test::utils::PassRate::Statuses::PASSED;
} catch (const std::exception& ex) {
status = ov::test::utils::PassRate::Statuses::FAILED;
errorMessage = ex.what();
} catch (...) {
status = ov::test::utils::PassRate::Statuses::FAILED;
errorMessage = "Unknown failure occurred.";
}
summary.updateOPsStats(function, status, rel_influence_coef);
if (status != ov::test::utils::PassRate::Statuses::PASSED) {
GTEST_FATAL_FAILURE_(errorMessage.c_str());
}
} else if (jmpRes == ov::test::utils::JMP_STATUS::anyError) {
IE_THROW() << "Crash happens";
} else if (jmpRes == ov::test::utils::JMP_STATUS::alarmErr) {
summary.updateOPsStats(function, ov::test::utils::PassRate::Statuses::HANGED, rel_influence_coef);
IE_THROW() << "Crash happens";
}
if (expected != actual) {
IE_THROW() << "Expected and actual are different";
}
void SubgraphBaseTest::import_export() {
bool isCurrentTestDisabled = ov::test::utils::current_test_is_disabled();
ov::test::utils::PassRate::Statuses status = isCurrentTestDisabled ?
ov::test::utils::PassRate::Statuses::SKIPPED :
ov::test::utils::PassRate::Statuses::CRASHED;
summary.setDeviceName(targetDevice);
summary.updateOPsStats(function, status, rel_influence_coef);
if (isCurrentTestDisabled)
GTEST_SKIP() << "Disabled test due to configuration" << std::endl;
// in case of a crash, a jump will be made and work will continue
auto crashHandler = std::unique_ptr<ov::test::utils::CrashHandler>(new ov::test::utils::CrashHandler());
// place to jump in case of a crash
int jmpRes = 0;
#ifdef _WIN32
jmpRes = setjmp(ov::test::utils::env);
#else
jmpRes = sigsetjmp(ov::test::utils::env, 1);
#endif
if (jmpRes == ov::test::utils::JMP_STATUS::ok) {
crashHandler->StartTimer();
std::string errorMessage;
try {
compile_model();
std::stringstream strm;
compiledModel.export_model(strm);
ov::CompiledModel importedModel = core->import_model(strm, targetDevice, configuration);
auto importedFunction = importedModel.get_runtime_model()->clone();
auto comparator = FunctionsComparator::with_default()
.enable(FunctionsComparator::ATTRIBUTES)
.enable(FunctionsComparator::NAMES)
.enable(FunctionsComparator::CONST_VALUES);
auto res = comparator.compare(importedFunction, function);
if (!res.valid) {
throw std::runtime_error(res.message);
}
status = ov::test::utils::PassRate::Statuses::PASSED;
} catch (const std::exception& ex) {
status = ov::test::utils::PassRate::Statuses::FAILED;
errorMessage = ex.what();
} catch (...) {
status = ov::test::utils::PassRate::Statuses::FAILED;
errorMessage = "Unknown failure occurred.";
}
summary.updateOPsStats(function, status, rel_influence_coef);
if (status != ov::test::utils::PassRate::Statuses::PASSED) {
GTEST_FATAL_FAILURE_(errorMessage.c_str());
}
} else if (jmpRes == ov::test::utils::JMP_STATUS::anyError) {
IE_THROW() << "Crash happens";
} else if (jmpRes == ov::test::utils::JMP_STATUS::alarmErr) {
summary.updateOPsStats(function, ov::test::utils::PassRate::Statuses::HANGED, rel_influence_coef);
IE_THROW() << "Crash happens";
}
}
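
With the crash-guarded flow now living in SubgraphBaseTest, derived suites such as ReadIRTest no longer carry their own import/export logic and can simply call the shared entry point. Below is a minimal sketch of such a caller, assuming the usual shared_test_classes include layout; the fixture name, the model and the device choice are assumptions and are not part of this commit.

// Illustrative sketch only; fixture, model and device are assumptions.
#include "shared_test_classes/base/ov_subgraph.hpp"

#include <memory>

#include "openvino/op/parameter.hpp"
#include "openvino/op/relu.hpp"

namespace {

class StaticReluImportExportTest : public ov::test::SubgraphBaseTest {
protected:
    void SetUp() override {
        targetDevice = "TEMPLATE";  // assumption: the reference TEMPLATE plugin is registered in the test binary
        // Small static Parameter -> Relu model; real suites fill `function` from their test parameters.
        auto param = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1, 3, 8, 8});
        auto relu = std::make_shared<ov::op::v0::Relu>(param);
        function = std::make_shared<ov::Model>(ov::OutputVector{relu}, ov::ParameterVector{param});
    }
};

TEST_F(StaticReluImportExportTest, ImportExport) {
    // Compile -> export_model -> import_model -> compare, with crash handling and
    // OpSummary reporting performed once in SubgraphBaseTest::import_export().
    import_export();
}

}  // namespace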