Fix incomprehensible error message during layout conversion when layout rank doesn't match with shape rank
commit 37064741b2 (parent 82415f00d8)
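
For context, a minimal reproduction of the scenario this commit improves, sketched after the new set_layout_out_of_bounds test below (create_simple_function is the helper already used in the pre/post-processing tests; the shape and layouts mirror that test):

    auto f = create_simple_function(element::f32, PartialShape{Dimension::dynamic(), 3, 2, 1});
    auto p = PrePostProcessor(f);
    p.input().tensor().set_layout("N???C");  // rank-5 layout
    p.input().model().set_layout("NC???");   // rank-5 layout, but the input shape has rank 4
    f = p.build();  // throws during layout conversion

Before this change the thrown error was hard to interpret; with this change the message includes both layouts, the input shape, and the mismatched rank.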
@@ -19,7 +19,7 @@ class Layout;
 
 namespace layout {
 
-std::vector<int64_t> find_permutation(const Layout& src_layout, const Rank& src_shape_rank, const Layout& dst_layout);
+std::vector<int64_t> find_permutation(const Layout& src_layout, const PartialShape& src_shape, const Layout& dst_layout);
 Layout apply_permutation(const Layout& src_layout, const std::vector<uint64_t>& dims);
 
 }  // namespace layout
@@ -88,7 +88,7 @@ private:
     friend Layout layout::apply_permutation(const Layout& src_layout, const std::vector<uint64_t>& dims);
 
     friend std::vector<int64_t> layout::find_permutation(const Layout& src_layout,
-                                                          const Rank& src_shape_rank,
+                                                          const PartialShape& src_shape,
                                                           const Layout& dst_layout);
 };
 
@@ -274,7 +274,8 @@ Layout apply_permutation(const Layout& src_layout, const std::vector<uint64_t>&
     return res;
 }
 
-std::vector<int64_t> find_permutation(const Layout& src_layout, const Rank& rank, const Layout& dst) {
+std::vector<int64_t> find_permutation(const Layout& src_layout, const PartialShape& src_shape, const Layout& dst) {
+    auto rank = src_shape.rank();
     auto check_trivial = [](std::vector<int64_t>& res) -> std::vector<int64_t>& {
         size_t i = 0;
         while (i < res.size() && res[i] == i) {
@@ -326,10 +327,21 @@ std::vector<int64_t> find_permutation(const Layout& src_layout, const Rank& rank
     auto dst_static = to_static(dst, rank);
     OPENVINO_ASSERT(src_static.m_left_size == dst_static.m_left_size,
                     "Conversion is not supported for layouts with different sizes");
+    OPENVINO_ASSERT(rank.is_dynamic() || src_static.m_left_size == rank.get_length(),
+                    "Conversion layout ",
+                    src_layout.to_string(),
+                    " <-> ",
+                    dst.to_string(),
+                    " failure. Layout is not consistent with input shape ",
+                    src_shape,
+                    ". Layout length ",
+                    src_static.m_left_size,
+                    " shall match with input shape rank ",
+                    rank.get_length());
     std::vector<int64_t> res(src_static.m_left_size, -1);
     if (src_static.m_names.size() > dst_static.m_names.size()) {
         // find inverted permutation from least specified layout to most one
-        auto inverted = find_permutation(dst_static, rank, src_static);
+        auto inverted = find_permutation(dst_static, src_shape, src_static);
         if (inverted.empty()) {
             return {};
         }
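
For illustration, the new assertion concatenates its arguments into one diagnostic; with placeholders for the streamed values, the message reads roughly:

    Conversion layout <src_layout> <-> <dst> failure. Layout is not consistent with input shape <src_shape>. Layout length <layout length> shall match with input shape rank <rank>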
@@ -399,7 +399,7 @@ std::shared_ptr<Function> PrePostProcessor::build() {
             param->get_layout() != input->get_tensor_data()->get_layout()) {
             // Find transpose between model and tensor layouts and update tensor shape
             auto net_to_tensor =
-                layout::find_permutation(param->get_layout(), net_shape.rank(), input->get_tensor_data()->get_layout());
+                layout::find_permutation(param->get_layout(), net_shape, input->get_tensor_data()->get_layout());
             if (!net_to_tensor.empty()) {
                 std::vector<ov::Dimension> dims(new_param_shape.size());
                 std::transform(net_to_tensor.begin(), net_to_tensor.end(), dims.begin(), [&](int64_t v) {
@@ -174,7 +174,7 @@ void PreStepsList::add_convert_layout_impl(const Layout& layout) {
                         "Can't convert layout for multi-plane input. Suggesting to convert current image to "
                         "RGB/BGR color format using 'convert_color'");
         Layout dst_layout = layout.empty() ? context.target_layout() : layout;
-        auto permutation = layout::find_permutation(context.layout(), nodes[0].get_partial_shape().rank(), dst_layout);
+        auto permutation = layout::find_permutation(context.layout(), nodes[0].get_partial_shape(), dst_layout);
         if (permutation.empty()) {
             // No transpose is needed, just update layout
             if (!layout.empty()) {
@@ -430,7 +430,7 @@ void PostStepsList::add_convert_impl(const element::Type& type) {
 void PostStepsList::add_convert_layout_impl(const Layout& layout) {
     m_actions.emplace_back([layout](const Output<Node>& node, PostprocessingContext& context) {
         Layout dst_layout = layout.empty() ? context.target_layout() : layout;
-        auto permutation = layout::find_permutation(context.layout(), node.get_partial_shape().rank(), dst_layout);
+        auto permutation = layout::find_permutation(context.layout(), node.get_partial_shape(), dst_layout);
         if (permutation.empty()) {
             // No transpose is needed, just update layout
             if (!layout.empty()) {
@@ -534,6 +534,33 @@ TEST(pre_post_process, reuse_model_layout_no_tensor_info) {
     EXPECT_EQ(f->get_parameters().front()->get_layout(), "NC??");
 }
 
+TEST(pre_post_process, set_layout_out_of_bounds) {
+    auto shape = PartialShape{Dimension::dynamic(), 3, 2, 1};
+    std::stringstream shape_str;
+    shape_str << shape;
+    auto f = create_simple_function(element::f32, shape);
+    Layout from{"N???C"};
+    Layout to{"NC???"};
+    // TODO: replace with EXPECT_THAT after upgrade gtest to v1.11
+    try {
+        auto p = PrePostProcessor(f);
+        p.input().tensor().set_layout(from);
+        p.input().model().set_layout(to);
+        f = p.build();
+        FAIL() << "Layout conversion shall throw";
+    } catch (const ov::Exception& err) {
+        std::cout << err.what() << "---\n";
+        // Verify that error message contains tensor and network layout
+        EXPECT_TRUE(std::string(err.what()).find(from.to_string()) != std::string::npos) << err.what();
+        EXPECT_TRUE(std::string(err.what()).find(to.to_string()) != std::string::npos) << err.what();
+        // Verify that error message contains 'shape' word
+        EXPECT_TRUE(std::string(err.what()).find(shape_str.str()) != std::string::npos) << err.what();
+    } catch (...) {
+        FAIL() << "Expected ov::Exception";
+    }
+    // EXPECT_EQ(f->get_parameters().front()->get_layout(), "NC??");
+}
+
 TEST(pre_post_process, reuse_model_layout_tensor_info) {
     auto f = create_simple_function(element::u8, PartialShape{Dimension::dynamic(), 3, 2, 1});
     f->get_parameters().front()->set_layout("NC??");
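
On the TODO left in the new test: once gtest is upgraded to v1.11, the three EXPECT_TRUE substring checks inside the catch block could be written with matchers instead, for example (a sketch, not part of this commit; requires the gmock matchers header):

    EXPECT_THAT(err.what(), ::testing::HasSubstr(from.to_string()));
    EXPECT_THAT(err.what(), ::testing::HasSubstr(to.to_string()));
    EXPECT_THAT(err.what(), ::testing::HasSubstr(shape_str.str()));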