Layout: add _idx suffix for helper getters (#8196)

'layout::width' -> 'layout::width_idx', etc.
This commit is contained in:
Mikhail Nosov 2021-10-28 11:26:55 +03:00 committed by GitHub
parent 6416b73855
commit 5cb2174721
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 84 additions and 62 deletions

View File

@@ -131,8 +131,8 @@ int main(int argc, char* argv[]) {
slog::info << "Read input images" << slog::endl;
ov::Shape input_shape = model->input().get_shape();
const size_t width = input_shape[ov::layout::width(tensor_layout)];
const size_t height = input_shape[ov::layout::height(tensor_layout)];
const size_t width = input_shape[ov::layout::width_idx(tensor_layout)];
const size_t height = input_shape[ov::layout::height_idx(tensor_layout)];
std::vector<std::shared_ptr<unsigned char>> images_data;
std::vector<std::string> valid_image_names;
@@ -155,7 +155,7 @@ int main(int argc, char* argv[]) {
// -------- Step 5. Loading model to the device --------
// Setting batch size using image count
const size_t batchSize = images_data.size();
input_shape[ov::layout::batch(tensor_layout)] = batchSize;
input_shape[ov::layout::batch_idx(tensor_layout)] = batchSize;
model->reshape({{model->input().get_any_name(), input_shape}});
slog::info << "Batch size is " << std::to_string(batchSize) << slog::endl;

View File

@@ -129,8 +129,8 @@ int main(int argc, char* argv[]) {
tensor(InputTensorInfo().
set_element_type(ov::element::u8).
set_spatial_static_shape(
tensor_shape[ov::layout::height(tensor_layout)],
tensor_shape[ov::layout::width(tensor_layout)]).
tensor_shape[ov::layout::height_idx(tensor_layout)],
tensor_shape[ov::layout::width_idx(tensor_layout)]).
set_layout(tensor_layout)).
// 3) Adding explicit preprocessing steps:
// - convert layout to 'NCHW' (from 'NHWC' specified above at tensor layout)

View File

@@ -300,8 +300,8 @@ int main(int argc, char* argv[]) {
const auto input = model->input();
auto input_shape = input.get_shape();
const size_t width = input_shape[layout::width(tensor_layout)];
const size_t height = input_shape[layout::height(tensor_layout)];
const size_t width = input_shape[layout::width_idx(tensor_layout)];
const size_t height = input_shape[layout::height_idx(tensor_layout)];
std::vector<std::shared_ptr<unsigned char>> imagesData;
for (auto& i : images) {
@@ -327,7 +327,7 @@ int main(int argc, char* argv[]) {
// -------- Step 4. Reshape a model --------
// Setting batch size using image count
const size_t batch_size = imagesData.size();
input_shape[layout::batch(tensor_layout)] = batch_size;
input_shape[layout::batch_idx(tensor_layout)] = batch_size;
model->reshape({{input.get_any_name(), input_shape}});
slog::info << "Batch size is " << std::to_string(batch_size) << slog::endl;

View File

@@ -101,7 +101,7 @@ OPENVINO_API bool has_batch(const Layout& layout);
///
/// \throws ov::AssertFailure if dimension doesn't exist.
///
OPENVINO_API std::int64_t batch(const Layout& layout);
OPENVINO_API std::int64_t batch_idx(const Layout& layout);
/// \brief Checks if layout has 'channels' dimension
///
@@ -113,7 +113,7 @@ OPENVINO_API bool has_channels(const Layout& layout);
///
/// \throws ov::AssertFailure if dimension doesn't exist.
///
OPENVINO_API std::int64_t channels(const Layout& layout);
OPENVINO_API std::int64_t channels_idx(const Layout& layout);
/// \brief Checks if layout has 'depth' dimension
OPENVINO_API bool has_depth(const Layout& layout);
@@ -122,7 +122,7 @@ OPENVINO_API bool has_depth(const Layout& layout);
///
/// \throws ov::AssertFailure if dimension doesn't exist.
///
OPENVINO_API std::int64_t depth(const Layout& layout);
OPENVINO_API std::int64_t depth_idx(const Layout& layout);
/// \brief Checks if layout has 'height' dimension
OPENVINO_API bool has_height(const Layout& layout);
@@ -131,7 +131,7 @@ OPENVINO_API bool has_height(const Layout& layout);
///
/// \throws ov::AssertFailure if dimension doesn't exist.
///
OPENVINO_API std::int64_t height(const Layout& layout);
OPENVINO_API std::int64_t height_idx(const Layout& layout);
/// \brief Checks if layout has 'width' dimension
OPENVINO_API bool has_width(const Layout& layout);
@@ -140,7 +140,7 @@ OPENVINO_API bool has_width(const Layout& layout);
///
/// \throws ov::AssertFailure if dimension doesn't exist.
///
OPENVINO_API std::int64_t width(const Layout& layout);
OPENVINO_API std::int64_t width_idx(const Layout& layout);
} // namespace layout

View File

@@ -303,27 +303,49 @@ std::vector<int64_t> find_permutation(const Layout& src_layout, const Rank& rank
return res;
}
// Helper functions
//
// For every named dimension (batch/channels/depth/height/width) there is a pair:
//   has_<name>(layout) - true when the layout contains that dimension name
//   <name>_idx(layout) - index of that dimension within the layout;
//                        get_index_by_name throws ov::AssertFailure when the
//                        dimension is absent (see the header declarations).
//                        The returned index can be negative, i.e. counted from
//                        the end, for layouts using a '...' placeholder.
bool has_batch(const Layout& layout) {
return layout.has_name(BATCH);
}
// Index of the 'batch' dimension; throws if the layout has no batch.
std::int64_t batch_idx(const Layout& layout) {
return layout.get_index_by_name(BATCH);
}
bool has_depth(const Layout& layout) {
return layout.has_name(DEPTH);
}
// Index of the 'depth' dimension; throws if the layout has no depth.
std::int64_t depth_idx(const Layout& layout) {
return layout.get_index_by_name(DEPTH);
}
bool has_channels(const Layout& layout) {
return layout.has_name(CHANNELS);
}
// Index of the 'channels' dimension; throws if the layout has no channels.
std::int64_t channels_idx(const Layout& layout) {
return layout.get_index_by_name(CHANNELS);
}
bool has_height(const Layout& layout) {
return layout.has_name(HEIGHT);
}
// Index of the 'height' dimension; throws if the layout has no height.
std::int64_t height_idx(const Layout& layout) {
return layout.get_index_by_name(HEIGHT);
}
bool has_width(const Layout& layout) {
return layout.has_name(WIDTH);
}
// Index of the 'width' dimension; throws if the layout has no width.
std::int64_t width_idx(const Layout& layout) {
return layout.get_index_by_name(WIDTH);
}
} // namespace layout
// Stamps out the has_<name>() predicate and the index getter for one named
// dimension (e.g. layout::has_batch / layout::batch).
// NOTE(review): this is the pre-rename implementation that this commit
// removes - the index getter was named layout::<name>, not layout::<name>_idx.
// No comments may be placed inside the macro body: it is line-continued
// with trailing backslashes.
#define DEFINE_NAMED_DIMENSION(NAME, name) \
bool layout::has_##name(const Layout& layout) { \
return layout.has_name(NAME); \
} \
\
std::int64_t layout::name(const Layout& layout) { \
return layout.get_index_by_name(NAME); \
}
// Instantiate the helper pair for each standard dimension name.
DEFINE_NAMED_DIMENSION(BATCH, batch)
DEFINE_NAMED_DIMENSION(CHANNELS, channels)
DEFINE_NAMED_DIMENSION(DEPTH, depth)
DEFINE_NAMED_DIMENSION(HEIGHT, height)
DEFINE_NAMED_DIMENSION(WIDTH, width)
constexpr DiscreteTypeInfo AttributeAdapter<ov::Layout>::type_info;
const std::string& AttributeAdapter<ov::Layout>::get() {

View File

@@ -271,7 +271,7 @@ std::tuple<std::vector<Output<Node>>, bool> PreStepsList::reverse_channels(const
"Layout ",
context.layout().to_string(),
" doesn't have `channels` dimension");
auto channels_idx = ov::layout::channels(context.layout());
auto channels_idx = ov::layout::channels_idx(context.layout());
// Get shape of user's input tensor (e.g. Tensor[1, 3, 224, 224] -> {1, 3, 224, 224})
auto shape_of = std::make_shared<ov::op::v0::ShapeOf>(nodes[0]); // E.g. {1, 3, 224, 224}

View File

@@ -21,7 +21,7 @@ namespace preprocess {
inline size_t get_and_check_width_idx(const Layout& layout, const PartialShape& shape) {
OPENVINO_ASSERT(ov::layout::has_width(layout), "Layout ", layout.to_string(), " doesn't have `width` dimension");
OPENVINO_ASSERT(shape.rank().is_static(), "Can't get shape width index for shape with dynamic rank");
auto idx = ov::layout::width(layout);
auto idx = ov::layout::width_idx(layout);
if (idx < 0) {
idx = shape.rank().get_length() + idx;
}
@@ -34,7 +34,7 @@ inline size_t get_and_check_width_idx(const Layout& layout, const PartialShape&
inline size_t get_and_check_height_idx(const Layout& layout, const PartialShape& shape) {
OPENVINO_ASSERT(ov::layout::has_height(layout), "Layout ", layout.to_string(), " doesn't have `height` dimension");
OPENVINO_ASSERT(shape.rank().is_static(), "Can't get shape height index for shape with dynamic rank");
auto idx = ov::layout::height(layout);
auto idx = ov::layout::height_idx(layout);
if (idx < 0) {
idx = shape.rank().get_length() + idx;
}
@@ -50,7 +50,7 @@ inline size_t get_and_check_channels_idx(const Layout& layout, const PartialShap
layout.to_string(),
" doesn't have `channels` dimension");
OPENVINO_ASSERT(shape.rank().is_static(), "Can't get shape channels index for shape with dynamic rank");
auto idx = ov::layout::channels(layout);
auto idx = ov::layout::channels_idx(layout);
if (idx < 0) {
idx = shape.rank().get_length() + idx;
}

View File

@@ -11,29 +11,29 @@ using namespace ov;
// Verifies has_* and index lookup for each named dimension of the mixed-case
// layout string "NcDHw": N,c,D,H,w map to batch..width at indices 0..4, so
// one-letter dimension names match regardless of case per these checks.
// NOTE(review): this file is a diff rendering - each index check appears
// twice: the removed pre-rename call (layout::batch) immediately followed by
// the added renamed call (layout::batch_idx).
TEST(layout, basic) {
Layout l = "NcDHw";
EXPECT_TRUE(layout::has_batch(l));
EXPECT_EQ(layout::batch(l), 0);
EXPECT_EQ(layout::batch_idx(l), 0);
EXPECT_TRUE(layout::has_channels(l));
EXPECT_EQ(layout::channels(l), 1);
EXPECT_EQ(layout::channels_idx(l), 1);
EXPECT_TRUE(layout::has_depth(l));
EXPECT_EQ(layout::depth(l), 2);
EXPECT_EQ(layout::depth_idx(l), 2);
EXPECT_TRUE(layout::has_height(l));
EXPECT_EQ(layout::height(l), 3);
EXPECT_EQ(layout::height_idx(l), 3);
EXPECT_TRUE(layout::has_width(l));
EXPECT_EQ(layout::width(l), 4);
EXPECT_EQ(layout::width_idx(l), 4);
}
TEST(layout, advanced_syntax) {
Layout l = "[batch, channels, depth, height, width]";
EXPECT_TRUE(layout::has_batch(l));
EXPECT_EQ(layout::batch(l), 0);
EXPECT_EQ(layout::batch_idx(l), 0);
EXPECT_TRUE(layout::has_channels(l));
EXPECT_EQ(layout::channels(l), 1);
EXPECT_EQ(layout::channels_idx(l), 1);
EXPECT_TRUE(layout::has_depth(l));
EXPECT_EQ(layout::depth(l), 2);
EXPECT_EQ(layout::depth_idx(l), 2);
EXPECT_TRUE(layout::has_height(l));
EXPECT_EQ(layout::height(l), 3);
EXPECT_EQ(layout::height_idx(l), 3);
EXPECT_TRUE(layout::has_width(l));
EXPECT_EQ(layout::width(l), 4);
EXPECT_EQ(layout::width_idx(l), 4);
EXPECT_EQ(l, Layout("ncdhw"));
l = "[custom1, ?, custom2]";
@@ -50,15 +50,15 @@ TEST(layout, empty) {
Layout l;
EXPECT_TRUE(Layout("").empty());
EXPECT_FALSE(layout::has_batch(l));
EXPECT_THROW(layout::batch(l), ov::AssertFailure);
EXPECT_THROW(layout::batch_idx(l), ov::AssertFailure);
EXPECT_FALSE(layout::has_channels(l));
EXPECT_THROW(layout::channels(l), ov::AssertFailure);
EXPECT_THROW(layout::channels_idx(l), ov::AssertFailure);
EXPECT_FALSE(layout::has_depth(l));
EXPECT_THROW(layout::depth(l), ov::AssertFailure);
EXPECT_THROW(layout::depth_idx(l), ov::AssertFailure);
EXPECT_FALSE(layout::has_height(l));
EXPECT_THROW(layout::height(l), ov::AssertFailure);
EXPECT_THROW(layout::height_idx(l), ov::AssertFailure);
EXPECT_FALSE(layout::has_width(l));
EXPECT_THROW(layout::width(l), ov::AssertFailure);
EXPECT_THROW(layout::width_idx(l), ov::AssertFailure);
}
TEST(layout, to_string) {
@@ -77,23 +77,23 @@ TEST(layout, to_string) {
// A scalar layout has no dimensions at all: every has_* predicate is false
// and every index getter throws ov::AssertFailure.
// NOTE(review): diff rendering - each EXPECT_THROW appears twice: the removed
// pre-rename getter, then the added *_idx form.
TEST(layout, scalar) {
auto l = Layout::scalar();
EXPECT_FALSE(layout::has_batch(l));
EXPECT_THROW(layout::batch(l), ov::AssertFailure);
EXPECT_THROW(layout::batch_idx(l), ov::AssertFailure);
EXPECT_FALSE(layout::has_channels(l));
EXPECT_THROW(layout::channels(l), ov::AssertFailure);
EXPECT_THROW(layout::channels_idx(l), ov::AssertFailure);
EXPECT_FALSE(layout::has_depth(l));
EXPECT_THROW(layout::depth(l), ov::AssertFailure);
EXPECT_THROW(layout::depth_idx(l), ov::AssertFailure);
EXPECT_FALSE(layout::has_height(l));
EXPECT_THROW(layout::height(l), ov::AssertFailure);
EXPECT_THROW(layout::height_idx(l), ov::AssertFailure);
EXPECT_FALSE(layout::has_width(l));
EXPECT_THROW(layout::width(l), ov::AssertFailure);
EXPECT_THROW(layout::width_idx(l), ov::AssertFailure);
}
TEST(layout, custom_dims) {
Layout l = "0ac";
EXPECT_FALSE(layout::has_batch(l));
EXPECT_THROW(layout::batch(l), ov::AssertFailure);
EXPECT_THROW(layout::batch_idx(l), ov::AssertFailure);
EXPECT_TRUE(layout::has_channels(l));
EXPECT_EQ(layout::channels(l), 2);
EXPECT_EQ(layout::channels_idx(l), 2);
EXPECT_TRUE(l.has_name("0"));
EXPECT_TRUE(l.has_name("A"));
EXPECT_EQ(l.get_index_by_name("a"), 1);
@@ -102,9 +102,9 @@ TEST(layout, custom_dims) {
// '?' marks an anonymous dimension: it still occupies an index slot
// (channels of "n??c" sits at index 3) but has no queryable name, hence
// has_name("?") is false.
// NOTE(review): diff rendering - removed pre-rename check and added *_idx
// check are both shown.
TEST(layout, dims_unknown) {
Layout l = "n??c";
EXPECT_TRUE(layout::has_batch(l));
EXPECT_EQ(layout::batch(l), 0);
EXPECT_EQ(layout::batch_idx(l), 0);
EXPECT_TRUE(layout::has_channels(l));
EXPECT_EQ(layout::channels(l), 3);
EXPECT_EQ(layout::channels_idx(l), 3);
EXPECT_FALSE(l.has_name("?"));
EXPECT_EQ(l.get_index_by_name("C"), 3);
}
@@ -112,9 +112,9 @@ TEST(layout, dims_unknown) {
// '...' makes the layout partially defined: dimensions before it get
// non-negative indices counted from the front (batch at 1), dimensions after
// it get negative indices counted from the back (channels at -2).
// NOTE(review): diff rendering - removed pre-rename check and added *_idx
// check are both shown.
TEST(layout, dims_undefined) {
Layout l = "?n?...?c?";
EXPECT_TRUE(layout::has_batch(l));
EXPECT_EQ(layout::batch(l), 1);
EXPECT_EQ(layout::batch_idx(l), 1);
EXPECT_TRUE(layout::has_channels(l));
EXPECT_EQ(layout::channels(l), -2);
EXPECT_EQ(layout::channels_idx(l), -2);
EXPECT_FALSE(l.has_name("?"));
}