priorbox_8 ngraph and inference-engine parts (#8114)

* ngraph and inference-engine parts

* add priorbox_8 Python API

* remove 'PriorBoxAttrs' and 'PriorBox' aliases from outside the opset namespace

* add common nGraph transformation 'ConvertPriorBox8To0'

* remove redundant alias of PriorBox::Attributes

* use the new Tensor API for the evaluate method

* change the v0 operation back to the former API; pass the Attributes structure to the reference implementation

* use the new Tensor API for constant_fold

* add support for dynamic shapes in constant_fold with the new Tensor API

* fix Node 'create temp tensors' issue when shape == 0

* revert to the 'HostTensor' API for PriorBox8

* apply suggestions from code review; replace the 'backend INTERPRETER' test case with a 'template_plugin reference' test case

* transformation part: apply suggestions from code review

* update Python init file for opset8

* keep backward compatibility to fix CI issue

* rebase onto the new structure of the OpenVINO repo

* revert accidental changes to 'thirdparty/onednn_gpu'
Bo Liu 2021-11-29 12:28:49 +08:00 committed by GitHub
parent 07651aa5c0
commit 25b2131d21
26 changed files with 1056 additions and 127 deletions

View File

@ -2,11 +2,12 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "openvino/op/prior_box.hpp"
#include <gtest/gtest.h>
#include "openvino/op/prior_box.hpp"
#include "base_reference_test.hpp"
#include "openvino/opsets/opset1.hpp"
#include "openvino/op/constant.hpp"
using namespace reference_tests;
using namespace ov;
@ -17,9 +18,11 @@ struct PriorBoxParams {
PriorBoxParams(const std::vector<float>& min_size,
const std::vector<float>& aspect_ratio,
const bool scale_all_size,
const ov::Shape& layerShapeShape, const ov::Shape& imageShapeShape,
const ov::Shape& layerShapeShape,
const ov::Shape& imageShapeShape,
const ov::element::Type& iType,
const std::vector<IT>& layerShapeValues, const std::vector<IT>& imageShapeValues,
const std::vector<IT>& layerShapeValues,
const std::vector<IT>& imageShapeValues,
const std::vector<float>& oValues,
const std::string& testcaseName = "")
: layerShapeShape(layerShapeShape),
@ -30,10 +33,10 @@ struct PriorBoxParams {
imageShapeData(CreateTensor(iType, imageShapeValues)),
refData(CreateTensor(outType, oValues)),
testcaseName(testcaseName) {
attrs.min_size = min_size;
attrs.aspect_ratio = aspect_ratio;
attrs.scale_all_sizes = scale_all_size;
}
attrs.min_size = min_size;
attrs.aspect_ratio = aspect_ratio;
attrs.scale_all_sizes = scale_all_size;
}
ov::op::v0::PriorBox::Attributes attrs;
ov::Shape layerShapeShape;
@ -46,6 +49,46 @@ struct PriorBoxParams {
std::string testcaseName;
};
struct PriorBoxV8Params {
template <class IT>
PriorBoxV8Params(const std::vector<float>& min_size,
const std::vector<float>& max_size,
const std::vector<float>& aspect_ratio,
const bool scale_all_size,
const bool min_max_aspect_ratios_order,
const ov::Shape& layerShapeShape,
const ov::Shape& imageShapeShape,
const ov::element::Type& iType,
const std::vector<IT>& layerShapeValues,
const std::vector<IT>& imageShapeValues,
const std::vector<float>& oValues,
const std::string& testcaseName = "")
: layerShapeShape(layerShapeShape),
imageShapeShape(imageShapeShape),
inType(iType),
outType(ov::element::Type_t::f32),
layerShapeData(CreateTensor(iType, layerShapeValues)),
imageShapeData(CreateTensor(iType, imageShapeValues)),
refData(CreateTensor(outType, oValues)),
testcaseName(testcaseName) {
attrs.min_size = min_size;
attrs.max_size = max_size;
attrs.aspect_ratio = aspect_ratio;
attrs.scale_all_sizes = scale_all_size;
attrs.min_max_aspect_ratios_order = min_max_aspect_ratios_order;
}
ov::op::v8::PriorBox::Attributes attrs;
ov::Shape layerShapeShape;
ov::Shape imageShapeShape;
ov::element::Type inType;
ov::element::Type outType;
ov::runtime::Tensor layerShapeData;
ov::runtime::Tensor imageShapeData;
ov::runtime::Tensor refData;
std::string testcaseName;
};
class ReferencePriorBoxLayerTest : public testing::TestWithParam<PriorBoxParams>, public CommonReferenceTest {
public:
void SetUp() override {
@ -68,10 +111,43 @@ public:
private:
static std::shared_ptr<Function> CreateFunction(const PriorBoxParams& params) {
auto LS = std::make_shared<opset1::Constant>(params.inType, params.layerShapeShape, params.layerShapeData.data());
auto IS = std::make_shared<opset1::Constant>(params.inType, params.imageShapeShape, params.imageShapeData.data());
auto LS =
std::make_shared<op::v0::Constant>(params.inType, params.layerShapeShape, params.layerShapeData.data());
auto IS =
std::make_shared<op::v0::Constant>(params.inType, params.imageShapeShape, params.imageShapeData.data());
const auto PriorBox = std::make_shared<op::v0::PriorBox>(LS, IS, params.attrs);
return std::make_shared<ov::Function>(NodeVector {PriorBox}, ParameterVector {});
return std::make_shared<ov::Function>(NodeVector{PriorBox}, ParameterVector{});
}
};
class ReferencePriorBoxV8LayerTest : public testing::TestWithParam<PriorBoxV8Params>, public CommonReferenceTest {
public:
void SetUp() override {
auto params = GetParam();
function = CreateFunction(params);
inputData = {};
refOutData = {params.refData};
}
static std::string getTestCaseName(const testing::TestParamInfo<PriorBoxV8Params>& obj) {
auto param = obj.param;
std::ostringstream result;
result << "layerShapeShape=" << param.layerShapeShape << "_";
result << "imageShapeShape=" << param.imageShapeShape << "_";
result << "iType=" << param.inType << "_";
result << "oType=" << param.outType;
if (param.testcaseName != "")
result << "_" << param.testcaseName;
return result.str();
}
private:
static std::shared_ptr<Function> CreateFunction(const PriorBoxV8Params& params) {
auto LS =
std::make_shared<op::v0::Constant>(params.inType, params.layerShapeShape, params.layerShapeData.data());
auto IS =
std::make_shared<op::v0::Constant>(params.inType, params.imageShapeShape, params.imageShapeData.data());
const auto PriorBoxV8 = std::make_shared<op::v8::PriorBox>(LS, IS, params.attrs);
return std::make_shared<ov::Function>(NodeVector{PriorBoxV8}, ParameterVector{});
}
};
@ -79,13 +155,20 @@ TEST_P(ReferencePriorBoxLayerTest, CompareWithRefs) {
Exec();
}
TEST_P(ReferencePriorBoxV8LayerTest, CompareWithRefs) {
Exec();
}
template <element::Type_t IN_ET>
std::vector<PriorBoxParams> generatePriorBoxFloatParams() {
using T = typename element_type_traits<IN_ET>::value_type;
std::vector<PriorBoxParams> priorBoxParams {
PriorBoxParams({2.0f}, {1.5f}, false,
{2}, {2},
std::vector<PriorBoxParams> priorBoxParams{
PriorBoxParams({2.0f},
{1.5f},
false,
{2},
{2},
IN_ET,
std::vector<T>{2, 2},
std::vector<T>{10, 10},
@ -101,8 +184,37 @@ std::vector<PriorBoxParams> generatePriorBoxFloatParams() {
return priorBoxParams;
}
template <element::Type_t IN_ET>
std::vector<PriorBoxV8Params> generatePriorBoxV8FloatParams() {
using T = typename element_type_traits<IN_ET>::value_type;
std::vector<PriorBoxV8Params> priorBoxV8Params{
PriorBoxV8Params(
{2.0f},
{5.0f},
{1.5f},
true,
false,
{2},
{2},
IN_ET,
std::vector<T>{2, 2},
std::vector<T>{10, 10},
std::vector<float>{
0.15, 0.15, 0.35, 0.35, 0.127526, 0.16835, 0.372474, 0.33165, 0.0918861, 0.0918861, 0.408114, 0.408114,
0.65, 0.15, 0.85, 0.35, 0.627526, 0.16835, 0.872474, 0.33165, 0.591886, 0.0918861, 0.908114, 0.408114,
0.15, 0.65, 0.35, 0.85, 0.127526, 0.66835, 0.372474, 0.83165, 0.0918861, 0.591886, 0.408114, 0.908114,
0.65, 0.65, 0.85, 0.85, 0.627526, 0.66835, 0.872474, 0.83165, 0.591886, 0.591886, 0.908114, 0.908114,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1}),
};
return priorBoxV8Params;
}
std::vector<PriorBoxParams> generatePriorBoxCombinedParams() {
const std::vector<std::vector<PriorBoxParams>> priorBoxTypeParams {
const std::vector<std::vector<PriorBoxParams>> priorBoxTypeParams{
generatePriorBoxFloatParams<element::Type_t::i64>(),
generatePriorBoxFloatParams<element::Type_t::i32>(),
generatePriorBoxFloatParams<element::Type_t::i16>(),
@ -111,7 +223,7 @@ std::vector<PriorBoxParams> generatePriorBoxCombinedParams() {
generatePriorBoxFloatParams<element::Type_t::u32>(),
generatePriorBoxFloatParams<element::Type_t::u16>(),
generatePriorBoxFloatParams<element::Type_t::u8>(),
};
};
std::vector<PriorBoxParams> combinedParams;
for (const auto& params : priorBoxTypeParams) {
@ -120,7 +232,32 @@ std::vector<PriorBoxParams> generatePriorBoxCombinedParams() {
return combinedParams;
}
INSTANTIATE_TEST_SUITE_P(smoke_PriorBox_With_Hardcoded_Refs, ReferencePriorBoxLayerTest,
testing::ValuesIn(generatePriorBoxCombinedParams()), ReferencePriorBoxLayerTest::getTestCaseName);
std::vector<PriorBoxV8Params> generatePriorBoxV8CombinedParams() {
const std::vector<std::vector<PriorBoxV8Params>> priorBoxV8TypeParams{
generatePriorBoxV8FloatParams<element::Type_t::i64>(),
generatePriorBoxV8FloatParams<element::Type_t::i32>(),
generatePriorBoxV8FloatParams<element::Type_t::i16>(),
generatePriorBoxV8FloatParams<element::Type_t::i8>(),
generatePriorBoxV8FloatParams<element::Type_t::u64>(),
generatePriorBoxV8FloatParams<element::Type_t::u32>(),
generatePriorBoxV8FloatParams<element::Type_t::u16>(),
generatePriorBoxV8FloatParams<element::Type_t::u8>(),
};
std::vector<PriorBoxV8Params> combinedParams;
for (const auto& params : priorBoxV8TypeParams) {
combinedParams.insert(combinedParams.end(), params.begin(), params.end());
}
return combinedParams;
}
INSTANTIATE_TEST_SUITE_P(smoke_PriorBox_With_Hardcoded_Refs,
ReferencePriorBoxLayerTest,
testing::ValuesIn(generatePriorBoxCombinedParams()),
ReferencePriorBoxLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_PriorBoxV8_With_Hardcoded_Refs,
ReferencePriorBoxV8LayerTest,
testing::ValuesIn(generatePriorBoxV8CombinedParams()),
ReferencePriorBoxV8LayerTest::getTestCaseName);
} // namespace

View File

@ -66,6 +66,10 @@ namespace {
true, false
};
const std::vector<bool> min_max_aspect_ratios_order = {
true, false
};
const std::vector<size_t> inputShape = {128, 128};
const std::vector<size_t> imageShape = {50, 50};
@ -81,7 +85,8 @@ namespace {
::testing::ValuesIn(steps),
::testing::ValuesIn(offsets),
::testing::ValuesIn(variances),
::testing::ValuesIn(scale_all_sizes));
::testing::ValuesIn(scale_all_sizes),
::testing::ValuesIn(min_max_aspect_ratios_order));
INSTANTIATE_TEST_SUITE_P(smoke_PriorBox_Basic, PriorBoxLayerTest,
::testing::Combine(

View File

@ -10,6 +10,7 @@
#include <memory>
#include <ngraph/opsets/opset3.hpp>
#include <ngraph/opsets/opset8.hpp>
#include <ngraph/function.hpp>
#include <transformations/init_node_info.hpp>
#include <ngraph/pass/constant_folding.hpp>
@ -24,7 +25,7 @@ TEST(TransformationTests, ConstFoldingPriorBox) {
{
auto in = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{2});
ngraph::op::PriorBoxAttrs attrs;
ngraph::op::v0::PriorBox::Attributes attrs;
attrs.min_size = {256.0f};
attrs.max_size = {315.0f};
attrs.aspect_ratio = {2.0f};
@ -119,7 +120,7 @@ TEST(TransformationTests, ConstFoldingPriorBoxSubgraph) {
{
auto in = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{2, 3, 1, 1});
auto in_2 = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{2, 3, 300, 300});
ngraph::op::PriorBoxAttrs attrs;
ngraph::op::v0::PriorBox::Attributes attrs;
attrs.min_size = {256.0f};
attrs.max_size = {315.0f};
attrs.aspect_ratio = {2.0f};
@ -225,3 +226,111 @@ TEST(TransformationTests, ConstFoldingPriorBoxClusteredSubgraph) {
EXPECT_TRUE(ref != nullptr);
EXPECT_TRUE(fused->get_vector<float>() == ref->get_vector<float>());
}
TEST(TransformationTests, ConstFoldingPriorBox8) {
std::shared_ptr<ngraph::Function> f(nullptr), f_ref(nullptr);
{
auto in = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::i64, ngraph::Shape {2});
ngraph::op::v8::PriorBox::Attributes attrs;
attrs.min_size = {2.0f};
attrs.max_size = {5.0f};
attrs.aspect_ratio = {1.5f};
attrs.scale_all_sizes = true;
attrs.min_max_aspect_ratios_order = false;
auto layer_shape = ngraph::opset8::Constant::create<int64_t>(ngraph::element::i64, ngraph::Shape {2}, {2, 2});
auto image_shape = ngraph::opset8::Constant::create<int64_t>(ngraph::element::i64, ngraph::Shape {2}, {10, 10});
auto pb = std::make_shared<ngraph::opset8::PriorBox>(layer_shape, image_shape, attrs);
auto res = std::make_shared<ngraph::opset8::Result>(pb);
f = std::make_shared<ngraph::Function>(ngraph::NodeVector {res}, ngraph::ParameterVector {in});
ngraph::pass::Manager manager;
manager.register_pass<ngraph::pass::InitNodeInfo>();
manager.register_pass<ngraph::pass::ConstantFolding>();
manager.run_passes(f);
ASSERT_NO_THROW(check_rt_info(f));
}
{
auto layer_shape = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::i64, ngraph::Shape {2});
auto const_prior_box = ngraph::opset8::Constant::create<float>(
ngraph::element::f32, ngraph::Shape {2, 48},
{0.15, 0.15, 0.35, 0.35, 0.127526, 0.16835, 0.372474, 0.33165, 0.0918861, 0.0918861, 0.408114, 0.408114, 0.65, 0.15,
0.85, 0.35, 0.627526, 0.16835, 0.872474, 0.33165, 0.591886, 0.0918861, 0.908114, 0.408114, 0.15, 0.65, 0.35, 0.85,
0.127526, 0.66835, 0.372474, 0.83165, 0.0918861, 0.591886, 0.408114, 0.908114, 0.65, 0.65, 0.85, 0.85, 0.627526, 0.66835,
0.872474, 0.83165, 0.591886, 0.591886, 0.908114, 0.908114, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1});
auto res = std::make_shared<ngraph::opset8::Result>(const_prior_box);
f_ref = std::make_shared<ngraph::Function>(ngraph::NodeVector {res}, ngraph::ParameterVector {layer_shape});
}
auto res = compare_functions(f, f_ref);
ASSERT_TRUE(res.first) << res.second;
auto fused = std::dynamic_pointer_cast<ngraph::opset8::Constant>(f->get_result()->input_value(0).get_node_shared_ptr());
auto ref = std::dynamic_pointer_cast<ngraph::opset8::Constant>(f_ref->get_result()->input_value(0).get_node_shared_ptr());
EXPECT_TRUE(fused != nullptr);
EXPECT_TRUE(ref != nullptr);
EXPECT_TRUE(fused->get_vector<float>() == ref->get_vector<float>());
}
TEST(TransformationTests, ConstFoldingPriorBox8Subgraph) {
std::shared_ptr<ngraph::Function> f(nullptr), f_ref(nullptr);
{
auto in = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::i64, ngraph::Shape {2, 3, 2, 2});
auto in_2 = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::i64, ngraph::Shape {2, 3, 10, 10});
ngraph::op::v8::PriorBox::Attributes attrs;
attrs.min_size = {2.0f};
attrs.max_size = {5.0f};
attrs.aspect_ratio = {1.5f};
attrs.scale_all_sizes = true;
attrs.min_max_aspect_ratios_order = false;
auto layer_shape = std::make_shared<ngraph::opset8::ShapeOf>(in);
auto image_shape = std::make_shared<ngraph::opset8::ShapeOf>(in_2);
auto begin = ngraph::opset8::Constant::create(ngraph::element::i64, ngraph::Shape {1}, {2});
auto end = ngraph::opset8::Constant::create(ngraph::element::i64, ngraph::Shape {1}, {4});
auto stride = ngraph::opset8::Constant::create(ngraph::element::i64, ngraph::Shape {1}, {1});
auto ss_data = std::make_shared<ngraph::opset8::StridedSlice>(layer_shape, begin, end, stride, std::vector<int64_t> {0}, std::vector<int64_t> {0});
auto ss_image = std::make_shared<ngraph::opset8::StridedSlice>(image_shape, begin, end, stride, std::vector<int64_t> {0}, std::vector<int64_t> {0});
auto pb = std::make_shared<ngraph::opset8::PriorBox>(ss_data, ss_image, attrs);
auto res = std::make_shared<ngraph::opset8::Result>(pb);
f = std::make_shared<ngraph::Function>(ngraph::NodeVector {res}, ngraph::ParameterVector {in, in_2});
ngraph::pass::Manager manager;
manager.register_pass<ngraph::pass::InitNodeInfo>();
manager.register_pass<ngraph::pass::ConstantFolding>();
manager.run_passes(f);
ASSERT_NO_THROW(check_rt_info(f));
}
{
auto layer_shape = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::i64, ngraph::Shape {2});
auto const_prior_box = ngraph::opset8::Constant::create<float>(
ngraph::element::f32, ngraph::Shape {2, 48},
{0.15, 0.15, 0.35, 0.35, 0.127526, 0.16835, 0.372474, 0.33165, 0.0918861, 0.0918861, 0.408114, 0.408114, 0.65, 0.15,
0.85, 0.35, 0.627526, 0.16835, 0.872474, 0.33165, 0.591886, 0.0918861, 0.908114, 0.408114, 0.15, 0.65, 0.35, 0.85,
0.127526, 0.66835, 0.372474, 0.83165, 0.0918861, 0.591886, 0.408114, 0.908114, 0.65, 0.65, 0.85, 0.85, 0.627526, 0.66835,
0.872474, 0.83165, 0.591886, 0.591886, 0.908114, 0.908114, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1});
auto res = std::make_shared<ngraph::opset8::Result>(const_prior_box);
f_ref = std::make_shared<ngraph::Function>(ngraph::NodeVector {res}, ngraph::ParameterVector {layer_shape});
}
auto res = compare_functions(f, f_ref);
ASSERT_TRUE(res.first) << res.second;
auto fused = std::dynamic_pointer_cast<ngraph::opset8::Constant>(f->get_result()->input_value(0).get_node_shared_ptr());
auto ref = std::dynamic_pointer_cast<ngraph::opset8::Constant>(f_ref->get_result()->input_value(0).get_node_shared_ptr());
EXPECT_TRUE(fused != nullptr);
EXPECT_TRUE(ref != nullptr);
EXPECT_TRUE(fused->get_vector<float>() == ref->get_vector<float>());
}

View File

@ -0,0 +1,77 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <gtest/gtest.h>
#include <memory>
#include <ngraph/function.hpp>
#include <ngraph/opsets/opset1.hpp>
#include <ngraph/opsets/opset8.hpp>
#include <ngraph/pass/manager.hpp>
#include <string>
#include <transformations/init_node_info.hpp>
#include <transformations/op_conversions/convert_prior_box_v8_to_v0.hpp>
#include "common_test_utils/ngraph_test_utils.hpp"
using namespace testing;
using namespace ngraph;
TEST_F(TransformationTestsF, ConvertPriorBox8To0) {
{
const Shape input_shape {2, 2};
const Shape image_Shape {10, 10};
op::v8::PriorBox::Attributes attrs;
attrs.min_size = {2.0f};
attrs.max_size = {5.0f};
attrs.aspect_ratio = {1.5f};
attrs.scale_all_sizes = true;
auto input = std::make_shared<opset8::Parameter>(element::i64, input_shape);
auto image = std::make_shared<opset8::Parameter>(element::i64, image_Shape);
auto prior_box = std::make_shared<opset8::PriorBox>(input, image, attrs);
function = std::make_shared<Function>(NodeVector {prior_box}, ParameterVector {input, image});
manager.register_pass<pass::ConvertPriorBox8To0>();
}
{
const Shape input_shape {2, 2};
const Shape image_Shape {10, 10};
op::v0::PriorBox::Attributes attrs;
attrs.min_size = {2.0f};
attrs.max_size = {5.0f};
attrs.aspect_ratio = {1.5f};
attrs.scale_all_sizes = true;
auto input = std::make_shared<opset1::Parameter>(element::i64, input_shape);
auto image = std::make_shared<opset1::Parameter>(element::i64, image_Shape);
auto prior_box = std::make_shared<opset1::PriorBox>(input, image, attrs);
function_ref = std::make_shared<Function>(NodeVector {prior_box}, ParameterVector {input, image});
}
}
TEST_F(TransformationTestsF, ConvertPriorBox8To0_min_max_aspect_ratios_order) {
{
const Shape input_shape {2, 2};
const Shape image_Shape {10, 10};
op::v8::PriorBox::Attributes attrs;
attrs.min_size = {2.0f};
attrs.max_size = {5.0f};
attrs.aspect_ratio = {1.5f};
attrs.scale_all_sizes = true;
attrs.min_max_aspect_ratios_order = false;
auto input = std::make_shared<opset8::Parameter>(element::i64, input_shape);
auto image = std::make_shared<opset8::Parameter>(element::i64, image_Shape);
auto prior_box = std::make_shared<opset8::PriorBox>(input, image, attrs);
function = std::make_shared<Function>(NodeVector {prior_box}, ParameterVector {input, image});
manager.register_pass<pass::ConvertPriorBox8To0>();
}
}

View File

@ -50,6 +50,9 @@ const std::vector<std::vector<float>> variances = {
const std::vector<bool> scale_all_sizes = {
false, true};
const std::vector<bool> min_max_aspect_ratios_order = {
false, true};
const std::vector<size_t> inputShape = {300, 300};
const std::vector<size_t> imageShape = {32, 32};
@ -65,7 +68,8 @@ const auto layerSpecificParams = ::testing::Combine(
::testing::ValuesIn(steps),
::testing::ValuesIn(offsets),
::testing::ValuesIn(variances),
::testing::ValuesIn(scale_all_sizes));
::testing::ValuesIn(scale_all_sizes),
::testing::ValuesIn(min_max_aspect_ratios_order));
INSTANTIATE_TEST_SUITE_P(smoke_PriorBox_Basic, PriorBoxLayerTest,
::testing::Combine(

View File

@ -39,7 +39,8 @@ using priorBoxSpecificParams = std::tuple<
float, // step
float, // offset
std::vector<float>, // variance
bool>; // scale_all_sizes
bool, // scale_all_sizes
bool>; // min_max_aspect_ratios_order
typedef std::tuple<
priorBoxSpecificParams,
@ -73,6 +74,7 @@ protected:
bool clip;
bool flip;
bool scale_all_sizes;
bool min_max_aspect_ratios_order;
void SetUp() override;
};

View File

@ -21,10 +21,10 @@ std::string PriorBoxLayerTest::getTestCaseName(const testing::TestParamInfo<prio
std::vector<float> min_size, max_size, aspect_ratio, density, fixed_ratio, fixed_size, variance;
float step, offset;
bool clip, flip, scale_all_sizes;
bool clip, flip, scale_all_sizes, min_max_aspect_ratios_order;
std::tie(min_size, max_size, aspect_ratio,
density, fixed_ratio, fixed_size, clip,
flip, step, offset, variance, scale_all_sizes) = specParams;
flip, step, offset, variance, scale_all_sizes, min_max_aspect_ratios_order) = specParams;
std::ostringstream result;
const char separator = '_';
@ -47,6 +47,7 @@ std::string PriorBoxLayerTest::getTestCaseName(const testing::TestParamInfo<prio
result << "clip=" << clip << separator;
result << "flip=" << flip<< separator;
result << "scale_all=" << scale_all_sizes << separator;
result << "min_max_aspect_ratios_order=" << min_max_aspect_ratios_order << separator;
result << "trgDev=" << targetDevice;
return result.str();
@ -58,14 +59,14 @@ void PriorBoxLayerTest::SetUp() {
inPrc, outPrc, inLayout, outLayout,
inputShapes, imageShapes, targetDevice) = GetParam();
std::tie(min_size, max_size, aspect_ratio,
density, fixed_ratio, fixed_size, clip,
flip, step, offset, variance, scale_all_sizes) = specParams;
std::tie(min_size, max_size, aspect_ratio, density, fixed_ratio, fixed_size,
clip, flip, step, offset, variance, scale_all_sizes,
min_max_aspect_ratios_order) = specParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {inputShapes, imageShapes});
ngraph::op::PriorBoxAttrs attributes;
ngraph::op::v8::PriorBox::Attributes attributes;
attributes.min_size = min_size;
attributes.max_size = max_size;
attributes.aspect_ratio = aspect_ratio;
@ -77,10 +78,11 @@ void PriorBoxLayerTest::SetUp() {
attributes.offset = offset;
attributes.clip = clip;
attributes.flip = flip;
attributes.min_max_aspect_ratios_order = min_max_aspect_ratios_order;
auto shape_of_1 = std::make_shared<ngraph::opset3::ShapeOf>(params[0]);
auto shape_of_2 = std::make_shared<ngraph::opset3::ShapeOf>(params[1]);
auto priorBox = std::make_shared<ngraph::op::PriorBox>(
auto priorBox = std::make_shared<ngraph::op::v8::PriorBox>(
shape_of_1,
shape_of_2,
attributes);

View File

@ -102,7 +102,7 @@ from ngraph.opset1.ops import pad
from ngraph.opset1.ops import parameter
from ngraph.opset1.ops import power
from ngraph.opset1.ops import prelu
from ngraph.opset1.ops import prior_box
from ngraph.opset8.ops import prior_box
from ngraph.opset1.ops import prior_box_clustered
from ngraph.opset1.ops import psroi_pooling
from ngraph.opset4.ops import proposal

View File

@ -417,3 +417,108 @@ def gather_nd(
}
return _get_node_factory_opset8().create("GatherND", inputs, attributes)
def prior_box(
layer_shape: Node, image_shape: NodeInput, attrs: dict, name: Optional[str] = None
) -> Node:
"""Generate prior boxes of specified sizes and aspect ratios across all dimensions.
@param layer_shape: Shape of layer for which prior boxes are computed.
@param image_shape: Shape of image to which prior boxes are scaled.
@param attrs: The dictionary containing key, value pairs for attributes.
@param name: Optional name for the output node.
@return Node representing prior box operation.
Available attributes are:
* min_size The minimum box size (in pixels).
Range of values: positive floating point numbers
Default value: []
Required: no
* max_size The maximum box size (in pixels).
Range of values: positive floating point numbers
Default value: []
Required: no
* aspect_ratio Aspect ratios of prior boxes.
Range of values: set of positive floating point numbers
Default value: []
Required: no
* flip The flag that denotes that each aspect_ratio is duplicated and flipped.
Range of values: {True, False}
Default value: False
Required: no
* clip The flag that denotes if each value in the output tensor should be clipped
to [0,1] interval.
Range of values: {True, False}
Default value: False
Required: no
* step The distance between box centers.
Range of values: floating point non-negative number
Default value: 0
Required: no
* offset This is a shift of the box with respect to the top left corner.
Range of values: floating point non-negative number
Default value: None
Required: yes
* variance The variance denotes a variance of adjusting bounding boxes. The attribute
could contain 0, 1 or 4 elements.
Range of values: floating point positive numbers
Default value: []
Required: no
* scale_all_sizes The flag that denotes type of inference.
Range of values: False - max_size is ignored
True - max_size is used
Default value: True
Required: no
* fixed_ratio This is an aspect ratio of a box.
Range of values: a list of positive floating-point numbers
Default value: None
Required: no
* fixed_size This is an initial box size (in pixels).
Range of values: a list of positive floating-point numbers
Default value: None
Required: no
* density This is the square root of the number of boxes of each type.
Range of values: a list of positive floating-point numbers
Default value: None
Required: no
* min_max_aspect_ratios_order The flag that denotes the order of output prior box.
Range of values: False - the output prior box is in order of
[min, aspect_ratios, max]
True - the output prior box is in order of
[min, max, aspect_ratios]
Default value: True
Required: no
Example of attribute dictionary:
@code{.py}
# just required ones
attrs = {
'offset': 85,
}
attrs = {
'offset': 85,
'flip': True,
'clip': True,
'fixed_size': [32, 64, 128]
}
@endcode
Optional attributes which are absent from the dictionary will be set to their corresponding defaults.
"""
requirements = [
("offset", True, np.floating, is_non_negative_value),
("min_size", False, np.floating, is_positive_value),
("max_size", False, np.floating, is_positive_value),
("aspect_ratio", False, np.floating, is_positive_value),
("flip", False, np.bool_, None),
("clip", False, np.bool_, None),
("step", False, np.floating, is_non_negative_value),
("variance", False, np.floating, is_positive_value),
("scale_all_sizes", False, np.bool_, None),
("fixed_ratio", False, np.floating, is_positive_value),
("fixed_size", False, np.floating, is_positive_value),
("density", False, np.floating, is_positive_value),
("min_max_aspect_ratios_order", False, np.bool_, None),
]
check_valid_attributes("PriorBox", attrs, requirements)
return _get_node_factory_opset8().create("PriorBox", [layer_shape, as_node(image_shape)], attrs)

View File

@ -102,7 +102,7 @@ from openvino.opset1.ops import pad
from openvino.opset1.ops import parameter
from openvino.opset1.ops import power
from openvino.opset1.ops import prelu
from openvino.opset1.ops import prior_box
from openvino.opset8.ops import prior_box
from openvino.opset1.ops import prior_box_clustered
from openvino.opset1.ops import psroi_pooling
from openvino.opset4.ops import proposal

View File

@ -417,3 +417,109 @@ def gather_nd(
}
return _get_node_factory_opset8().create("GatherND", inputs, attributes)
@nameable_op
def prior_box(
layer_shape: Node, image_shape: NodeInput, attrs: dict, name: Optional[str] = None
) -> Node:
"""Generate prior boxes of specified sizes and aspect ratios across all dimensions.
@param layer_shape: Shape of layer for which prior boxes are computed.
@param image_shape: Shape of image to which prior boxes are scaled.
@param attrs: The dictionary containing key, value pairs for attributes.
@param name: Optional name for the output node.
@return Node representing prior box operation.
Available attributes are:
* min_size The minimum box size (in pixels).
Range of values: positive floating point numbers
Default value: []
Required: no
* max_size The maximum box size (in pixels).
Range of values: positive floating point numbers
Default value: []
Required: no
* aspect_ratio Aspect ratios of prior boxes.
Range of values: set of positive floating point numbers
Default value: []
Required: no
* flip The flag that denotes that each aspect_ratio is duplicated and flipped.
Range of values: {True, False}
Default value: False
Required: no
* clip The flag that denotes if each value in the output tensor should be clipped
to [0,1] interval.
Range of values: {True, False}
Default value: False
Required: no
* step The distance between box centers.
Range of values: floating point non-negative number
Default value: 0
Required: no
* offset This is a shift of the box with respect to the top left corner.
Range of values: floating point non-negative number
Default value: None
Required: yes
* variance The variance denotes a variance of adjusting bounding boxes. The attribute
could contain 0, 1 or 4 elements.
Range of values: floating point positive numbers
Default value: []
Required: no
* scale_all_sizes The flag that denotes type of inference.
Range of values: False - max_size is ignored
True - max_size is used
Default value: True
Required: no
* fixed_ratio This is an aspect ratio of a box.
Range of values: a list of positive floating-point numbers
Default value: None
Required: no
* fixed_size This is an initial box size (in pixels).
Range of values: a list of positive floating-point numbers
Default value: None
Required: no
* density This is the square root of the number of boxes of each type.
Range of values: a list of positive floating-point numbers
Default value: None
Required: no
* min_max_aspect_ratios_order The flag that denotes the order of output prior box.
Range of values: False - the output prior box is in order of
[min, aspect_ratios, max]
True - the output prior box is in order of
[min, max, aspect_ratios]
Default value: True
Required: no
Example of attribute dictionary:
@code{.py}
# just required ones
attrs = {
'offset': 85,
}
attrs = {
'offset': 85,
'flip': True,
'clip': True,
'fixed_size': [32, 64, 128]
}
@endcode
Optional attributes which are absent from the dictionary will be set to their corresponding defaults.
"""
requirements = [
("offset", True, np.floating, is_non_negative_value),
("min_size", False, np.floating, is_positive_value),
("max_size", False, np.floating, is_positive_value),
("aspect_ratio", False, np.floating, is_positive_value),
("flip", False, np.bool_, None),
("clip", False, np.bool_, None),
("step", False, np.floating, is_non_negative_value),
("variance", False, np.floating, is_positive_value),
("scale_all_sizes", False, np.bool_, None),
("fixed_ratio", False, np.floating, is_positive_value),
("fixed_size", False, np.floating, is_positive_value),
("density", False, np.floating, is_positive_value),
("min_max_aspect_ratios_order", False, np.bool_, None),
]
check_valid_attributes("PriorBox", attrs, requirements)
return _get_node_factory_opset8().create("PriorBox", [layer_shape, as_node(image_shape)], attrs)

View File

@ -0,0 +1,26 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <ngraph/pass/graph_rewrite.hpp>
#include <transformations_visibility.hpp>
namespace ngraph {
namespace pass {
class TRANSFORMATIONS_API ConvertPriorBox8To0;
} // namespace pass
} // namespace ngraph
/**
* @ingroup ie_transformation_common_api
* @brief ConvertPriorBox8To0 converts v8::PriorBox into v0::PriorBox.
*/
class ngraph::pass::ConvertPriorBox8To0 : public ngraph::pass::MatcherPass {
public:
NGRAPH_RTTI_DECLARATION;
ConvertPriorBox8To0();
};
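A minimal usage sketch (illustrative, not part of this diff) of running the new pass standalone through an ngraph::pass::Manager, mirroring what the transformation tests and CommonOptimizations below do; the function name is hypothetical:

#include <ngraph/pass/manager.hpp>
#include <transformations/init_node_info.hpp>
#include <transformations/op_conversions/convert_prior_box_v8_to_v0.hpp>

// Downgrades every v8::PriorBox in 'f' whose min_max_aspect_ratios_order is true
// (the v0-compatible ordering) to an equivalent v0::PriorBox.
void downgrade_prior_box8(const std::shared_ptr<ngraph::Function>& f) {
    ngraph::pass::Manager manager;
    manager.register_pass<ngraph::pass::InitNodeInfo>();
    manager.register_pass<ngraph::pass::ConvertPriorBox8To0>();
    manager.run_passes(f);
}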

View File

@ -81,6 +81,7 @@
#include "transformations/op_conversions/convert_deformable_conv_v8_to_v1.hpp"
#include "transformations/op_conversions/convert_maxpool_downgrade.hpp"
#include "transformations/disable_decompression_convert_constant_folding.hpp"
#include "transformations/op_conversions/convert_prior_box_v8_to_v0.hpp"
#include <ngraph/pass/manager.hpp>
#include <ngraph/pass/constant_folding.hpp>
@ -174,6 +175,7 @@ bool ngraph::pass::CommonOptimizations::run_on_function(std::shared_ptr<ngraph::
manager.register_pass<ngraph::pass::ConvertGather7ToGather8, false>();
manager.register_pass<ngraph::pass::ConvertDeformableConv8To1>();
manager.register_pass<ngraph::pass::ConvertMaxPool8ToMaxPool1>();
manager.register_pass<ngraph::pass::ConvertPriorBox8To0>();  // plugins do not implement PriorBox-8 yet
auto fq_fusions = manager.register_pass<ngraph::pass::GraphRewrite>();
fq_fusions->add_matcher<ngraph::pass::FakeQuantizeMulFusion>();

View File

@ -0,0 +1,54 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "transformations/op_conversions/convert_prior_box_v8_to_v0.hpp"
#include <ngraph/opsets/opset1.hpp>
#include <ngraph/opsets/opset8.hpp>
#include <ngraph/pattern/op/wrap_type.hpp>
#include <ngraph/rt_info.hpp>
#include "itt.hpp"
NGRAPH_RTTI_DEFINITION(ngraph::pass::ConvertPriorBox8To0, "ConvertPriorBox8To0", 0);
ngraph::pass::ConvertPriorBox8To0::ConvertPriorBox8To0() {
MATCHER_SCOPE(ConvertPriorBox8To0);
auto prior_box_v8 = pattern::wrap_type<ngraph::opset8::PriorBox>();
ngraph::matcher_pass_callback callback = [=](pattern::Matcher& m) {
auto prior_box_v8_node = std::dynamic_pointer_cast<ngraph::opset8::PriorBox>(m.get_match_root());
if (!prior_box_v8_node)
return false;
ngraph::opset8::PriorBox::Attributes attrs_v8 = prior_box_v8_node->get_attrs();
if (!attrs_v8.min_max_aspect_ratios_order)
return false;
ngraph::opset1::PriorBox::Attributes attrs_v0;
attrs_v0.min_size = attrs_v8.min_size;
attrs_v0.max_size = attrs_v8.max_size;
attrs_v0.aspect_ratio = attrs_v8.aspect_ratio;
attrs_v0.density = attrs_v8.density;
attrs_v0.fixed_ratio = attrs_v8.fixed_ratio;
attrs_v0.fixed_size = attrs_v8.fixed_size;
attrs_v0.clip = attrs_v8.clip;
attrs_v0.flip = attrs_v8.flip;
attrs_v0.step = attrs_v8.step;
attrs_v0.offset = attrs_v8.offset;
attrs_v0.variance = attrs_v8.variance;
attrs_v0.scale_all_sizes = attrs_v8.scale_all_sizes;
auto prior_box_v0 = std::make_shared<ngraph::opset1::PriorBox>(prior_box_v8_node->input_value(0), prior_box_v8_node->input_value(1), attrs_v0);
prior_box_v0->set_friendly_name(prior_box_v8_node->get_friendly_name());
ngraph::copy_runtime_info(prior_box_v8_node, prior_box_v0);
ngraph::replace_node(prior_box_v8_node, prior_box_v0);
return true;
};
auto m = std::make_shared<pattern::Matcher>(prior_box_v8, matcher_name);
register_matcher(m, callback);
}

View File

@ -13,6 +13,9 @@ using PriorBoxAttrs = ov::op::v0::PriorBox::Attributes;
namespace v0 {
using ov::op::v0::PriorBox;
} // namespace v0
namespace v8 {
using ov::op::v8::PriorBox;
} // namespace v8
using v0::PriorBox;
} // namespace op
} // namespace ngraph

View File

@ -66,5 +66,66 @@ private:
Attributes m_attrs;
};
} // namespace v0
namespace v8 {
/// \brief Layer which generates prior boxes of specified sizes
/// normalized to input image size
class OPENVINO_API PriorBox : public Op {
public:
OPENVINO_OP("PriorBox", "opset8");
BWDCMP_RTTI_DECLARATION;
struct Attributes {
// min_size Desired min_size of prior boxes
// max_size Desired max_size of prior boxes
// aspect_ratio Aspect ratios of prior boxes
// clip Clip output to [0,1]
// flip Flip aspect ratios
// step Distance between prior box centers
// offset Box offset relative to top center of image
// variance Values to adjust prior boxes with
// scale_all_sizes Scale all sizes
// min_max_aspect_ratios_order Order of output prior box
std::vector<float> min_size;
std::vector<float> max_size;
std::vector<float> aspect_ratio;
std::vector<float> density;
std::vector<float> fixed_ratio;
std::vector<float> fixed_size;
bool clip = false;
bool flip = false;
float step = 0.0f;
float offset = 0.0f;
std::vector<float> variance;
bool scale_all_sizes = true;
bool min_max_aspect_ratios_order = true;
};
PriorBox() = default;
/// \brief Constructs a PriorBox operation
///
/// \param layer_shape Shape of layer for which prior boxes are computed
/// \param image_shape Shape of image to which prior boxes are scaled
/// \param attrs PriorBox attributes
PriorBox(const Output<Node>& layer_shape, const Output<Node>& image_shape, const Attributes& attrs);
void validate_and_infer_types() override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
static int64_t number_of_priors(const Attributes& attrs);
static std::vector<float> normalized_aspect_ratio(const std::vector<float>& aspect_ratio, bool flip);
const Attributes& get_attrs() const {
return m_attrs;
}
bool visit_attributes(AttributeVisitor& visitor) override;
OPENVINO_SUPPRESS_DEPRECATED_START
bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
OPENVINO_SUPPRESS_DEPRECATED_END
bool has_evaluate() const override;
private:
Attributes m_attrs;
};
} // namespace v8
} // namespace op
} // namespace ov
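For reference, a minimal sketch (illustrative only) of constructing the operation declared above, reusing the attribute values and the 2x2 / 10x10 shapes from the reference and const-folding tests in this change; the helper name is hypothetical:

#include "openvino/op/constant.hpp"
#include "openvino/op/prior_box.hpp"

std::shared_ptr<ov::op::v8::PriorBox> make_prior_box_v8() {
    ov::op::v8::PriorBox::Attributes attrs;
    attrs.min_size = {2.0f};
    attrs.max_size = {5.0f};
    attrs.aspect_ratio = {1.5f};
    attrs.scale_all_sizes = true;
    attrs.min_max_aspect_ratios_order = false;  // the attribute added in opset8

    auto layer_shape = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{2}, {2, 2});
    auto image_shape = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{2}, {10, 10});
    return std::make_shared<ov::op::v8::PriorBox>(layer_shape, image_shape, attrs);
}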

View File

@ -71,7 +71,6 @@ _OPENVINO_OP_REG(PSROIPooling, ov::op::v0)
_OPENVINO_OP_REG(Pad, ov::op::v1)
_OPENVINO_OP_REG(Parameter, ov::op::v0)
_OPENVINO_OP_REG(Power, ov::op::v1)
_OPENVINO_OP_REG(PriorBox, ov::op::v0)
_OPENVINO_OP_REG(PriorBoxClustered, ov::op::v0)
_OPENVINO_OP_REG(Proposal, ov::op::v4)
_OPENVINO_OP_REG(Range, ov::op::v4)
@ -188,3 +187,4 @@ _OPENVINO_OP_REG(NV12toRGB, ov::op::v8)
_OPENVINO_OP_REG(RandomUniform, ov::op::v8)
_OPENVINO_OP_REG(Slice, ov::op::v8)
_OPENVINO_OP_REG(If, ov::op::v8)
_OPENVINO_OP_REG(PriorBox, ov::op::v8)

View File

@ -23,7 +23,11 @@ static inline float clip_less(float x, float threshold) {
}
template <typename T>
void prior_box(const T* data, const T* img, float* dst_data, const Shape& out_shape, const op::PriorBoxAttrs& attrs) {
void prior_box(const T* data,
const T* img,
float* dst_data,
const Shape& out_shape,
const op::v8::PriorBox::Attributes& attrs) {
const int64_t W = data[1];
const int64_t H = data[0];
const int64_t IW = img[1];
@ -51,7 +55,7 @@ void prior_box(const T* data, const T* img, float* dst_data, const Shape& out_sh
if (variance.empty())
variance.push_back(0.1f);
int64_t num_priors = op::PriorBox::number_of_priors(attrs);
int64_t num_priors = op::v8::PriorBox::number_of_priors(attrs);
float step = attrs.step;
auto min_size = attrs.min_size;
@ -162,21 +166,42 @@ void prior_box(const T* data, const T* img, float* dst_data, const Shape& out_sh
box_height = min_size[ms_idx] * 0.5f;
calculate_data(center_x, center_y, box_width, box_height, false);
if (attrs.max_size.size() > ms_idx) {
box_width = box_height = std::sqrt(min_size[ms_idx] * attrs.max_size[ms_idx]) * 0.5f;
calculate_data(center_x, center_y, box_width, box_height, false);
}
if (attrs.min_max_aspect_ratios_order) {
if (attrs.max_size.size() > ms_idx) {
box_width = box_height = std::sqrt(min_size[ms_idx] * attrs.max_size[ms_idx]) * 0.5f;
calculate_data(center_x, center_y, box_width, box_height, false);
}
if (attrs.scale_all_sizes || (!attrs.scale_all_sizes && (ms_idx == min_size.size() - 1))) {
size_t s_idx = attrs.scale_all_sizes ? ms_idx : 0;
for (float ar : aspect_ratios) {
if (std::fabs(ar - 1.0f) < 1e-6) {
continue;
if (attrs.scale_all_sizes || (!attrs.scale_all_sizes && (ms_idx == min_size.size() - 1))) {
size_t s_idx = attrs.scale_all_sizes ? ms_idx : 0;
for (float ar : aspect_ratios) {
if (std::fabs(ar - 1.0f) < 1e-6) {
continue;
}
ar = std::sqrt(ar);
box_width = min_size[s_idx] * 0.5f * ar;
box_height = min_size[s_idx] * 0.5f / ar;
calculate_data(center_x, center_y, box_width, box_height, false);
}
}
} else {
if (attrs.scale_all_sizes || (!attrs.scale_all_sizes && (ms_idx == min_size.size() - 1))) {
size_t s_idx = attrs.scale_all_sizes ? ms_idx : 0;
for (float ar : aspect_ratios) {
if (std::fabs(ar - 1.0f) < 1e-6) {
continue;
}
ar = std::sqrt(ar);
box_width = min_size[s_idx] * 0.5f * ar;
box_height = min_size[s_idx] * 0.5f / ar;
ar = std::sqrt(ar);
box_width = min_size[s_idx] * 0.5f * ar;
box_height = min_size[s_idx] * 0.5f / ar;
calculate_data(center_x, center_y, box_width, box_height, false);
}
}
if (attrs.max_size.size() > ms_idx) {
box_width = box_height = std::sqrt(min_size[ms_idx] * attrs.max_size[ms_idx]) * 0.5f;
calculate_data(center_x, center_y, box_width, box_height, false);
}
}
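In short, the only difference between the two branches above is where the sqrt(min_size * max_size) box is emitted: before the aspect-ratio boxes when min_max_aspect_ratios_order is true ([min, max, aspect_ratios], the original v0 behaviour) and after them when it is false ([min, aspect_ratios, max]), matching the attribute description in the opset8 Python API later in this diff.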

View File

@ -10,21 +10,22 @@
#include "ngraph/op/constant.hpp"
#include "ngraph/runtime/host_tensor.hpp"
#include "ngraph/runtime/reference/prior_box.hpp"
#include "openvino/runtime/tensor.hpp"
using namespace std;
using namespace ngraph;
BWDCMP_RTTI_DEFINITION(op::v0::PriorBox);
op::PriorBox::PriorBox(const Output<Node>& layer_shape,
const Output<Node>& image_shape,
const PriorBox::Attributes& attrs)
op::v0::PriorBox::PriorBox(const Output<Node>& layer_shape,
const Output<Node>& image_shape,
const PriorBox::Attributes& attrs)
: Op({layer_shape, image_shape}),
m_attrs(attrs) {
constructor_validate_and_infer_types();
}
void op::PriorBox::validate_and_infer_types() {
void op::v0::PriorBox::validate_and_infer_types() {
NGRAPH_OP_SCOPE(v0_PriorBox_validate_and_infer_types);
// shape node should have integer data type. For now we only allow i64
auto layer_shape_et = get_input_element_type(0);
@ -67,13 +68,13 @@ void op::PriorBox::validate_and_infer_types() {
}
}
shared_ptr<Node> op::PriorBox::clone_with_new_inputs(const OutputVector& new_args) const {
shared_ptr<Node> op::v0::PriorBox::clone_with_new_inputs(const OutputVector& new_args) const {
NGRAPH_OP_SCOPE(v0_PriorBox_clone_with_new_inputs);
check_new_args_count(this, new_args);
return make_shared<PriorBox>(new_args.at(0), new_args.at(1), m_attrs);
}
int64_t op::PriorBox::number_of_priors(const PriorBox::Attributes& attrs) {
int64_t op::v0::PriorBox::number_of_priors(const PriorBox::Attributes& attrs) {
// Starting with 0 number of prior and then various conditions on attributes will contribute
// real number of prior boxes as PriorBox is a fat thing with several modes of
// operation that will be checked in order in the next statements.
@ -102,7 +103,7 @@ int64_t op::PriorBox::number_of_priors(const PriorBox::Attributes& attrs) {
return num_priors;
}
std::vector<float> op::PriorBox::normalized_aspect_ratio(const std::vector<float>& aspect_ratio, bool flip) {
std::vector<float> op::v0::PriorBox::normalized_aspect_ratio(const std::vector<float>& aspect_ratio, bool flip) {
std::set<float> unique_ratios;
for (auto ratio : aspect_ratio) {
unique_ratios.insert(std::round(ratio * 1e6) / 1e6);
@ -113,7 +114,7 @@ std::vector<float> op::PriorBox::normalized_aspect_ratio(const std::vector<float
return std::vector<float>(unique_ratios.begin(), unique_ratios.end());
}
bool op::PriorBox::visit_attributes(AttributeVisitor& visitor) {
bool op::v0::PriorBox::visit_attributes(AttributeVisitor& visitor) {
NGRAPH_OP_SCOPE(v0_PriorBox_visit_attributes);
visitor.on_attribute("min_size", m_attrs.min_size);
visitor.on_attribute("max_size", m_attrs.max_size);
@ -136,19 +137,32 @@ template <element::Type_t ET>
bool evaluate(const HostTensorPtr& arg0,
const HostTensorPtr& arg1,
const HostTensorPtr& out,
op::PriorBox::Attributes attrs) {
op::v0::PriorBox::Attributes attrs) {
op::v8::PriorBox::Attributes attrs_v8;
attrs_v8.min_size = attrs.min_size;
attrs_v8.max_size = attrs.max_size;
attrs_v8.aspect_ratio = attrs.aspect_ratio;
attrs_v8.density = attrs.density;
attrs_v8.fixed_ratio = attrs.fixed_ratio;
attrs_v8.fixed_size = attrs.fixed_size;
attrs_v8.clip = attrs.clip;
attrs_v8.flip = attrs.flip;
attrs_v8.step = attrs.step;
attrs_v8.offset = attrs.offset;
attrs_v8.variance = attrs.variance;
attrs_v8.scale_all_sizes = attrs.scale_all_sizes;
runtime::reference::prior_box(arg0->get_data_ptr<ET>(),
arg1->get_data_ptr<ET>(),
out->get_data_ptr<float>(),
out->get_shape(),
attrs);
attrs_v8);
return true;
}
bool evaluate_prior_box(const HostTensorPtr& arg0,
const HostTensorPtr& arg1,
const HostTensorPtr& out,
const op::PriorBox::Attributes& attrs) {
const op::v0::PriorBox::Attributes& attrs) {
bool rc = true;
switch (arg0->get_element_type()) {
NGRAPH_TYPE_CASE(evaluate_prior_box, i8, arg0, arg1, out, attrs);
@ -190,3 +204,183 @@ bool op::v0::PriorBox::has_evaluate() const {
}
return false;
}
// ------------------------------ V8 ------------------------------
BWDCMP_RTTI_DEFINITION(op::v8::PriorBox);
op::v8::PriorBox::PriorBox(const Output<Node>& layer_shape,
const Output<Node>& image_shape,
const PriorBox::Attributes& attrs)
: Op({layer_shape, image_shape}),
m_attrs(attrs) {
constructor_validate_and_infer_types();
}
void op::v8::PriorBox::validate_and_infer_types() {
NGRAPH_OP_SCOPE(v8_PriorBox_validate_and_infer_types);
// shape node should have integer data type. For now we only allow i64
auto layer_shape_et = get_input_element_type(0);
NODE_VALIDATION_CHECK(this,
layer_shape_et.is_integral_number(),
"layer shape input must be an integral number, but is: ",
layer_shape_et);
auto image_shape_et = get_input_element_type(1);
NODE_VALIDATION_CHECK(this,
image_shape_et.is_integral_number(),
"image shape input must be an integral number, but is: ",
image_shape_et);
auto layer_shape_rank = get_input_partial_shape(0).rank();
auto image_shape_rank = get_input_partial_shape(1).rank();
NODE_VALIDATION_CHECK(this,
layer_shape_rank.compatible(image_shape_rank),
"layer shape input rank ",
layer_shape_rank,
" must match image shape input rank ",
image_shape_rank);
set_input_is_relevant_to_shape(0);
if (auto const_shape = get_constant_from_source(input_value(0))) {
NODE_VALIDATION_CHECK(this,
shape_size(const_shape->get_shape()) == 2,
"Layer shape must have rank 2",
const_shape->get_shape());
auto layer_shape = const_shape->get_shape_val();
set_output_type(
0,
element::f32,
ov::Shape{2, 4 * layer_shape[0] * layer_shape[1] * static_cast<size_t>(number_of_priors(m_attrs))});
} else {
set_output_type(0, element::f32, ov::PartialShape{2, Dimension::dynamic()});
}
}
shared_ptr<Node> op::v8::PriorBox::clone_with_new_inputs(const OutputVector& new_args) const {
NGRAPH_OP_SCOPE(v8_PriorBox_clone_with_new_inputs);
check_new_args_count(this, new_args);
return make_shared<PriorBox>(new_args.at(0), new_args.at(1), m_attrs);
}
int64_t op::v8::PriorBox::number_of_priors(const PriorBox::Attributes& attrs) {
// Starting with 0 number of prior and then various conditions on attributes will contribute
// real number of prior boxes as PriorBox is a fat thing with several modes of
// operation that will be checked in order in the next statements.
int64_t num_priors = 0;
// Total number of boxes around each point; depends on whether flipped boxes are included
// plus one box 1x1.
int64_t total_aspect_ratios = normalized_aspect_ratio(attrs.aspect_ratio, attrs.flip).size();
if (attrs.scale_all_sizes)
num_priors = total_aspect_ratios * attrs.min_size.size() + attrs.max_size.size();
else
num_priors = total_aspect_ratios + attrs.min_size.size() - 1;
if (!attrs.fixed_size.empty())
num_priors = total_aspect_ratios * attrs.fixed_size.size();
for (auto density : attrs.density) {
auto rounded_density = static_cast<int64_t>(density);
auto density_2d = (rounded_density * rounded_density - 1);
if (!attrs.fixed_ratio.empty())
num_priors += attrs.fixed_ratio.size() * density_2d;
else
num_priors += total_aspect_ratios * density_2d;
}
return num_priors;
}
std::vector<float> op::v8::PriorBox::normalized_aspect_ratio(const std::vector<float>& aspect_ratio, bool flip) {
std::set<float> unique_ratios;
for (auto ratio : aspect_ratio) {
unique_ratios.insert(std::round(ratio * 1e6) / 1e6);
if (flip)
unique_ratios.insert(std::round(1 / ratio * 1e6) / 1e6);
}
unique_ratios.insert(1);
return std::vector<float>(unique_ratios.begin(), unique_ratios.end());
}
bool op::v8::PriorBox::visit_attributes(AttributeVisitor& visitor) {
NGRAPH_OP_SCOPE(v8_PriorBox_visit_attributes);
visitor.on_attribute("min_size", m_attrs.min_size);
visitor.on_attribute("max_size", m_attrs.max_size);
visitor.on_attribute("aspect_ratio", m_attrs.aspect_ratio);
visitor.on_attribute("density", m_attrs.density);
visitor.on_attribute("fixed_ratio", m_attrs.fixed_ratio);
visitor.on_attribute("fixed_size", m_attrs.fixed_size);
visitor.on_attribute("clip", m_attrs.clip);
visitor.on_attribute("flip", m_attrs.flip);
visitor.on_attribute("step", m_attrs.step);
visitor.on_attribute("offset", m_attrs.offset);
visitor.on_attribute("variance", m_attrs.variance);
visitor.on_attribute("scale_all_sizes", m_attrs.scale_all_sizes);
visitor.on_attribute("min_max_aspect_ratios_order", m_attrs.min_max_aspect_ratios_order);
return true;
}
namespace prior_box_v8 {
namespace {
template <element::Type_t ET>
bool evaluate(const HostTensorPtr& arg0,
const HostTensorPtr& arg1,
const HostTensorPtr& out,
op::v8::PriorBox::Attributes attrs) {
runtime::reference::prior_box(arg0->get_data_ptr<ET>(),
arg1->get_data_ptr<ET>(),
out->get_data_ptr<float>(),
out->get_shape(),
attrs);
return true;
}
bool evaluate_prior_box(const HostTensorPtr& arg0,
const HostTensorPtr& arg1,
const HostTensorPtr& out,
const op::v8::PriorBox::Attributes& attrs) {
bool rc = true;
switch (arg0->get_element_type()) {
NGRAPH_TYPE_CASE(evaluate_prior_box, i8, arg0, arg1, out, attrs);
NGRAPH_TYPE_CASE(evaluate_prior_box, i16, arg0, arg1, out, attrs);
NGRAPH_TYPE_CASE(evaluate_prior_box, i32, arg0, arg1, out, attrs);
NGRAPH_TYPE_CASE(evaluate_prior_box, i64, arg0, arg1, out, attrs);
NGRAPH_TYPE_CASE(evaluate_prior_box, u8, arg0, arg1, out, attrs);
NGRAPH_TYPE_CASE(evaluate_prior_box, u16, arg0, arg1, out, attrs);
NGRAPH_TYPE_CASE(evaluate_prior_box, u32, arg0, arg1, out, attrs);
NGRAPH_TYPE_CASE(evaluate_prior_box, u64, arg0, arg1, out, attrs);
default:
rc = false;
break;
}
return rc;
}
} // namespace
} // namespace prior_box_v8
bool op::v8::PriorBox::evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const {
NGRAPH_OP_SCOPE(v8_PriorBox_evaluate);
return prior_box_v8::evaluate_prior_box(inputs[0], inputs[1], outputs[0], get_attrs());
}
bool op::v8::PriorBox::has_evaluate() const {
NGRAPH_OP_SCOPE(v8_PriorBox_has_evaluate);
switch (get_input_element_type(0)) {
case ngraph::element::i8:
case ngraph::element::i16:
case ngraph::element::i32:
case ngraph::element::i64:
case ngraph::element::u8:
case ngraph::element::u16:
case ngraph::element::u32:
case ngraph::element::u64:
return true;
default:
break;
}
return false;
}
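A quick check of the v8 number_of_priors() logic above against the tests in this change: with min_size = {2.0f}, max_size = {5.0f}, aspect_ratio = {1.5f}, scale_all_sizes = true and flip left at its default of false, normalized_aspect_ratio() yields {1, 1.5}, so num_priors = 2 * 1 + 1 = 3; for the 2x2 layer shape used in the tests the output shape is {2, 4 * 2 * 2 * 3} = {2, 48}, which matches the Shape{2, 48} constants in the const-folding tests.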

View File

@ -1526,17 +1526,6 @@ bool evaluate(const shared_ptr<op::v0::Elu>& op, const HostTensorVector& outputs
return true;
}
template <element::Type_t ET>
bool evaluate(const shared_ptr<op::v0::PriorBox>& op, const HostTensorVector& outputs, const HostTensorVector& inputs) {
using T = typename element_type_traits<ET>::value_type;
runtime::reference::prior_box<T>(inputs[0]->get_data_ptr<T>(),
inputs[1]->get_data_ptr<T>(),
outputs[0]->get_data_ptr<float>(),
outputs[0]->get_shape(),
op->get_attrs());
return true;
}
template <element::Type_t ET>
bool evaluate(const shared_ptr<op::v0::Proposal>& op, const HostTensorVector& outputs, const HostTensorVector& inputs) {
using T = typename element_type_traits<ET>::value_type;
@ -2704,11 +2693,8 @@ bool evaluate(const shared_ptr<op::v8::Gather>& op, const HostTensorVector& outp
template <typename T>
bool evaluate_node(std::shared_ptr<Node> node, const HostTensorVector& outputs, const HostTensorVector& inputs) {
auto element_type = node->get_output_element_type(0);
if (ov::is_type<op::v1::Select>(node)) {
if (ov::is_type<op::v1::Select>(node))
element_type = node->get_input_element_type(1);
} else if (ov::is_type<op::v0::PriorBox>(node)) {
element_type = node->get_input_element_type(0);
}
switch (element_type) {
case element::Type_t::boolean:

View File

@ -128,7 +128,7 @@ bool runtime::interpreter::INTExecutable::call(const vector<shared_ptr<runtime::
// get op type
element::Type type;
if (ov::is_type<op::Convert>(op) || ov::is_type<op::PriorBox>(op)) {
if (ov::is_type<op::Convert>(op) || ov::is_type<op::v0::PriorBox>(op) || ov::is_type<op::v8::PriorBox>(op)) {
type = op->get_input_element_type(0);
} else if (ov::is_type<op::v1::Equal>(op) || ov::is_type<op::v1::Greater>(op) ||
ov::is_type<op::v1::GreaterEqual>(op) || ov::is_type<op::v1::Less>(op) ||

View File

@ -111,3 +111,4 @@ NGRAPH_OP(Sigmoid, op::v0)
NGRAPH_OP(Tanh, op::v0)
NGRAPH_OP(Exp, op::v0)
NGRAPH_OP(Log, op::v0)
NGRAPH_OP(PriorBox, ngraph::op::v8)

View File

@ -10,19 +10,19 @@
using namespace ngraph;
TEST(type_prop, prior_box1) {
op::PriorBoxAttrs attrs;
op::v0::PriorBox::Attributes attrs;
attrs.min_size = {2.0f, 3.0f};
attrs.aspect_ratio = {1.5f, 2.0f, 2.5f};
attrs.scale_all_sizes = false;
auto layer_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {32, 32});
auto image_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {300, 300});
auto pb = std::make_shared<op::PriorBox>(layer_shape, image_shape, attrs);
auto pb = std::make_shared<op::v0::PriorBox>(layer_shape, image_shape, attrs);
ASSERT_EQ(pb->get_shape(), (Shape{2, 20480}));
}
TEST(type_prop, prior_box2) {
op::PriorBoxAttrs attrs;
op::v0::PriorBox::Attributes attrs;
attrs.min_size = {2.0f, 3.0f};
attrs.aspect_ratio = {1.5f, 2.0f, 2.5f};
attrs.flip = true;
@ -30,12 +30,12 @@ TEST(type_prop, prior_box2) {
auto layer_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {32, 32});
auto image_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {300, 300});
auto pb = std::make_shared<op::PriorBox>(layer_shape, image_shape, attrs);
auto pb = std::make_shared<op::v0::PriorBox>(layer_shape, image_shape, attrs);
ASSERT_EQ(pb->get_shape(), (Shape{2, 32768}));
}
TEST(type_prop, prior_box3) {
op::PriorBoxAttrs attrs;
op::v0::PriorBox::Attributes attrs;
attrs.min_size = {256.0f};
attrs.max_size = {315.0f};
attrs.aspect_ratio = {2.0f};
@ -43,6 +43,33 @@ TEST(type_prop, prior_box3) {
auto layer_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {1, 1});
auto image_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {300, 300});
auto pb = std::make_shared<op::PriorBox>(layer_shape, image_shape, attrs);
auto pb = std::make_shared<op::v0::PriorBox>(layer_shape, image_shape, attrs);
ASSERT_EQ(pb->get_shape(), (Shape{2, 16}));
}
TEST(type_prop, prior_box_v8_1) {
op::v8::PriorBox::Attributes attrs;
attrs.min_size = {2.0f, 3.0f};
attrs.aspect_ratio = {1.5f, 2.0f, 2.5f};
attrs.scale_all_sizes = false;
attrs.min_max_aspect_ratios_order = true;
auto layer_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {32, 32});
auto image_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {300, 300});
auto pb = std::make_shared<op::v8::PriorBox>(layer_shape, image_shape, attrs);
ASSERT_EQ(pb->get_shape(), (Shape{2, 20480}));
}
TEST(type_prop, prior_box_v8_2) {
op::v8::PriorBox::Attributes attrs;
attrs.min_size = {2.0f, 3.0f};
attrs.aspect_ratio = {1.5f, 2.0f, 2.5f};
attrs.flip = true;
attrs.scale_all_sizes = false;
attrs.min_max_aspect_ratios_order = false;
auto layer_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {32, 32});
auto image_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {300, 300});
auto pb = std::make_shared<op::v8::PriorBox>(layer_shape, image_shape, attrs);
ASSERT_EQ(pb->get_shape(), (Shape{2, 32768}));
}

View File

@ -9,6 +9,7 @@
#include "ngraph/opsets/opset3.hpp"
#include "ngraph/opsets/opset4.hpp"
#include "ngraph/opsets/opset5.hpp"
#include "ngraph/opsets/opset8.hpp"
#include "util/visitor.hpp"
using namespace std;
@ -21,7 +22,7 @@ TEST(attributes, prior_box_op) {
const auto layer_shape = make_shared<op::Parameter>(element::i64, Shape{128, 128});
const auto image_shape = make_shared<op::Parameter>(element::i64, Shape{32, 32});
op::PriorBoxAttrs attrs;
op::v0::PriorBox::Attributes attrs;
attrs.min_size = vector<float>{16.f, 32.f};
attrs.max_size = vector<float>{256.f, 512.f};
attrs.aspect_ratio = vector<float>{0.66f, 1.56f};
@ -57,3 +58,47 @@ TEST(attributes, prior_box_op) {
EXPECT_EQ(g_prior_box_attrs.variance, prior_box_attrs.variance);
EXPECT_EQ(g_prior_box_attrs.scale_all_sizes, prior_box_attrs.scale_all_sizes);
}
TEST(attributes, prior_box_v8_op) {
NodeBuilder::get_ops().register_factory<opset8::PriorBox>();
const auto layer_shape = make_shared<op::Parameter>(element::i64, Shape{128, 128});
const auto image_shape = make_shared<op::Parameter>(element::i64, Shape{32, 32});
op::v8::PriorBox::Attributes attrs;
attrs.min_size = vector<float>{16.f, 32.f};
attrs.max_size = vector<float>{256.f, 512.f};
attrs.aspect_ratio = vector<float>{0.66f, 1.56f};
attrs.density = vector<float>{0.55f};
attrs.fixed_ratio = vector<float>{0.88f};
attrs.fixed_size = vector<float>{1.25f};
attrs.clip = true;
attrs.flip = false;
attrs.step = 1.0f;
attrs.offset = 0.0f;
attrs.variance = vector<float>{2.22f, 3.14f};
attrs.scale_all_sizes = true;
attrs.min_max_aspect_ratios_order = false;
auto prior_box = make_shared<opset8::PriorBox>(layer_shape, image_shape, attrs);
NodeBuilder builder(prior_box);
auto g_prior_box = ov::as_type_ptr<opset8::PriorBox>(builder.create());
const auto prior_box_attrs = prior_box->get_attrs();
const auto g_prior_box_attrs = g_prior_box->get_attrs();
const auto expected_attr_count = 13;
EXPECT_EQ(builder.get_value_map_size(), expected_attr_count);
EXPECT_EQ(g_prior_box_attrs.min_size, prior_box_attrs.min_size);
EXPECT_EQ(g_prior_box_attrs.max_size, prior_box_attrs.max_size);
EXPECT_EQ(g_prior_box_attrs.aspect_ratio, prior_box_attrs.aspect_ratio);
EXPECT_EQ(g_prior_box_attrs.density, prior_box_attrs.density);
EXPECT_EQ(g_prior_box_attrs.fixed_ratio, prior_box_attrs.fixed_ratio);
EXPECT_EQ(g_prior_box_attrs.fixed_size, prior_box_attrs.fixed_size);
EXPECT_EQ(g_prior_box_attrs.clip, prior_box_attrs.clip);
EXPECT_EQ(g_prior_box_attrs.flip, prior_box_attrs.flip);
EXPECT_EQ(g_prior_box_attrs.step, prior_box_attrs.step);
EXPECT_EQ(g_prior_box_attrs.offset, prior_box_attrs.offset);
EXPECT_EQ(g_prior_box_attrs.variance, prior_box_attrs.variance);
EXPECT_EQ(g_prior_box_attrs.scale_all_sizes, prior_box_attrs.scale_all_sizes);
EXPECT_EQ(g_prior_box_attrs.min_max_aspect_ratios_order, prior_box_attrs.min_max_aspect_ratios_order);
}

View File

@ -38,7 +38,7 @@ OutputVector prior_box(const Node& node) {
auto output_shape_slice = detail::make_slice(output_shape, 2, 4);
auto image_shape_slice = detail::make_slice(image_shape, 2, 4);
ngraph::op::PriorBoxAttrs attrs;
ngraph::op::v0::PriorBox::Attributes attrs;
attrs.min_size = node.get_attribute_value<std::vector<float>>("min_size", {});
attrs.max_size = node.get_attribute_value<std::vector<float>>("max_size", {});
attrs.aspect_ratio = node.get_attribute_value<std::vector<float>>("aspect_ratio", {});
@ -55,7 +55,7 @@ OutputVector prior_box(const Node& node) {
auto axes = default_opset::Constant::create(element::i64, Shape{1}, std::vector<int64_t>{0});
return {std::make_shared<default_opset::Unsqueeze>(
std::make_shared<default_opset::PriorBox>(output_shape_slice, image_shape_slice, attrs),
std::make_shared<ngraph::op::v0::PriorBox>(output_shape_slice, image_shape_slice, attrs),
axes)};
}

View File

@ -40,12 +40,11 @@ NamedOutputs prior_box(const NodeContext& node) {
attrs.flip = node.get_attribute<bool>("flip", false);
attrs.clip = node.get_attribute<bool>("clip", false);
attrs.step = node.get_attribute<float>("step_w", 0);
attrs.min_max_aspect_ratios_order = node.get_attribute<bool>("min_max_aspect_ratios_order", false);
attrs.offset = node.get_attribute<float>("offset", 0.5);
attrs.variance = node.get_attribute<std::vector<float>>("variances", {0.1, 0.1, 0.2, 0.2});
bool min_max_aspect_ratios_order = node.get_attribute<bool>("min_max_aspect_ratios_order", false);
const auto ov_prior_box_node = std::make_shared<PriorBox>(output_shape_slice, image_shape_slice, attrs);
const auto split_axis_node = Constant::create(i64, ov::Shape{}, {0});
@ -60,47 +59,6 @@ NamedOutputs prior_box(const NodeContext& node) {
auto node_boxes_reshape = std::make_shared<Reshape>(node_boxes_origin, out_shape, true);
const auto node_variances_reshape = std::make_shared<Reshape>(node_variances_origin, out_shape, true);
int64_t total_aspect_ratios = PriorBox::normalized_aspect_ratio(attrs.aspect_ratio, attrs.flip).size();
if ((total_aspect_ratios > 1) && !attrs.min_size.empty() && !attrs.max_size.empty() &&
!min_max_aspect_ratios_order) {
std::vector<int64_t> mask{1, 1, 1, 0, 1};
int64_t min_size_len = static_cast<int64_t>(attrs.min_size.size());
const auto out_shape_div_numpri = std::make_shared<Concat>(
NodeVector{output_shape_slice, Constant::create<int64_t>(i64, {3}, {min_size_len, -1, 4})},
0);
const auto node_boxes_div_numpri = std::make_shared<Reshape>(node_boxes_reshape, out_shape_div_numpri, true);
const auto slice_begin_min = Constant::create(i64, Shape{5}, std::vector<int64_t>{0, 0, 0, 0, 0});
const auto slice_end_min = std::make_shared<Concat>(
NodeVector{output_shape_slice, Constant::create<int64_t>(i64, {3}, {min_size_len, 1, 4})},
0);
const auto slice_min_node =
std::make_shared<StridedSlice>(node_boxes_div_numpri, slice_begin_min, slice_end_min, mask, mask);
const auto slice_begin_max = Constant::create(i64, Shape{5}, std::vector<int64_t>{0, 0, 0, 1, 0});
const auto slice_end_max = std::make_shared<Concat>(
NodeVector{output_shape_slice, Constant::create<int64_t>(i64, {3}, {min_size_len, 2, 4})},
0);
const auto slice_max_node =
std::make_shared<StridedSlice>(node_boxes_div_numpri, slice_begin_max, slice_end_max, mask, mask);
const auto slice_begin_aspect_ratios = Constant::create(i64, Shape{5}, std::vector<int64_t>{0, 0, 0, 2, 0});
const auto slice_end_aspect_ratios = std::make_shared<Concat>(
NodeVector{output_shape_slice,
Constant::create<int64_t>(i64, {3}, {min_size_len, 2 + (total_aspect_ratios - 1), 4})},
0);
const auto slice_aspect_ratios_node = std::make_shared<StridedSlice>(node_boxes_div_numpri,
slice_begin_aspect_ratios,
slice_end_aspect_ratios,
mask,
mask);
const auto node_boxes_div_numpri_reorder =
std::make_shared<Concat>(NodeVector{slice_min_node, slice_aspect_ratios_node, slice_max_node}, 3);
node_boxes_reshape = std::make_shared<Reshape>(node_boxes_div_numpri_reorder, out_shape, true);
}
NamedOutputs outputs;
outputs["Boxes"] = {node_boxes_reshape};
outputs["Variances"] = {node_variances_reshape};