[CPU] Proposal layer. Check img info input. (#2456)

Nikolay Shchegolev 2020-11-11 00:36:38 +03:00 committed by GitHub
parent bd091500cd
commit eb82adeb3a
8 changed files with 214 additions and 16 deletions


@@ -163,15 +163,20 @@ public:
// input image height & width
const float img_H = p_img_info_cpu[0];
const float img_W = p_img_info_cpu[1];
if (!std::isnormal(img_H) || !std::isnormal(img_W) || (img_H < 0.f) || (img_W < 0.f)) {
THROW_IE_EXCEPTION << "Proposal operation image info input must have positive image height and width.";
}
// scale factor for height & width
const float scale_H = p_img_info_cpu[2];
const float scale_W = img_info_size == 4 ? p_img_info_cpu[3] : scale_H;
if (!std::isfinite(scale_H) || !std::isfinite(scale_W) || (scale_H < 0.f) || (scale_W < 0.f)) {
THROW_IE_EXCEPTION << "Proposal operation image info input must have non negative scales.";
}
XARCH::proposal_exec(p_bottom_item, p_d_anchor_item, dims0,
{img_H, img_W, scale_H, scale_W}, anchors.data(), roi_indices.data(), p_roi_item, p_prob_item, conf);
return OK;
} catch (const InferenceEngine::details::InferenceEngineException& e) {
if (resp) {

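For illustration only (not part of the patch): a minimal standalone sketch of the acceptance rule the two new checks implement, fed with entries mirroring the img_info_invalid list added in the tests below. The helpers validSize/validScale are hypothetical names, not plugin code.

// Standalone sketch: height/width must be normal, non-negative floats;
// scales must be finite and non-negative, matching the checks above.
#include <cmath>
#include <cstdio>
#include <vector>

static bool validSize(float v)  { return std::isnormal(v) && !(v < 0.f); }   // img_H, img_W
static bool validScale(float v) { return std::isfinite(v) && !(v < 0.f); }   // scale_H, scale_W

int main() {
    const std::vector<std::vector<float>> infos = {
        {225.f, 225.f, 1.f},       // valid reference case
        {0.f, 225.f, 1.f},         // zero height, rejected by isnormal
        {225.f, -1.f, 1.f},        // negative width
        {225.f, NAN, 1.f},         // NaN width
        {INFINITY, 100.f, 1.f},    // infinite height
        {225.f, 100.f, NAN},       // NaN scale
        {225.f, 100.f, INFINITY},  // infinite scale
    };
    for (const auto& i : infos) {
        const bool ok = validSize(i[0]) && validSize(i[1]) && validScale(i[2]);
        std::printf("{%g, %g, %g} -> %s\n", i[0], i[1], i[2], ok ? "accepted" : "would throw");
    }
    return 0;
}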

@@ -0,0 +1,55 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <vector>
#include "behavior/invalid_cases/proposal.hpp"
using namespace ngraph::helpers;
using namespace LayerTestsDefinitions;
using namespace BehaviorTestsDefinitions;
namespace {
/* ============= Proposal ============= */
const std::vector<base_size_type> base_size_ = {16};
const std::vector<pre_nms_topn_type> pre_nms_topn_ = {100};
const std::vector<post_nms_topn_type> post_nms_topn_ = {100};
const std::vector<nms_thresh_type> nms_thresh_ = {0.7f};
const std::vector<min_size_type> min_size_ = {1};
const std::vector<ratio_type> ratio_ = {{1.0f, 2.0f}};
const std::vector<scale_type> scale_ = {{1.2f, 1.5f}};
const std::vector<clip_before_nms_type> clip_before_nms_ = {false};
const std::vector<clip_after_nms_type> clip_after_nms_ = {false};
const std::vector<std::vector<float>> img_info_invalid = {{0.f, 225.f, 1.f},
{225.f, -1.f, 1.f},
{225.f, NAN, 1.f},
{INFINITY, 100.f, 1.f},
{225.f, 100.f, NAN},
{225.f, 100.f, INFINITY}};
// empty string corresponds to Caffe framework
const std::vector<framework_type> framework_ = {""};
const auto proposalParams = ::testing::Combine(
::testing::ValuesIn(base_size_),
::testing::ValuesIn(pre_nms_topn_),
::testing::ValuesIn(post_nms_topn_),
::testing::ValuesIn(nms_thresh_),
::testing::ValuesIn(min_size_),
::testing::ValuesIn(ratio_),
::testing::ValuesIn(scale_),
::testing::ValuesIn(clip_before_nms_),
::testing::ValuesIn(clip_after_nms_),
::testing::ValuesIn(framework_)
);
INSTANTIATE_TEST_CASE_P(invalid, ProposalBehTest,
::testing::Combine(
proposalParams,
::testing::ValuesIn(img_info_invalid),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
ProposalBehTest::getTestCaseName
);
} // namespace


@@ -0,0 +1,33 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "single_layer_tests/proposal.hpp"
namespace BehaviorTestsDefinitions {
typedef std::tuple<
LayerTestsDefinitions::proposalSpecificParams,
std::vector<float>,
std::string> proposalBehTestParamsSet;
class ProposalBehTest
: public testing::WithParamInterface<proposalBehTestParamsSet>,
virtual public LayerTestsUtils::LayerTestsCommon {
public:
static std::string getTestCaseName(testing::TestParamInfo<proposalBehTestParamsSet> obj);
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo &info) const override;
protected:
void SetUp() override;
void Validate() override {};
const LayerTestsDefinitions::normalize_type normalize = true;
const LayerTestsDefinitions::feat_stride_type feat_stride = 1;
const LayerTestsDefinitions::box_size_scale_type box_size_scale = 2.0f;
const LayerTestsDefinitions::box_coordinate_scale_type box_coordinate_scale = 2.0f;
};
} // namespace BehaviorTestsDefinitions


@@ -56,6 +56,7 @@ class ProposalLayerTest
virtual public LayerTestsUtils::LayerTestsCommon {
public:
static std::string getTestCaseName(testing::TestParamInfo<proposalLayerTestParamsSet> obj);
static std::string SerializeProposalSpecificParams(proposalSpecificParams& params);
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo &info) const override;
protected:


@@ -0,0 +1,98 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/invalid_cases/proposal.hpp"
using namespace BehaviorTestsDefinitions;
using namespace LayerTestsDefinitions;
std::string ProposalBehTest::getTestCaseName(testing::TestParamInfo<proposalBehTestParamsSet> obj) {
proposalSpecificParams proposalParams;
std::string targetDevice;
std::vector<float> img_info;
std::tie(proposalParams, img_info, targetDevice) = obj.param;
auto proposalParamString = ProposalLayerTest::SerializeProposalSpecificParams(proposalParams);
std::ostringstream result;
result << "img_info=" << CommonTestUtils::vec2str(img_info) << "_";
result << "targetDevice=" << targetDevice;
return proposalParamString + result.str();
}
InferenceEngine::Blob::Ptr ProposalBehTest::GenerateInput(const InferenceEngine::InputInfo &info) const {
InferenceEngine::Blob::Ptr blobPtr;
const std::string name = info.name();
if (name == "scores") {
blobPtr = FuncTestUtils::createAndFillBlobFloat(info.getTensorDesc(), 1, 0, 1000, 8234231);
} else if (name == "boxes") {
blobPtr = FuncTestUtils::createAndFillBlobFloatNormalDistribution(info.getTensorDesc(), 0.0f, 0.2f, 7235346);
}
return blobPtr;
}
void ProposalBehTest::SetUp() {
proposalSpecificParams proposalParams;
std::vector<float> img_info;
std::tie(proposalParams, img_info, targetDevice) = this->GetParam();
base_size_type base_size;
pre_nms_topn_type pre_nms_topn;
post_nms_topn_type post_nms_topn;
nms_thresh_type nms_thresh;
min_size_type min_size;
ratio_type ratio;
scale_type scale;
clip_before_nms_type clip_before_nms;
clip_after_nms_type clip_after_nms;
framework_type framework;
std::tie(base_size, pre_nms_topn,
post_nms_topn,
nms_thresh,
min_size,
ratio,
scale,
clip_before_nms,
clip_after_nms,
framework) = proposalParams;
size_t bottom_w = base_size;
size_t bottom_h = base_size;
size_t num_anchors = ratio.size() * scale.size();
std::vector<size_t> scoresShape = {1, 2 * num_anchors, bottom_h, bottom_w};
std::vector<size_t> boxesShape = {1, 4 * num_anchors, bottom_h, bottom_w};
std::vector<size_t> imageInfoShape = {3};
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(InferenceEngine::Precision::FP16);
auto params = ngraph::builder::makeParams(ngPrc, {{"scores", scoresShape}, {"boxes", boxesShape}});
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto proposal = std::dynamic_pointer_cast<ngraph::opset1::Proposal>(
ngraph::builder::makeProposal(paramOuts[0], paramOuts[1], img_info, ngPrc,
base_size,
pre_nms_topn,
post_nms_topn,
nms_thresh,
feat_stride,
min_size,
ratio,
scale,
clip_before_nms,
clip_after_nms,
normalize,
box_size_scale,
box_coordinate_scale,
framework));
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(proposal)};
function = std::make_shared<ngraph::Function>(results, params, "proposal");
}
TEST_P(ProposalBehTest, CompareWithRefs) {
ASSERT_THROW(Run(), InferenceEngine::details::InferenceEngineException);
}
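For orientation, a rough, hedged sketch of the load-and-infer path that Run() exercises for these invalid cases, assuming the InferenceEngine Core API of that release (tryInfer and the local names are illustrative; the argument is the ngraph::Function built in ProposalBehTest::SetUp()). The new check is expected to fire while the CPU plugin executes the Proposal node:

// Hedged sketch, not the actual LayerTestsCommon implementation.
#include <ie_core.hpp>
#include <iostream>
#include <memory>
#include <ngraph/function.hpp>

void tryInfer(const std::shared_ptr<ngraph::Function>& function) {
    InferenceEngine::CNNNetwork cnnNet(function);
    InferenceEngine::Core core;
    auto execNet = core.LoadNetwork(cnnNet, "CPU");
    auto request = execNet.CreateInferRequest();
    try {
        request.Infer();  // the Proposal node reads the image-info values here and throws on invalid ones
    } catch (const InferenceEngine::details::InferenceEngineException& ex) {
        std::cerr << "rejected: " << ex.what() << std::endl;  // message produced by THROW_IE_EXCEPTION above
    }
}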


@@ -25,12 +25,7 @@ const feat_stride_type feat_stride = 1;
const box_size_scale_type box_size_scale = 2.0f;
const box_coordinate_scale_type box_coordinate_scale = 2.0f;
std::string ProposalLayerTest::getTestCaseName(testing::TestParamInfo<proposalLayerTestParamsSet> obj) {
proposalSpecificParams proposalParams;
std::string targetDevice;
std::tie(proposalParams, targetDevice) = obj.param;
std::string ProposalLayerTest::SerializeProposalSpecificParams(proposalSpecificParams& params) {
base_size_type base_size;
pre_nms_topn_type pre_nms_topn;
post_nms_topn_type post_nms_topn;
@@ -49,7 +44,7 @@ std::string ProposalLayerTest::getTestCaseName(testing::TestParamInfo<proposalLa
scale,
clip_before_nms,
clip_after_nms,
framework) = proposalParams;
framework) = params;
std::ostringstream result;
result << "base_size=" << base_size << "_";
@@ -66,13 +61,25 @@ std::string ProposalLayerTest::getTestCaseName(testing::TestParamInfo<proposalLa
result << "box_size_scale=" << box_size_scale << "_";
result << "box_coordinate_scale=" << box_coordinate_scale << "_";
result << "framework=" << framework << "_";
result << "targetDevice=" << targetDevice;
return result.str();
}
std::string ProposalLayerTest::getTestCaseName(testing::TestParamInfo<proposalLayerTestParamsSet> obj) {
proposalSpecificParams proposalParams;
std::string targetDevice;
std::tie(proposalParams, targetDevice) = obj.param;
auto proposalParamString = SerializeProposalSpecificParams(proposalParams);
std::ostringstream result;
result << "targetDevice=" << targetDevice;
return proposalParamString + result.str();
}
void ProposalLayerTest::SetUp() {
proposalSpecificParams proposalParams;
std::vector<float> img_info = {225.0f, 225.0f, 1.0f};
std::tie(proposalParams, targetDevice) = this->GetParam();
base_size_type base_size;
@@ -105,11 +112,11 @@ void ProposalLayerTest::SetUp() {
std::vector<size_t> imageInfoShape = {3};
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(InferenceEngine::Precision::FP16);
auto params = ngraph::builder::makeParams(ngPrc, {{"scores", scoresShape}, {"boxes", boxesShape}, {"image_info", imageInfoShape}});
auto params = ngraph::builder::makeParams(ngPrc, {{"scores", scoresShape}, {"boxes", boxesShape}});
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
auto proposal = std::dynamic_pointer_cast<ngraph::opset1::Proposal>(
ngraph::builder::makeProposal(paramOuts[0], paramOuts[1], paramOuts[2], ngPrc,
ngraph::builder::makeProposal(paramOuts[0], paramOuts[1], img_info, ngPrc,
base_size,
pre_nms_topn,
post_nms_topn,
@@ -137,9 +144,6 @@ InferenceEngine::Blob::Ptr ProposalLayerTest::GenerateInput(const InferenceEngin
blobPtr = FuncTestUtils::createAndFillBlobFloat(info.getTensorDesc(), 1, 0, 1000, 8234231);
} else if (name == "boxes") {
blobPtr = FuncTestUtils::createAndFillBlobFloatNormalDistribution(info.getTensorDesc(), 0.0f, 0.2f, 7235346);
} else if (name == "image_info") {
const float image_info[] = {225.0f, 225.0f, 1.0f};
blobPtr = FuncTestUtils::createAndFillBlobWithFloatArray(info.getTensorDesc(), image_info, 3);
}
return blobPtr;


@@ -247,7 +247,7 @@ std::shared_ptr<ngraph::Node> makeMinMax(const ngraph::Output<Node> &in1,
std::shared_ptr<ngraph::Node> makeProposal(const ngraph::Output<Node> &class_probs,
const ngraph::Output<Node> &class_logits,
const ngraph::Output<Node> &image_shape,
const std::vector<float>& image_info,
const element::Type &type,
size_t base_size,
size_t pre_nms_topn,


@@ -13,7 +13,7 @@ namespace builder {
std::shared_ptr<Node> makeProposal(const ngraph::Output<Node> &class_probs,
const ngraph::Output<Node> &class_logits,
const ngraph::Output<Node> &image_shape,
const std::vector<float>& image_info,
const element::Type &type,
size_t base_size,
size_t pre_nms_topn,
@@ -45,6 +45,8 @@ std::shared_ptr<Node> makeProposal(const ngraph::Output<Node> &class_probs,
attrs.box_coordinate_scale = box_coordinate_scale;
attrs.framework = framework;
auto image_shape = makeConstant(ngraph::element::Type_t::f32, {3}, image_info);
return std::make_shared<opset1::Proposal>(class_probs, class_logits, image_shape, attrs);
}