Add new tests for Unsqueeze/Squeeze; refactoring; remove debug code

Ivan 2023-03-09 22:29:37 +04:00
parent e13d0e589f
commit a47a18cf55
6 changed files with 215 additions and 42 deletions

View File

@@ -140,6 +140,7 @@ bool ov::pass::MOCTransformations::run_on_model(const std::shared_ptr<ngraph::Fu
}
REGISTER_PASS(manager, ConvertQuantizeDequantize)
REGISTER_PASS(manager, SimplifyShapeOfSubGraph)
if (!m_use_shapes) {
manager.register_pass<ov::pass::DisableShapeOfConstantFolding>();
}
@@ -167,6 +168,7 @@ bool ov::pass::MOCTransformations::run_on_model(const std::shared_ptr<ngraph::Fu
eliminations->set_name("ov::pass::CommonEliminations");
manager.register_pass<ov::pass::ConstantFolding>();
auto common_fusions = manager.register_pass<ov::pass::GraphRewrite>();
ADD_MATCHER(common_fusions, ConvertScatterElementsToScatter)
ADD_MATCHER(common_fusions, SoftPlusFusion)
@@ -222,6 +224,7 @@ bool ov::pass::MOCTransformations::run_on_model(const std::shared_ptr<ngraph::Fu
ADD_MATCHER(multiply_fusions, MatMulMultiplyFusion)
multiply_fusions->set_name("ov::pass::MultiplyFusions");
REGISTER_PASS(manager, ConstantFolding)
auto fq_fusions = manager.register_pass<ov::pass::GraphRewrite>();
ADD_MATCHER(fq_fusions, FakeQuantizeMulFusion)
ADD_MATCHER(fq_fusions, FakeQuantizeReshapeFusion)

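The MOC hunks above register passes through the REGISTER_PASS and ADD_MATCHER convenience macros. As a point of reference, here is a minimal sketch of the equivalent explicit ov::pass::Manager calls; the pass selection is illustrative only (not the full MOC pipeline), and the function name register_example_passes is hypothetical.

#include "openvino/pass/manager.hpp"
#include "openvino/pass/graph_rewrite.hpp"
#include "openvino/pass/constant_folding.hpp"
#include "transformations/common_optimizations/softplus_fusion.hpp"

// In a default (non-selective) build, REGISTER_PASS(manager, X) effectively expands to
// manager.register_pass<ov::pass::X>(), and ADD_MATCHER(rewrite, X) to rewrite->add_matcher<ov::pass::X>().
void register_example_passes(ov::pass::Manager& manager) {
    manager.register_pass<ov::pass::ConstantFolding>();
    // A GraphRewrite groups several matcher passes into a single graph traversal
    auto fusions = manager.register_pass<ov::pass::GraphRewrite>();
    fusions->add_matcher<ov::pass::SoftPlusFusion>();
    fusions->set_name("ov::pass::ExampleFusions");
}
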
View File

@@ -71,7 +71,7 @@ bool get_keep_dims(const std::shared_ptr<Node>& reduction) {
auto arithmetic_reduce = std::dynamic_pointer_cast<ov::op::util::ArithmeticReductionKeepDims>(reduction);
auto logical_reduce = std::dynamic_pointer_cast<ov::op::util::LogicalReductionKeepDims>(reduction);
bool keep_dims = false; // squeeze always reduces number of output dimensions
bool keep_dims = false; // squeeze/unsqueeze always reduces number of output dimensions
if (logical_reduce)
keep_dims = logical_reduce->get_keep_dims();
else if (arithmetic_reduce)
@@ -100,6 +100,7 @@ ov::pass::TransposeSinkingReductionForward::TransposeSinkingReductionForward() {
auto reduction_axes = std::dynamic_pointer_cast<Constant>(reduction->get_input_node_shared_ptr(1));
if (!transpose_order || !reduction_axes)
return false;
auto unsqueeze = std::dynamic_pointer_cast<Unsqueeze>(reduction);
auto rank =
unsqueeze ? reduction->get_output_partial_shape(0).rank() : reduction->get_input_partial_shape(0).rank();
@@ -201,7 +202,7 @@ ov::pass::TransposeSinkingReductionBackward::TransposeSinkingReductionBackward()
}
}
}
bool special_case = false;
bool squeeze_all_dims = false;
if (!keep_dims) {
if (non_negative_axes.empty()) {
auto input_pshape = reduction->input_value(0).get_partial_shape();
@@ -211,7 +212,7 @@ ov::pass::TransposeSinkingReductionBackward::TransposeSinkingReductionBackward()
non_negative_axes.push_back(i);
}
}
special_case = true;
squeeze_all_dims = true;
} else {
return false;
}
@@ -233,26 +234,21 @@ ov::pass::TransposeSinkingReductionBackward::TransposeSinkingReductionBackward()
auto new_transpose_order = std::make_shared<Constant>(transpose_order->get_element_type(),
Shape{transpose_order_values.size()},
transpose_order_values);
if (special_case) {
auto new_transpose = transpose->clone_with_new_inputs({reduction->input_value(0), new_transpose_order});
auto new_reduction = reduction->clone_with_new_inputs({new_transpose, reduction->input_value(1)});
new_reduction->set_friendly_name(transpose->get_friendly_name());
replace_node(transpose, new_reduction);
transpose_sinking::UpdateForwardSinkingAbility(new_transpose);
copy_runtime_info({transpose, reduction}, {new_transpose, new_reduction});
register_new_node(new_transpose);
std::shared_ptr<Node> new_transpose, new_reduction;
if (squeeze_all_dims) {
new_transpose = transpose->clone_with_new_inputs({reduction->input_value(0), new_transpose_order});
new_reduction = reduction->clone_with_new_inputs({new_transpose, reduction->input_value(1)});
} else {
auto new_const =
std::make_shared<Constant>(reduction_axes->get_element_type(), reduction_axes->get_shape(), new_values);
auto new_transpose = transpose->clone_with_new_inputs({reduction->input_value(0), new_transpose_order});
auto new_reduction = reduction->clone_with_new_inputs({new_transpose, new_const});
replace_node(transpose, new_reduction);
copy_runtime_info({transpose, reduction}, {new_transpose, new_reduction});
transpose_sinking::UpdateForwardSinkingAbility(new_transpose);
new_reduction->set_friendly_name(transpose->get_friendly_name());
register_new_node(new_transpose);
new_transpose = transpose->clone_with_new_inputs({reduction->input_value(0), new_transpose_order});
new_reduction = reduction->clone_with_new_inputs({new_transpose, new_const});
}
replace_node(transpose, new_reduction);
copy_runtime_info({transpose, reduction}, {new_transpose, new_reduction});
transpose_sinking::UpdateForwardSinkingAbility(new_transpose);
new_reduction->set_friendly_name(transpose->get_friendly_name());
register_new_node(new_transpose);
return true;
};

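For readability, the refactored backward-sinking branch shown in the hunk above reads, consolidated, roughly as follows (local names as in the hunk): the if/else now only decides how the clones are built, and the rewiring steps run once afterwards.

// Sketch of the refactored branch: only the clone inputs differ per case,
// while the graph-rewiring steps are hoisted out of the branches and executed once.
std::shared_ptr<Node> new_transpose, new_reduction;
if (squeeze_all_dims) {
    // Squeeze with an empty axes input: reuse the original axes input
    new_transpose = transpose->clone_with_new_inputs({reduction->input_value(0), new_transpose_order});
    new_reduction = reduction->clone_with_new_inputs({new_transpose, reduction->input_value(1)});
} else {
    // General case: remap the reduction axes through the transpose order
    auto new_const =
        std::make_shared<Constant>(reduction_axes->get_element_type(), reduction_axes->get_shape(), new_values);
    new_transpose = transpose->clone_with_new_inputs({reduction->input_value(0), new_transpose_order});
    new_reduction = reduction->clone_with_new_inputs({new_transpose, new_const});
}
replace_node(transpose, new_reduction);
copy_runtime_info({transpose, reduction}, {new_transpose, new_reduction});
transpose_sinking::UpdateForwardSinkingAbility(new_transpose);
new_reduction->set_friendly_name(transpose->get_friendly_name());
register_new_node(new_transpose);
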
View File

@@ -79,6 +79,7 @@ ov::pass::TransposeToReshape::TransposeToReshape() {
Output<Node> reshape_dim;
NodeVector new_ops;
if (count_if(dims.begin(), dims.end(), [](const DimensionToPosition& item) {
return item.dim.is_dynamic();
}) < 2) {
@@ -102,7 +103,6 @@ ov::pass::TransposeToReshape::TransposeToReshape() {
reshape_op->set_friendly_name(transpose->get_friendly_name());
copy_runtime_info(transpose, new_ops);
replace_node(transpose, reshape_op);
return true;
};

View File

@@ -21,15 +21,62 @@ using namespace testing;
using namespace ngraph;
TEST(TransformationTests, ConvToBinaryConvOutputLowZeroOutputHighOne) {
std::shared_ptr<Function> f(nullptr);
std::shared_ptr<Function> f(nullptr), f_ref(nullptr);
{
auto data = std::make_shared<opset5::Parameter>(element::f32, Shape{1, 3, 2, 2});
auto act_in_low = opset5::Constant::create(element::f32, Shape{1}, {1.0f});
auto act_in_high = opset5::Constant::create(element::f32, Shape{1}, {3.0f});
auto act_out_low = opset5::Constant::create(element::f32, Shape{1}, {0.0f});
auto act_out_high = opset5::Constant::create(element::f32, Shape{1}, {1.0f});
auto act_fq =
std::make_shared<opset5::FakeQuantize>(data, act_in_low, act_in_high, act_out_low, act_out_high, 2);
auto weights = opset5::Constant::create(element::f32, Shape{1, 3, 1, 1}, {-1, 1, 1});
auto conv = std::make_shared<opset5::Convolution>(act_fq,
weights,
Strides{1, 1},
CoordinateDiff{0, 0},
CoordinateDiff{0, 0},
Strides{1, 1},
op::PadType::EXPLICIT);
auto act_in_low = opset5::Constant::create(element::f32, Shape{}, {1.0f});
auto act_in_high = opset5::Constant::create(element::i64, Shape{0}, {0});
auto transpose = std::make_shared<opset5::Transpose>(act_in_low, act_in_high);
auto model = std::make_shared<ov::Model>(ov::OutputVector{transpose}, ParameterVector{});
ov::pass::Manager manager;
manager.register_pass<ov::pass::ConstantFolding>();
manager.run_passes(model);
f = std::make_shared<Function>(NodeVector{conv}, ParameterVector{data});
pass::Manager m;
m.register_pass<ov::pass::InitNodeInfo>();
m.register_pass<ov::pass::ConvToBinaryConv>();
m.register_pass<ov::pass::ConstantFolding>();
m.run_passes(f);
ASSERT_NO_THROW(check_rt_info(f));
}
{
auto data = std::make_shared<opset5::Parameter>(element::f32, Shape{1, 3, 2, 2});
auto act_in_low = opset5::Constant::create(element::f32, Shape{1}, {1.0f});
auto act_in_high = opset5::Constant::create(element::f32, Shape{1}, {3.0f});
auto act_out_low = opset5::Constant::create(element::f32, Shape{1}, {0.0f});
auto act_out_high = opset5::Constant::create(element::f32, Shape{1}, {1.0f});
auto act_fq =
std::make_shared<opset5::FakeQuantize>(data, act_in_low, act_in_high, act_out_low, act_out_high, 2);
uint8_t weights_val = 6;
auto weights = std::make_shared<opset5::Constant>(element::u1, Shape{1, 3, 1, 1}, &weights_val);
auto conv =
std::make_shared<opset5::BinaryConvolution>(act_fq,
weights,
Strides{1, 1},
CoordinateDiff{0, 0},
CoordinateDiff{0, 0},
Strides{1, 1},
opset5::BinaryConvolution::BinaryConvolutionMode::XNOR_POPCOUNT,
-1.0f,
op::PadType::EXPLICIT);
auto add = std::make_shared<opset5::Add>(conv, opset5::Constant::create(element::f32, Shape{1, 1, 1}, {0.7f}));
auto mul = std::make_shared<opset5::Multiply>(add, opset5::Constant::create(element::f32, Shape{}, {0.2f}));
f_ref = std::make_shared<Function>(NodeVector{mul}, ParameterVector{data});
}
auto res = compare_functions(f, f_ref);
ASSERT_TRUE(res.first) << res.second;
}
TEST(TransformationTests, ConvToBinaryConvOutputLowMinusOneOutputHighOne) {

View File

@@ -18,20 +18,6 @@
using namespace testing;
/*
TEST(TransformationTests, ConcatTest) {
auto data1 = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::f32, ngraph::Shape{1, 28, 28, 1, 58});
auto data2 = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::f32, ngraph::Shape{1, 28, 28, 1, 58});
int axis = 3;
std::shared_ptr<ov::Node> X = data1;
for (int i = 0; i < 1000; ++i) {
X = std::make_shared<ngraph::opset1::Concat>(ov::OutputVector{X, data2}, axis);
}
auto model = std::make_shared<ov::Model>(ov::OutputVector{X}, ov::ParameterVector{data1, data2});
serialize(model, "/home/tikhonov/OpenVINO/tmp/concat_5d_axis" + std::to_string(axis) +".xml");
}
*/
TEST(TransformationTests, DivideFusion) {
std::shared_ptr<ngraph::Function> f, f_ref;
{

View File

@@ -646,6 +646,81 @@ INSTANTIATE_TEST_SUITE_P(TransposeSinkingCommonInterpolateForward,
TransposeSinkingTestFixture,
test_forward_interpolate());
auto test_forward_squeeze = []() {
TestCase test_case;
// Initialize common attributes
test_case.transformation = CREATE_PASS_FACTORY(TransposeSinkingReductionForward);
test_case.num_main_ops = {1};
test_case.inputs_to_main = {
parameter(element::f32, {32, 1, 2, 1}),
constant<int64_t>(element::i32, {2}, {0, 2}),
};
// Test model description:
test_case.model.preprocess_inputs_to_main = {{set_transpose_for}, {{0}}};
test_case.model.main_op = {CREATE_BINARY_FACTORY(Squeeze)};
test_case.model.model_template = transpose_sinking::common::create_model;
// Reference model description:
auto new_constant = [](const vector<size_t>& idxs, const OutputVector& out_vec) -> OutputVector {
OutputVector new_out_vec(out_vec.size());
new_out_vec[0] = out_vec[0];
new_out_vec[1] =
make_shared<Constant>(out_vec[1].get_element_type(), out_vec[1].get_shape(), std::vector<int64_t>{3, 1});
return new_out_vec;
};
test_case.model_ref.preprocess_inputs_to_main = {{new_constant}, {{1}}};
test_case.model_ref.main_op = {CREATE_BINARY_FACTORY(Squeeze)};
test_case.model_ref.preprocess_outputs_of_main = {{set_transpose_for}, {{0}}};
test_case.model_ref.model_template = transpose_sinking::common::create_model;
return wrapper(test_case);
};
INSTANTIATE_TEST_SUITE_P(TransposeSinkingCommonSqueezeForward, TransposeSinkingTestFixture, test_forward_squeeze());
auto test_forward_unsqueeze = []() {
TestCase test_case;
// Initialize common attributes
test_case.transformation = CREATE_PASS_FACTORY(TransposeSinkingReductionForward);
test_case.num_main_ops = {1};
test_case.inputs_to_main = {
parameter(element::f32, {32, 3, 2, 1}),
constant<int64_t>(element::i32, {2}, {0, 2}),
};
// Test model description:
test_case.model.preprocess_inputs_to_main = {{set_transpose_for}, {{0}}};
test_case.model.main_op = {CREATE_BINARY_FACTORY(Unsqueeze)};
test_case.model.model_template = transpose_sinking::common::create_model;
// Reference model description:
auto new_constant = [](const vector<size_t>& idxs, const OutputVector& out_vec) -> OutputVector {
OutputVector new_out_vec(out_vec.size());
new_out_vec[0] = out_vec[0];
new_out_vec[1] =
make_shared<Constant>(out_vec[1].get_element_type(), out_vec[1].get_shape(), std::vector<int64_t>{0, 2});
return new_out_vec;
};
test_case.model_ref.preprocess_inputs_to_main = {{new_constant}, {{1}}};
test_case.model_ref.main_op = {CREATE_BINARY_FACTORY(Unsqueeze)};
auto new_transpose = [](const vector<size_t>& idxs, const OutputVector& out_vec) -> OutputVector {
OutputVector new_out_vec(out_vec.size());
auto order = make_shared<Constant>(element::i32, Shape{6}, std::vector<int64_t>{0, 5, 2, 4, 3, 1});
new_out_vec[0] = make_shared<Transpose>(out_vec[0], order);
return new_out_vec;
};
test_case.model_ref.preprocess_outputs_of_main = {{new_transpose}, {{0}}};
test_case.model_ref.model_template = transpose_sinking::common::create_model;
return wrapper(test_case);
};
INSTANTIATE_TEST_SUITE_P(TransposeSinkingCommonUnsqueezeForward, TransposeSinkingTestFixture, test_forward_unsqueeze());
// ------------------ BACKWARD --------------------
auto test_backward_unary = []() {
@@ -923,5 +998,71 @@ INSTANTIATE_TEST_SUITE_P(TransposeSinkingCommonInterpolateBackward,
TransposeSinkingTestFixture,
test_backward_interpolate());
auto test_backward_squeeze = []() {
TestCase test_case;
// Initialize common attributes
test_case.transformation = CREATE_PASS_FACTORY(TransposeSinkingReductionBackward);
test_case.num_main_ops = {1};
test_case.inputs_to_main = {
parameter(element::f32, {32, 1, 2, 1}),
constant<int64_t>(element::i32, {2}, {1, 3}),
};
// Test model description:
test_case.model.main_op = {CREATE_BINARY_FACTORY(Squeeze)};
test_case.model.preprocess_outputs_of_main = {{set_transpose_for}, {{0}}};
test_case.model.model_template = transpose_sinking::common::create_model;
// Reference model description:
auto new_transpose = [](const vector<size_t>& idxs, const OutputVector& out_vec) -> OutputVector {
OutputVector new_out_vec(out_vec.size());
auto order = make_shared<Constant>(element::i32, Shape{4}, std::vector<int64_t>{2, 1, 0, 3});
new_out_vec[0] = make_shared<Transpose>(out_vec[0], order);
new_out_vec[1] = out_vec[1];
return new_out_vec;
};
test_case.model_ref.preprocess_inputs_to_main = {{new_transpose}, {{0}}};
test_case.model_ref.main_op = {CREATE_BINARY_FACTORY(Squeeze)};
test_case.model_ref.model_template = transpose_sinking::common::create_model;
return wrapper(test_case);
};
INSTANTIATE_TEST_SUITE_P(TransposeSinkingCommonSqueezeBackward, TransposeSinkingTestFixture, test_backward_squeeze());
auto test_backward_unsqueeze = []() {
TestCase test_case;
// Initialize common attributes
test_case.transformation = CREATE_PASS_FACTORY(TransposeSinkingReductionBackward);
test_case.num_main_ops = {1};
test_case.inputs_to_main = {
parameter(element::f32, {32, 3, 2, 1}),
constant<int64_t>(element::i32, {2}, {0, 2}),
};
// Test model description:
test_case.model.main_op = {CREATE_BINARY_FACTORY(Unsqueeze)};
test_case.model.preprocess_outputs_of_main = {{set_transpose_for}, {{0}}};
test_case.model.model_template = transpose_sinking::common::create_model;
// Reference model description:
auto new_constant = [](const vector<size_t>& idxs, const OutputVector& out_vec) -> OutputVector {
OutputVector new_out_vec(out_vec.size());
new_out_vec[0] = out_vec[0];
new_out_vec[1] =
make_shared<Constant>(out_vec[1].get_element_type(), out_vec[1].get_shape(), std::vector<int64_t>{5, 3});
return new_out_vec;
};
test_case.model_ref.preprocess_inputs_to_main = {{set_transpose_for, new_constant}, {{0}, {1}}};
test_case.model_ref.main_op = {CREATE_BINARY_FACTORY(Unsqueeze)};
test_case.model_ref.model_template = transpose_sinking::common::create_model;
return wrapper(test_case);
};
INSTANTIATE_TEST_SUITE_P(TransposeSinkingCommonUnsqueezeBackward, TransposeSinkingTestFixture, test_backward_unsqueeze());
} // namespace common
} // namespace transpose_sinking