[LPT] Legacy compliance restrictions removal all: Reshape (#6870)

* [LPT] Reshape: legacy compliance restrictions removal

* [LPT] comment fixes
Edward Shogulin 2021-08-06 08:27:34 +01:00 committed by GitHub
parent fc39303677
commit 43f18da413
14 changed files with 430 additions and 275 deletions

View File

@@ -151,7 +151,7 @@ public:
     static bool isQuantizeSupported(const std::shared_ptr<opset1::FakeQuantize>& fakeQuantize);
-    static FakeQuantizeDequantization getDequantization(const std::shared_ptr<Node>& node, const size_t parentIndex = 0ul, const bool inPlace = false);
+    static FakeQuantizeDequantization getDequantization(const std::shared_ptr<const Node>& node, const size_t parentIndex = 0ul, const bool inPlace = false);
     static FakeQuantizeDequantization getDequantizationBelow(const std::shared_ptr<Node>& node, const bool convertIsMandatory = false);

View File

@@ -5,9 +5,7 @@
 #include "low_precision/concat.hpp"

 #include <algorithm>
-#include <map>
 #include <memory>
-#include <string>
 #include <utility>
 #include <vector>
@@ -189,7 +187,6 @@ bool ConcatTransformation::canBeTransformed(const TransformationContext& context
     const auto outPShape = concat->get_output_partial_shape(0);
     const size_t normalizedAxis = ngraph::normalize_axis(concat->get_friendly_name(), axis, outPShape.rank());

-    // TODO: LPT: to support current flow: #58269
     if (normalizedAxis != 1ul) {
         return false;
     }
@@ -198,8 +195,6 @@ bool ConcatTransformation::canBeTransformed(const TransformationContext& context
         return false;
     }

-    const bool perTensorQuantizationIsRequired = normalizedAxis != 1ul;
-
     element::Type precision;
     for (size_t i = 0ul; i < concat->get_input_size(); i++) {
         const FakeQuantizeDequantization dequantization = NetworkHelper::getDequantization(concat, i);
@@ -212,12 +207,6 @@ bool ConcatTransformation::canBeTransformed(const TransformationContext& context
         } else if (precision != dequantization.data.get_element_type()) {
             return false;
         }
-
-        if (perTensorQuantizationIsRequired &&
-            (((dequantization.subtractConstant != nullptr) && !NetworkHelper::isScalarLike(dequantization.subtractConstant)) ||
-            ((dequantization.multiplyConstant != nullptr) && !NetworkHelper::isScalarLike(dequantization.multiplyConstant)))) {
-            return false;
-        }
     }

     return true;
 }
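The branch removed above enforced per-tensor quantization: when it applied, a Subtract or Multiply dequantization constant was rejected unless it was scalar-like. A minimal sketch of that predicate over raw values, assuming "scalar-like" means every element of the constant is equal (the free function below is illustrative, not the NetworkHelper definition):

    #include <algorithm>
    #include <functional>
    #include <vector>

    // A constant is "scalar-like" when broadcasting it is equivalent to
    // applying a single scalar, i.e. every element holds the same value.
    bool isScalarLike(const std::vector<float>& constantValues) {
        return std::adjacent_find(constantValues.begin(), constantValues.end(),
                                  std::not_equal_to<float>()) == constantValues.end();
    }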

View File

@@ -1268,7 +1268,7 @@ bool NetworkHelper::isQuantizeSupported(const std::shared_ptr<opset1::FakeQuanti
     return QuantizationDetails::outputLayoutIsSupported(fakeQuantize) && QuantizationDetails::isSupportedLevel(fakeQuantize->get_levels());
 }

-FakeQuantizeDequantization NetworkHelper::getDequantization(const std::shared_ptr<Node>& node, const size_t parentIndex, const bool inPlace) {
+FakeQuantizeDequantization NetworkHelper::getDequantization(const std::shared_ptr<const Node>& node, const size_t parentIndex, const bool inPlace) {
     auto getDataIndex = [](const std::shared_ptr<ngraph::Node>& node) {
         if (is_type<opset1::Constant>(node->get_input_node_ptr(1))) {
             return 0ul;
@@ -1285,7 +1285,7 @@ FakeQuantizeDequantization NetworkHelper::getDequantization(const std::shared_pt
         return 1ul;
     };

-    Output<Node> dataNode = inPlace ? node->output(0) : node->input_value(parentIndex);
+    Output<Node> dataNode = inPlace ? std::const_pointer_cast<Node>(node)->output(0) : node->input_value(parentIndex);
     const std::shared_ptr<ngraph::opset1::Multiply> multiply = as_type_ptr<ngraph::opset1::Multiply>(dataNode.get_node_shared_ptr());
     std::shared_ptr<opset1::Constant> multiplyConstant;
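The signature change lets callers pass const node pointers; the only mutable access the function still needs is output(0) in the inPlace branch, recovered via std::const_pointer_cast. A standalone sketch of the pattern, with a hypothetical stand-in type rather than the ngraph Node class:

    #include <memory>

    struct NodeLike {
        int output(int index) { return index; }  // non-const accessor, as in the code above
    };

    // Accepting shared_ptr<const NodeLike> documents that the function does not
    // mutate the node; const_pointer_cast recovers a mutable handle only where
    // the underlying API offers no suitable const overload.
    int firstOutput(const std::shared_ptr<const NodeLike>& node) {
        return std::const_pointer_cast<NodeLike>(node)->output(0);
    }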

View File

@@ -38,131 +38,80 @@ ReshapeTransformation::ReshapeTransformation(const Params& params) : LayerTransf
 }

 void reshapeDequantizationConstant(const std::shared_ptr<opset1::Reshape>& reshape) {
+    // Reshape dequantization operation Constant.
+    //    1. Calculate result dequantization Constant shape for broadcast based on original dequantization Constant shape and Reshape output.
+    //    For example: dequantization shape {1, 3, 1, 1}, output Reshape shape {1, 12, 3, 3}, result for broadcast: {1, 3, 4, 1},
+    //    where '4' calculated for temporary broadcast before reshape.
+    //    2. Broadcast dequantization Constant, if channels are changed
+    //    3. Reshape and replace
+    auto replaceConstant = [](const std::shared_ptr<opset1::Reshape>& reshape, const std::shared_ptr<opset1::Constant>& originalConstant) {
+        // reshape for element-wise constant is not required
+        auto constantShape = originalConstant->get_shape();
+        if (shape_size(constantShape) == 1ul) {
+            if (!constantShape.empty()) {
+                const auto newConstant = NetworkHelper::toScalar(originalConstant);
+                replace_node(originalConstant, newConstant);
+            }
+            return;
+        }
+
+        auto const reshapeInputRank = reshape->get_input_partial_shape(0).rank();
+        assert(reshapeInputRank.is_static());
+        if (constantShape.size() > 1ul) {
+            while (constantShape.size() < static_cast<size_t>(reshapeInputRank.get_length())) {
+                constantShape.insert(constantShape.begin(), 1ul);
+            }
+        }
+
+        const auto reshapeOutputPShape = reshape->output(0).get_partial_shape();
+        const auto reshapeOutputRank = reshapeOutputPShape.rank();
+        assert(reshapeOutputRank.is_static());
+        assert(reshapeOutputRank.get_length() >= 2);
+        assert(reshapeOutputPShape[1].is_static());
+        assert(static_cast<size_t>(reshapeOutputPShape[1].get_length()) >= constantShape[1]);
+        assert(reshapeOutputPShape[1].get_length() % constantShape[1] == 0);
+        const size_t dimensionsToBroadcast = reshapeOutputPShape[1].get_length() / constantShape[1];
+        if (dimensionsToBroadcast == 0ul) {
+            return;
+        }
+
+        Shape newOperationConstantBroadcastedShape = originalConstant->output(0).get_shape();
+        // add dimensions to broadcast values
+        if (newOperationConstantBroadcastedShape.size() == 2ul) {
+            newOperationConstantBroadcastedShape.push_back(dimensionsToBroadcast);
+        } else {
+            newOperationConstantBroadcastedShape[2] = dimensionsToBroadcast;
+        }
+        const std::shared_ptr<Node> broadcastedConstant = fold<opset1::Broadcast>(
+            originalConstant,
+            std::make_shared<opset1::Constant>(
+                element::i32,
+                Shape({ newOperationConstantBroadcastedShape.size() }),
+                newOperationConstantBroadcastedShape));
+
+        std::vector<int> newReshapeConstValues(reshapeOutputRank.get_length(), 1ul);
+        newReshapeConstValues[1] = reshapeOutputPShape[1].get_length();
+        const std::shared_ptr<opset1::Constant> newReshapeConstant = std::make_shared<opset1::Constant>(
+            element::i32,
+            Shape({ newReshapeConstValues.size() }),
+            newReshapeConstValues);
+
+        const std::shared_ptr<Node> resultConstant = fold<opset1::Reshape>(
+            broadcastedConstant,
+            newReshapeConstant,
+            reshape->get_special_zero());
+
+        replace_node(originalConstant, resultConstant);
+    };
+
     const FakeQuantizeDequantization dequantization = NetworkHelper::getDequantization(reshape, 0);
-    if (dequantization.multiplyConstant->get_shape().size() > 1ul) {
-        // Reshape Subtract or Multiply operation Constant.
-        //    1. modify reshape parameters to avoid reshape by spatial dimensions
-        //    2. broadcast element-wise constant if channels are changed
-        //    3. reshape element-wise constant with modified reshape parameters
-        auto replaceConstant = [](const std::shared_ptr<opset1::Reshape>& reshape, const std::shared_ptr<Node>& op) {
-            const size_t constantIndex = as_type<ngraph::opset1::Constant>(op->get_input_node_ptr(1)) ? 1 : 0;
-            const auto originalConstant = as_type_ptr<opset1::Constant>(op->get_input_node_shared_ptr(constantIndex));
-            const auto constantShape = originalConstant->get_shape();
-
-            // reshape for element-wise constant is not required
-            if (shape_size(constantShape) == 1ul) {
-                if (constantShape.size() > 1ul) {
-                    const Shape newConstShape = Shape(reshape->get_output_partial_shape(0).rank().get_length(), 1ul);
-                    const auto newConstant = opset1::Constant::create(
-                        originalConstant->get_element_type(), newConstShape, originalConstant->cast_vector<float>());
-                    replace_node(op->get_input_node_shared_ptr(constantIndex), newConstant);
-                }
-                return;
-            }
-
-            // simple broadcast operation Constant shape to shape on activations
-            auto newOperationConstantShape = constantShape;
-            auto const reshapeInputPShape = reshape->get_input_partial_shape(0);
-            PartialShape newOperationConstantBroadcastedShape(reshapeInputPShape);
-            newOperationConstantBroadcastedShape[0] = 1ul;
-
-            if ((reshapeInputPShape.rank().get_length() - newOperationConstantShape.size()) == 1ul) {
-                newOperationConstantShape.insert(newOperationConstantShape.begin(), 1ul);
-            }
-            const std::shared_ptr<opset1::Constant> newOperationConstant = std::make_shared<opset1::Constant>(
-                op->input(constantIndex).get_element_type(),
-                newOperationConstantShape,
-                originalConstant->cast_vector<float>());
-
-            // reshape -1 value handling
-            auto getOverallValue = [](const Shape& shape, const std::vector<int>& reshapeValues, const bool specialZero) -> size_t {
-                size_t overallValue = shape_size(shape);
-                for (size_t i = 0; i < reshapeValues.size(); ++i) {
-                    auto reshapeValue = reshapeValues[i];
-                    if ((reshapeValue == 1ul) || (reshapeValue == -1) || ((reshapeValue == 0ul) && !specialZero)) {
-                        continue;
-                    }
-
-                    if ((reshapeValue == 0ul) && specialZero) {
-                        reshapeValue = shape[i];
-                    }
-
-                    overallValue = overallValue / reshapeValue;
-                }
-                return overallValue;
-            };
-
-            // modify reshape constant for element-wise constant reshape
-            // element-wise constant doesn't have spatial dimensions, as result we should remove spatial dimensions from reshape parameters
-            const std::vector<int> reshapeConstValues = as_type_ptr<opset1::Constant>(reshape->get_input_node_shared_ptr(1))->cast_vector<int>();
-
-            size_t overallValue = 0;
-            for (size_t i = 0; i < reshapeConstValues.size(); ++i) {
-                if (reshapeConstValues[i] == -1) {
-                    overallValue = getOverallValue(
-                        reshapeInputPShape.to_shape(),
-                        reshapeConstValues,
-                        as_type_ptr<opset1::Reshape>(reshape)->get_special_zero());
-                    break;
-                }
-            }
-
-            std::vector<int> newReshapeConstValues(reshapeConstValues);
-            for (int i = static_cast<int>(newReshapeConstValues.size() - 1); i >= 0; --i) {
-                if (static_cast<int64_t>(newOperationConstantShape.size()) <= i) {
-                    // new dimension was added
-                    newReshapeConstValues[i] = 1;
-                } else if (newOperationConstantShape[i] == 1ul) {
-                    // keep the same
-                    newReshapeConstValues[i] = 1;
-                } else if (newReshapeConstValues[i] == -1) {
-                    // modified reshape parameters are different, but value instead '-1' has to be equal as original reshape
-                    newReshapeConstValues[i] = overallValue;
-                }
-            }
-
-            const std::shared_ptr<opset1::Constant> newReshapeConstant = std::make_shared<opset1::Constant>(
-                reshape->input(1).get_element_type(),
-                Shape({ newReshapeConstValues.size() }),
-                newReshapeConstValues);
-
-            // if channels are different then broadcast spatial dimensions to reshape channels correctly
-            // limitation which has to be covered by canBeTransformed:
-            //    1. spatial dimensions have to be absent or equal to 1 after reshape
-            //    2. only second dimension can be changed
-            const bool shouldBroadcast = (shape_size(newReshapeConstValues) != 1ul) && (reshapeConstValues[1] != 0) &&
-                (((reshapeConstValues[1] != -1) &&
-                    (static_cast<int64_t>(newOperationConstantShape[1]) != reshapeConstValues[1])) ||
-                ((reshapeConstValues[1] == -1) &&
-                    (newOperationConstantShape[1] != overallValue)));
-
-            const std::shared_ptr<Node> broadcastedConstant = shouldBroadcast ?
-                fold<opset1::Broadcast>(
-                    newOperationConstant,
-                    std::make_shared<opset1::Constant>(
-                        element::i32,
-                        Shape({static_cast<size_t>(newOperationConstantBroadcastedShape.rank().get_length())}),
-                        // TODO: investigate behaviour
-                        newOperationConstantBroadcastedShape.to_shape())) :
-                newOperationConstant;
-
-            const std::shared_ptr<Node> resultConstant = fold<opset1::Reshape>(
-                broadcastedConstant,
-                newReshapeConstant,
-                reshape->get_special_zero());
-
-            replace_node(op->get_input_node_shared_ptr(constantIndex), resultConstant);
-        };
-
-        if (dequantization.subtract != nullptr) {
-            replaceConstant(reshape, dequantization.subtract);
-        }
-
-        if (dequantization.multiply != nullptr) {
-            replaceConstant(reshape, dequantization.multiply);
-        }
-    }
+
+    if (dequantization.subtract != nullptr) {
+        replaceConstant(reshape, dequantization.subtractConstant);
+    }
+
+    if (dequantization.multiply != nullptr) {
+        replaceConstant(reshape, dequantization.multiplyConstant);
+    }
 }
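The comment at the top of the new replaceConstant lambda compresses the whole algorithm into three steps. A standalone sketch of the shape arithmetic from step 1, using plain std::vector in place of ngraph::Shape (the function name is illustrative):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Compute the intermediate shape used to broadcast a per-channel
    // dequantization constant before it is reshaped to the Reshape output.
    std::vector<size_t> broadcastShapeFor(const std::vector<size_t>& constantShape,
                                          const std::vector<size_t>& outputShape) {
        assert(constantShape.size() >= 2 && outputShape.size() >= 2);
        assert(outputShape[1] % constantShape[1] == 0);
        // how many copies of each channel value the reshaped constant needs
        const size_t dimensionsToBroadcast = outputShape[1] / constantShape[1];
        std::vector<size_t> result = constantShape;
        if (result.size() == 2) {
            result.push_back(dimensionsToBroadcast);
        } else {
            result[2] = dimensionsToBroadcast;
        }
        return result;
    }

    // Example from the comment above: constant {1, 3, 1, 1} and Reshape output
    // {1, 12, 3, 3} give {1, 3, 4, 1}; reshaping that to {1, 12, 1, 1} repeats
    // each channel value four times, matching the channel split 3 -> 12.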
@@ -186,7 +135,7 @@ bool ReshapeTransformation::isPrecisionPreserved(std::shared_ptr<Node> op) const
     return true;
 }

-size_t getLastNotBroadcastedChannel(const Shape& shape) {
+size_t getLastNotBroadcastedDimension(const Shape& shape) {
     for (int i = static_cast<int>(shape.size()) - 1; i >= 0; --i) {
         if (shape[i] != 1ul) {
             return i;
@@ -195,7 +144,7 @@ size_t getLastNotBroadcastedChannel(const Shape& shape) {
     return 0;
 }

-size_t getFirstChangedChannel(const PartialShape& shape1, const PartialShape& shape2) {
+size_t getFirstChangedDimension(const PartialShape& shape1, const PartialShape& shape2) {
     const size_t minSize = std::min(shape1.rank().get_length(), shape2.rank().get_length());
     size_t i = 0;
     for (; i < minSize; ++i) {
@@ -216,11 +165,15 @@ bool ReshapeTransformation::canBeTransformed(const TransformationContext& contex
         return false;
     }

-    // TODO: LPT: to support current flow: #58269
-    //if (((dequantization.subtractConstant != nullptr) && NetworkHelper::isScalarLike(dequantization.subtractConstant)) ||
-    //    ((dequantization.multiplyConstant != nullptr) && NetworkHelper::isScalarLike(dequantization.multiplyConstant))) {
-    //    return true;
-    //}
+    if (((dequantization.subtract == nullptr) || NetworkHelper::isScalarLike(dequantization.subtractConstant)) &&
+        ((dequantization.multiply == nullptr) || NetworkHelper::isScalarLike(dequantization.multiplyConstant))) {
+        return true;
+    }
+
+    const PartialShape outputPShape = op->get_output_partial_shape(0);
+    if (outputPShape[1].is_dynamic()) {
+        return false;
+    }

     const Shape subtractShape = dequantization.subtract == nullptr ? Shape{} : dequantization.subtractConstant->get_shape();
     Shape subtractShapeWithBatch = subtractShape;
@@ -245,26 +198,23 @@ bool ReshapeTransformation::canBeTransformed(const TransformationContext& contex
         multiplyShapeWithBatch.insert(multiplyShapeWithBatch.begin(), 1ul);
     }

-    const PartialShape outputPShape = op->get_output_partial_shape(0);
-    // if we have per-channel dq, dynamic shape, and "-1" reshape value - don't transform
-    if (outputPShape.is_dynamic() && (shape_size(subtractShape) > 1ul || shape_size(multiplyShape) > 1ul)) {
-        const auto reshapeConstant = as_type_ptr<opset1::Constant>(op->get_input_node_shared_ptr(1))->cast_vector<int>();
-        if (std::any_of(reshapeConstant.cbegin(), reshapeConstant.cend(), [](const int value) { return value == -1; })) {
-            return false;
-        }
+    const size_t outputChannel = static_cast<size_t>(outputPShape[1].get_length());
+    if (!subtractShapeWithBatch.empty() && (outputChannel < subtractShapeWithBatch[1])) {
+        return false;
+    }
+    if (!multiplyShapeWithBatch.empty() && (outputChannel < multiplyShapeWithBatch[1])) {
+        return false;
+    }
+
+    if (outputPShape.is_static() &&
+        ((!subtractShapeWithBatch.empty() && ((outputChannel % subtractShapeWithBatch[1]) != 0)) ||
+        (!multiplyShapeWithBatch.empty() && (outputChannel % multiplyShapeWithBatch[1] != 0)))) {
+        return false;
     }

     return canBeTransformed(subtractShapeWithBatch, multiplyShapeWithBatch, inputPShape, outputPShape);
 }

-size_t getChannelVolume(const PartialShape& shape) {
-    size_t volume = 1ul;
-    for (int i = 2; i < shape.rank().get_length(); ++i) {
-        volume = volume * shape[i].get_length();
-    }
-    return volume;
-}
-
 bool ReshapeTransformation::canBeTransformed(
     const ngraph::Shape& subtractShape,
     const ngraph::Shape& multiplyShape,
@@ -277,68 +227,15 @@ bool ReshapeTransformation::canBeTransformed(
         return false;
     }

-    // TODO: story 38439
-    if ((inputRank == 4ul) && (outputRank == 2ul)) {
-        auto checkSpatialDimensions = [](const Shape& dequantizationConstShape) {
-            for (size_t i = (dequantizationConstShape.size() - 2); i < dequantizationConstShape.size(); ++i) {
-                if (dequantizationConstShape[i] != 1ul) {
-                    return false;
-                }
-            }
-            return true;
-        };
-
-        if (((subtractShape.size() >= 3ul) && (!checkSpatialDimensions(subtractShape))) ||
-            ((multiplyShape.size() >= 3ul) && (!checkSpatialDimensions(multiplyShape)))) {
-            return false;
-        }
-
-        if (inputRank > 1ul) {
-            if (inputShape[1].is_dynamic()) {
-                return false;
-            }
-        } else {
-            if (inputShape[0].is_dynamic()) {
-                return false;
-            }
-        }
-
-        if (outputRank > 1ul) {
-            if (outputShape[1].is_dynamic()) {
-                return false;
-            }
-        } else {
-            if (outputShape[0].is_dynamic()) {
-                return false;
-            }
-        }
-
-        // custom validation for Layout::NCHW => Layout::NC
-        const size_t inputChannelsCount = inputRank > 1ul ? inputShape[1].get_length() : inputShape[0].get_length();
-        const size_t outputChannelsCount = outputRank > 1ul ? outputShape[1].get_length() : outputShape[0].get_length();
-        for (size_t i = 2; i < inputRank; ++i) {
-            if (inputShape[i].is_dynamic()) {
-                return false;
-            }
-        }
-        if ((inputShape[0] != outputShape[0]) || ((inputChannelsCount * getChannelVolume(inputShape)) != outputChannelsCount)) {
-            return false;
-        }
-    } else {
-        if (ngraph::shape_size(subtractShape) > 1 || ngraph::shape_size(multiplyShape) > 1) {
-            for (size_t i = 0; i < 2ul; ++i) {
-                if (inputShape[i] != outputShape[i]) {
-                    return false;
-                }
-            }
-        }
-
-        const size_t lastNotBroadcastedChannel = std::max(getLastNotBroadcastedChannel(subtractShape), getLastNotBroadcastedChannel(multiplyShape));
-        const size_t firstChangedChannel = getFirstChangedChannel(inputShape, outputShape);
-        if (lastNotBroadcastedChannel >= firstChangedChannel) {
-            return false;
-        }
-    }
+    const size_t lastNotBroadcastedDimension = std::max(getLastNotBroadcastedDimension(subtractShape), getLastNotBroadcastedDimension(multiplyShape));
+    const size_t firstChangedDimension = getFirstChangedDimension(inputShape, outputShape);
+    // LPT supports channel on the second dimension natively <= reshape transformation supports more shapes for this case
+    if ((lastNotBroadcastedDimension == 1ul) && (firstChangedDimension == 1ul)) {
+        return true;
+    }
+
+    if (lastNotBroadcastedDimension >= firstChangedDimension) {
+        return false;
+    }

     return true;
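The renamed helpers make the simplified rule readable: the reshape is transformable when every non-broadcasted dequantization dimension comes strictly before the first dimension the reshape changes, with one carve-out for per-channel constants on axis 1, which the transformation handles natively even when axis 1 changes. A sketch of the same check over plain vectors (stand-ins for the ngraph shape types):

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    // Index of the last dimension of a dequantization constant that is not 1,
    // i.e. the last dimension that actually carries per-channel values.
    size_t lastNotBroadcastedDimension(const std::vector<size_t>& shape) {
        for (int i = static_cast<int>(shape.size()) - 1; i >= 0; --i) {
            if (shape[i] != 1) {
                return static_cast<size_t>(i);
            }
        }
        return 0;
    }

    // Index of the first dimension where input and output shapes diverge.
    size_t firstChangedDimension(const std::vector<size_t>& inputShape,
                                 const std::vector<size_t>& outputShape) {
        const size_t minSize = std::min(inputShape.size(), outputShape.size());
        size_t i = 0;
        while (i < minSize && inputShape[i] == outputShape[i]) {
            ++i;
        }
        return i;
    }

    // E.g. a multiply constant {1, 3, 1, 1} with input {1, 3, 4, 8} and output
    // {1, 3, 32}: lastNotBroadcastedDimension = 1, firstChangedDimension = 2,
    // so 1 < 2 and the transformation is allowed.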

View File

@@ -7,22 +7,18 @@
 #include <sstream>
 #include <memory>
 #include <vector>

 #include <gtest/gtest.h>

-#include <transformations/utils/utils.hpp>
-#include <transformations/init_node_info.hpp>
+#include <low_precision/rt_info/precision_preserved_attribute.hpp>
+#include <low_precision/rt_info/intervals_alignment_attribute.hpp>
+#include <low_precision/rt_info/quantization_alignment_attribute.hpp>

-#include <low_precision/low_precision.hpp>
-#include <low_precision/common/operation_precision_restriction.hpp>
-#include <low_precision/common/operation_per_tensor_quantization_restriction.hpp>
 #include <low_precision/concat.hpp>
 #include <low_precision/fake_quantize_decomposition.hpp>
-#include <low_precision/rt_info/precision_preserved_attribute.hpp>
-#include <low_precision/align_quantization_parameters.hpp>
 #include <low_precision/fuse_subtract_to_fake_quantize.hpp>
 #include <low_precision/fuse_multiply_to_fake_quantize.hpp>
-#include <low_precision/markup_can_be_quantized.hpp>
-#include <low_precision/markup_per_tensor_quantization.hpp>

 #include "common_test_utils/ngraph_test_utils.hpp"
 #include "lpt_ngraph_functions/concat_function.hpp"

View File

@@ -140,7 +140,7 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
             {{ngraph::element::f32}, {}, {0.1f}}
         }
     },
-    // U8: 3D -> 4D: dynamic rank
+    // U8: 3D -> 4D: dynamic rank: per tensor quantization
     {
         PartialShape::dynamic(),
         { 0, 384, 16, 64 },
@@ -151,7 +151,39 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
         },
         {
             ngraph::element::u8,
-            {{ngraph::element::f32}, {}, {0.1f}},
+            {},
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {0.1f}}
+        }
+    },
+    // U8: 3D -> 4D: dynamic rank: per tensor quantization
+    {
+        PartialShape::dynamic(),
+        { 0, 384, 16, 64 },
+        LayerTransformation::createParamsU8I8(),
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {128}, {0.1f}}
+        },
+        {
+            ngraph::element::u8,
+            {},
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {128}, {0.1f}}
+        }
+    },
+    // U8: 3D -> 4D: dynamic rank
+    {
+        PartialShape::dynamic(),
+        { 0, 3, 16, 64 },
+        LayerTransformation::createParamsU8I8(),
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {{0.1f, 0.2f, 0.3f}, element::f32, {1, 3, 1, 1}}}
+        },
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {{0.1f, 0.2f, 0.3f}, element::f32, {1, 3, 1, 1}}},
             ngraph::element::f32,
             {}
         }
@@ -340,8 +372,7 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
             {}
         }
     },
-    // U8: no subtract 2D -> 4D: channels are affected: per tensor quantization
-    // TODO: story 38439
+    // U8: no subtract 4D -> 2D: channels are affected: per tensor quantization
     {
         { 1, 16, 384, 384 },
         { 6144, -1 },
@@ -352,12 +383,12 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
         },
         {
             ngraph::element::u8,
-            {{ngraph::element::f32}, {}, {0.1f}},
-            ngraph::element::f32,
-            {}
+            {},
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {0.1f}}
         }
     },
-    // U8: no subtract 2D -> 4D: channels are affected: per channel quantization
+    // U8: no subtract 4D -> 2D: channels are affected: per channel quantization
     {
         { 1, 3, 4, 5 },
         { 12, -1 },
@@ -437,8 +468,83 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
             {}
         }
     },
+    // U8: no subtract 4D -> 5D: channels are not affected: no subtract
+    {
+        { 1, 3, 4, 5 },
+        { 1, 3, 20, 1, 1},
+        LayerTransformation::createParamsU8I8(),
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {{0.1f, 0.2f, 0.3f}, ngraph::element::f32, {1, 3, 1, 1}}}
+        },
+        {
+            ngraph::element::u8,
+            {},
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {{0.1f, 0.2f, 0.3f}, ngraph::element::f32, {1, 3, 1, 1, 1}}},
+        }
+    },
+    // U8: no subtract 4D -> 5D: channels are affected: no subtract
+    {
+        { 1, 3, 2, 3 },
+        { 1, 18, 1, 1, 1},
+        LayerTransformation::createParamsU8I8(),
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {{0.1f, 0.2f, 0.3f}, ngraph::element::f32, {1, 3, 1, 1}}}
+        },
+        {
+            ngraph::element::u8,
+            {},
+            ngraph::element::u8,
+            {
+                {ngraph::element::f32},
+                {},
+                {
+                    {0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.2f, 0.2f, 0.2f, 0.2f, 0.2f, 0.2f, 0.3f, 0.3f, 0.3f, 0.3f, 0.3f, 0.3f},
+                    ngraph::element::f32,
+                    {1, 18, 1, 1, 1}
+                }
+            },
+        }
+    },
+    // U8: no subtract 4D -> 5D: channels are affected: no subtract
+    {
+        { 1, 3, 4, 5 },
+        { 1, 12, 1, 1, 5},
+        LayerTransformation::createParamsU8I8(),
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {}}}
+        },
+        {
+            ngraph::element::u8,
+            {},
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {}}},
+        }
+    },
+    // U8: no subtract 4D -> 5D: channels are affected: no subtract
+    {
+        { 1, 3, 4, 5 },
+        { 1, 12, 1, 1, 5},
+        LayerTransformation::createParamsU8I8(),
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, {{0.1f, 0.2f, 0.3f}, ngraph::element::f32, {1, 3, 1, 1}}}
+        },
+        {
+            ngraph::element::u8,
+            {},
+            ngraph::element::u8,
+            {
+                {ngraph::element::f32},
+                {},
+                {{0.1f, 0.1f, 0.1f, 0.1f, 0.2f, 0.2f, 0.2f, 0.2f, 0.3f, 0.3f, 0.3f, 0.3f}, ngraph::element::f32, {1, 12, 1, 1, 1}}
+            }
+        }
+    },
     // U8: no subtract 4D -> 2D: channels are not affected: per tensor quantization
-    // TODO: story 38439
     {
         { 1, 3, 4, 5 },
         { 0, -1 },
@@ -454,7 +560,7 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
             {{ngraph::element::f32}, {{128.f}, ngraph::element::f32, {}}, {{0.1f}, ngraph::element::f32, {}}}
         }
     },
-    // U8: no subtract 4D -> 2D: channels are not affected: per tensor quantization
+    // U8: no subtract 4D -> 2D: channels are affected: per channel quantization
    {
        { 1, 3, 2, 2 },
        { 0, -1 },
@@ -474,6 +580,26 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
            }
        }
    },
+    // U8: no subtract 4D -> 2D: channels are affected: per channel quantization
+    {
+        { 1, 3, 2, 2 },
+        { 0, -1 },
+        LayerTransformation::createParamsU8I8(),
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {{0.f, 128.f, 255.f}, ngraph::element::f32, {3, 1, 1}}, {{0.1f, 0.2f, 0.3f}, ngraph::element::f32, {3, 1, 1}}}
+        },
+        {
+            ngraph::element::u8,
+            {},
+            ngraph::element::u8,
+            {
+                {ngraph::element::f32},
+                {{0.f, 0.f, 0.f, 0.f, 128.f, 128.f, 128.f, 128.f, 255.f, 255.f, 255.f, 255.f}, ngraph::element::f32, {1, 12}},
+                {{0.1f, 0.1f, 0.1f, 0.1f, 0.2f, 0.2f, 0.2f, 0.2f, 0.3f, 0.3f, 0.3f, 0.3f}, ngraph::element::f32, {1, 12}}
+            }
+        }
+    },
     // U8: 4D -> 2D: per channel dq and dynamic batch
     {
         { Dimension::dynamic(), 3, 2, 2 },
@@ -485,9 +611,13 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
         },
         {
             ngraph::element::u8,
-            {{ngraph::element::f32}, {{0.f, 128.f, 255.f}, ngraph::element::f32, {1, 3, 1, 1}}, {{0.1f, 0.2f, 0.3f}, ngraph::element::f32, {1, 3, 1, 1}}},
-            ngraph::element::f32,
-            {}
+            {},
+            ngraph::element::u8,
+            {
+                {ngraph::element::f32},
+                {{0.f, 0.f, 0.f, 0.f, 128.f, 128.f, 128.f, 128.f, 255.f, 255.f, 255.f, 255.f}, ngraph::element::f32, {1, 12}},
+                {{0.1f, 0.1f, 0.1f, 0.1f, 0.2f, 0.2f, 0.2f, 0.2f, 0.3f, 0.3f, 0.3f, 0.3f}, ngraph::element::f32, {1, 12}}
+            }
         }
     },
     // U8: no subtract 4D -> 2D: channels are not affected: per tensor quantization
@@ -603,7 +733,7 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
             ngraph::element::u8,
             {{}, {}, {}},
             ngraph::element::u8,
-            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {1ul}}}
+            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {}}}
         }
     },
     // U8: no subtract 4D -> 2D
@@ -619,7 +749,7 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
             ngraph::element::u8,
             {{}, {}, {}},
             ngraph::element::u8,
-            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {1, 1}}}
+            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {}}}
         }
     },
     // U8: no subtract 4D -> 2D: channels are not affected
@@ -635,7 +765,7 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
             ngraph::element::u8,
             {{}, {}, {}},
             ngraph::element::u8,
-            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {1, 1}}}
+            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {}}}
         }
     },
     // U8: no subtract 4D -> 2D: channels are not affected, dynamic batch
@@ -651,7 +781,7 @@ const std::vector<ReshapeTransformationTestValues> testValues = {
             ngraph::element::u8,
             {{}, {}, {}},
             ngraph::element::u8,
-            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {1, 1}}}
+            {{ngraph::element::f32}, {}, {{0.1f}, ngraph::element::f32, {}}}
         }
     },
     // U8: no subtract 4D -> 4D: channels are affected

View File

@@ -11,14 +11,12 @@ using namespace LayerTestsDefinitions;
 namespace {
 const std::vector<ngraph::element::Type> netPrecisions = {
-    ngraph::element::f32
-    // ngraph::element::f16
+    ngraph::element::f32,
+    ngraph::element::f16
 };

 const std::vector<ngraph::pass::low_precision::LayerTransformation::Params> trasformationParamValues = {
-    LayerTestsUtils::LayerTransformationParamsNGraphFactory::createParams(),
-    // LayerTestsUtils::LayerTransformationParamsNGraphFactory::createParams().setUpdatePrecisions(false),
-    // LayerTestsUtils::LayerTransformationParamsNGraphFactory::createParamsU8I8()
+    LayerTestsUtils::LayerTransformationParamsNGraphFactory::createParams()
 };

 const std::vector<ReshapeTransformationParam> params = {
@@ -27,29 +25,87 @@ const std::vector<ReshapeTransformationParam> params = {
         { 1, 3, 32 },
         { 1, 3, 4, 8 },
         { 256ul, ngraph::Shape{ 1, 1, 1 }, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
-        true
+        "Reshape",
+        "U8"
+    },
+    // 3D -> 1D
+    {
+        { 1, 3, 32 },
+        { -1 },
+        { 256ul, ngraph::Shape{}, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
+        "Reshape",
+        "FP32"
     },
     // 4D -> 3D
     {
         { 1, 3, 16, 16 },
         { 1, 3, 256 },
         { 256ul, ngraph::Shape{ 1, 1, 1, 1 }, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
-        true
+        "Reshape",
+        "U8"
     },
     // 4D -> 3D
     {
         { 1, 3, 16, 16 },
         { 0, 3, -1 },
         { 256ul, ngraph::Shape{ 1, 3, 1, 1 }, { 0.f }, { 255.f }, { 0.f, 0.f, 0.f }, { 255.f, 25.5f, 2.55f } },
-        true
+        "Reshape",
+        "U8"
     },
     // 4D -> 2D
     {
         { 1, 3, 4, 8 },
         { 1, -1 },
         { 256ul, ngraph::Shape{ 1, 1, 1, 1 }, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
-        true
+        "Reshape",
+        "U8"
+    },
+    // 4D -> 2D
+    {
+        { 1, 3, 4, 8 },
+        { 1, -1 },
+        {
+            256ul,
+            ngraph::Shape{ 1, 3, 1, 1 },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f/2.f, 255.f/3.f },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f/2.f, 255.f/3.f },
+        },
+        "Reshape",
+        "U8"
+    },
+    // 4D -> 3D
+    {
+        { 1, 3, 4, 8 },
+        { 1, 3, -1 },
+        {
+            256ul,
+            ngraph::Shape{ 1, 3, 1, 1 },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f/2.f, 255.f/3.f },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f/2.f, 255.f/3.f },
+        },
+        "Reshape",
+        "U8"
+    },
+    // per-channel
+    // 4D -> 3D
+    {
+        { 1, 3, 4, 8 },
+        { 1, -1, 8 },
+        {
+            256ul,
+            ngraph::Shape{ 1, 3, 1, 1 },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f/2.f, 255.f/3.f },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f/2.f, 255.f/3.f },
+        },
+        "Reshape",
+        "U8"
+    }
 };

 INSTANTIATE_TEST_SUITE_P(smoke_LPT, ReshapeTransformation,

View File

@@ -19,16 +19,16 @@ const std::vector<ngraph::element::Type> precisions = {
 std::vector<MatMulWithConstantTransformationTestValues> testValues = {
     {
         { 2, 3, 4 },
-        { 256ul, {{1, 1, 1}, {1, 1, 1}, {1, 3, 1}, {1, 3, 1}}, {0.f}, {255.f}, {0.f, 0.f, 0.f}, {255.f, 25.5f, 255.f} },
+        { 256ul, {{1, 3, 1}, {1, 3, 1}, {1, 3, 1}, {1, 3, 1}}, {0.f, 0.f, 0.f}, {255.f, 25.5f, 2.55f}, {0.f, 0.f, 0.f}, {255.f, 25.5f, 2.55f} },
         { std::vector<float>(4 * 2, 2.f), ngraph::element::f32, ngraph::Shape{ 2, 4 } },
-        { 256ul, {{1}, {1}, {2, 1}, {2, 1}}, {-128.f}, {127.f}, {-128.f, -12.8f}, {127.f, 12.7f} },
+        { 256ul, {{2, 1}, {2, 1}, {2, 1}, {2, 1}}, {-128.f, -12.8f}, {127.f, 12.7f}, {-128.f, -12.8f}, {127.f, 12.7f} },
         { {}, {}, {} },
         "FullyConnected",
         "FP32"
     },
     {
         { 2, 3, 4 },
-        { 256ul, {{1, 1, 1}, {1, 1, 1}, {1, 3, 1}, {1, 3, 1}}, {0.f}, {255.f}, {0.f, 0.f, 0.f}, {255.f, 25.5f, 255.f} },
+        { 256ul, {{1, 3, 1}, {1, 3, 1}, {1, 3, 1}, {1, 3, 1}}, {0.f, 0.f, 0.f}, {255.f, 25.5f, 2.f}, {0.f, 0.f, 0.f}, {255.f, 25.5f, 2.f} },
         { std::vector<float>(4 * 2, 2.f), ngraph::element::i8, ngraph::Shape{ 2, 4 } },
         {},
         { ngraph::element::f32, {}, {0.1f} },
@@ -39,23 +39,23 @@ std::vector<MatMulWithConstantTransformationTestValues> testValues = {
         { 1, 3, 4 },
         { 256ul, {{1, 1, 1}, {1, 1, 1}, {1, 1, 1}, {1, 1, 1}}, {-10.5f}, {4.5f}, {-10.5f}, {4.5f} },
         { std::vector<float>(4 * 2, 2.f), ngraph::element::f32, ngraph::Shape{ 2, 4 } },
-        { 256ul, {{1}, {1}, {2, 1}, {2, 1}}, {-128.f}, {127.f}, {-128.f, -12.8f}, {127.f, 12.7f} },
+        { 256ul, {{2, 1}, {2, 1}, {2, 1}, {2, 1}}, {-128.f, -12.8f}, {127.f, 12.7f}, {-128.f, -12.8f}, {127.f, 12.7f} },
         { {}, {}, {} },
         "FullyConnected",
         "FP32"
     },
     {
         { 1, 1, 3, 4 },
-        { 256ul, {{1, 1, 1}, {1, 1, 1}, {1, 3, 1}, {1, 3, 1}}, {0.f}, {255.f}, {0.f, 0.f, 0.f}, {255.f, 25.5f, 255.f} },
+        { 256ul, {{1, 3, 1}, {1, 3, 1}, {1, 3, 1}, {1, 3, 1}}, {0.f, 0.f, 0.f}, {25.f, 24.f, 25.f}, {0.f, 0.f, 0.f}, {25.f, 24.f, 25.f} },
         { std::vector<float>(4 * 2, 2.f), ngraph::element::f32, ngraph::Shape{ 2, 4 } },
-        { 256ul, {{1}, {1}, {2, 1}, {2, 1}}, {-128.f}, {127.f}, {-128.f, -12.8f}, {127.f, 12.7f} },
+        { 256ul, {{2, 1}, {2, 1}, {2, 1}, {2, 1}}, {-128.f, -12.8f}, {127.f, 12.7f}, {-128.f, -12.8f}, {127.f, 12.7f} },
         { {}, {}, {} },
         "FullyConnected",
         "U8"
     },
     {
         { 1, 1, 3, 4 },
-        { 256ul, {{1, 1, 1}, {1, 1, 1}, {1, 3, 1}, {1, 3, 1}}, {0.f}, {255.f}, {0.f, 0.f, 0.f}, {255.f, 25.5f, 255.f} },
+        { 256ul, {{1, 3, 1}, {1, 3, 1}, {1, 3, 1}, {1, 3, 1}}, {0.f, 0.f, 0.f}, {25.f, 24.f, 25.f}, {0.f, 0.f, 0.f}, {25.f, 24.f, 25.f} },
         { std::vector<float>(4 * 2, 2.f), ngraph::element::i8, ngraph::Shape{ 2, 4 } },
         {},
         { ngraph::element::f32, {}, {{0.1f, 0.01}, ngraph::element::f32, ngraph::Shape{ 2, 1 }} },
@@ -73,7 +73,7 @@ std::vector<MatMulWithConstantTransformationTestValues> testValues = {
     },
     {
         { 2, 3 },
-        { 256ul, {{1}, {1}, {2, 1}, {2, 1}}, {-10.f}, {5.f}, {-10.f, -5.f}, {5.f, 5.f} },
+        { 256ul, {{2, 1}, {2, 1}, {2, 1}, {2, 1}}, {-10.f, -5.f}, {5.f, 5.f}, {-10.f, -5.f}, {5.f, 5.f} },
         { std::vector<float>{1, 2, 3, 4, 5, 6}, ngraph::element::f32, ngraph::Shape{ 2, 3 } },
         { 256ul, {{1}, {1}, {1}, {1}}, {-128.f}, {127.f}, {-12.8f}, {12.7f} },
         { {}, {}, {} },
@@ -82,7 +82,7 @@ std::vector<MatMulWithConstantTransformationTestValues> testValues = {
     },
     {
         { 2, 3 },
-        { 256ul, {{1}, {1}, {2, 1}, {2, 1}}, {-10.f}, {5.f}, {-10.f, -5.f}, {5.f, 5.f} },
+        { 256ul, {{2, 1}, {2, 1}, {2, 1}, {2, 1}}, {-10.f, -5.f}, {5.f, 5.f}, {-10.f, -5.f}, {5.f, 5.f} },
         { std::vector<float>{1, 2, 3, 4, 5, 6}, ngraph::element::i8, ngraph::Shape{ 2, 3 } },
         {},
         { ngraph::element::f32, {}, {0.1f} },

View File

@@ -17,8 +17,6 @@ const std::vector<ngraph::element::Type> netPrecisions = {
 const std::vector<ngraph::pass::low_precision::LayerTransformation::Params> trasformationParamValues = {
     LayerTestsUtils::LayerTransformationParamsNGraphFactory::createParams(),
-    // LayerTestsUtils::LayerTransformationParamsNGraphFactory::createParams().setUpdatePrecisions(false),
-    // LayerTestsUtils::LayerTransformationParamsNGraphFactory::createParamsU8I8()
 };

 const std::vector<ReshapeTransformationParam> params = {
@@ -27,29 +25,87 @@ const std::vector<ReshapeTransformationParam> params = {
         { 1, 3, 32 },
         { 1, 3, 4, 8 },
         { 256ul, ngraph::Shape{ 1, 1, 1 }, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
-        true
+        "Reshape",
+        "U8"
+    },
+    // 3D -> 1D
+    {
+        { 1, 3, 32 },
+        { -1 },
+        { 256ul, ngraph::Shape{}, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
+        "Reshape",
+        "FP32"
     },
     // 4D -> 3D
     {
         { 1, 3, 16, 16 },
         { 1, 3, 256 },
         { 256ul, ngraph::Shape{ 1, 1, 1, 1 }, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
-        true
+        "Reshape",
+        "U8"
     },
     // 4D -> 3D
     {
         { 1, 3, 16, 16 },
         { 0, 3, -1 },
         { 256ul, ngraph::Shape{ 1, 3, 1, 1 }, { 0.f }, { 255.f }, { 0.f, 0.f, 0.f }, { 255.f, 25.5f, 2.55f } },
-        true
+        "Reshape",
+        "U8"
     },
     // 4D -> 2D
     {
         { 1, 3, 4, 8 },
         { 1, -1 },
         { 256ul, ngraph::Shape{ 1, 1, 1, 1 }, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
-        true
+        "Reshape",
+        "U8"
+    },
+    // 4D -> 2D
+    {
+        { 1, 3, 4, 8 },
+        { 1, -1 },
+        {
+            256ul,
+            ngraph::Shape{ 1, 3, 1, 1 },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f / 2.f, 255.f / 3.f },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f / 2.f, 255.f / 3.f },
+        },
+        "Reshape",
+        "U8"
+    },
+    // 4D -> 3D
+    {
+        { 1, 3, 4, 8 },
+        { 1, 3, -1 },
+        {
+            256ul,
+            ngraph::Shape{ 1, 3, 1, 1 },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f / 2.f, 255.f / 3.f },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f / 2.f, 255.f / 3.f },
+        },
+        "Reshape",
+        "U8"
+    },
+    // per-channel
+    // 4D -> 3D
+    {
+        { 1, 3, 4, 8 },
+        { 1, -1, 8 },
+        {
+            256ul,
+            ngraph::Shape{ 1, 3, 1, 1 },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f / 2.f, 255.f / 3.f },
+            { 0.f, 0.f, 0.f },
+            { 255.f, 255.f / 2.f, 255.f / 3.f },
+        },
+        "Reshape",
+        "U8"
+    }
 };

 INSTANTIATE_TEST_SUITE_P(smoke_LPT, ReshapeTransformation,

View File

@@ -17,7 +17,8 @@ public:
     ngraph::PartialShape inputShape;
     std::vector<int> reshapeConstValues;
     ngraph::builder::subgraph::FakeQuantizeOnData fakeQuantize;
-    bool isTransformed;
+    std::string layerType;
+    std::string expectedKernelType;
 };

 typedef std::tuple<
@@ -35,6 +36,7 @@ public:
 protected:
     void SetUp() override;
+    void Run() override;
 };

 }  // namespace LayerTestsDefinitions

View File

@@ -6,15 +6,11 @@

 #include <memory>
 #include <tuple>
-#include <vector>
-#include <string>

 #include <ie_core.hpp>

-#include "ngraph_functions/builders.hpp"
 #include <transformations/init_node_info.hpp>
 #include "lpt_ngraph_functions/reshape_function.hpp"

 namespace LayerTestsDefinitions {

 std::string ReshapeTransformation::getTestCaseName(testing::TestParamInfo<ReshapeTransformationParams> obj) {
@@ -50,6 +46,18 @@ void ReshapeTransformation::SetUp() {
         param.fakeQuantize);
 }

+void ReshapeTransformation::Run() {
+    LayerTestsCommon::Run();
+
+    const auto params = std::get<3>(GetParam());
+    auto actualPrecision = getRuntimePrecisionByType(params.layerType);
+    const auto expectedPrecision = params.expectedKernelType;
+    if ((expectedPrecision == "FP32") && (actualPrecision == "FP16")) {
+        actualPrecision = "FP32";
+    }
+
+    EXPECT_EQ(actualPrecision, expectedPrecision);
+}
+
 TEST_P(ReshapeTransformation, CompareWithRefImpl) {
     Run();
 };

View File

@@ -83,6 +83,10 @@ public:
     std::string getRuntimePrecision(const std::string& layerName);
     std::string getRuntimePrecisionByType(const std::string& layerType);

+#ifndef NDEBUG
+    void showRuntimePrecisions();
+#endif
+
     template<class T_IE, class T_NGRAPH>
     static void Compare(const T_NGRAPH *expected, const T_IE *actual, std::size_t size, float threshold) {
         for (std::size_t i = 0; i < size; ++i) {

View File

@@ -474,6 +474,24 @@ std::string LayerTestsCommon::getRuntimePrecisionByType(const std::string& layer
     return "";
 }

+#ifndef NDEBUG
+void LayerTestsCommon::showRuntimePrecisions() {
+    const auto execGraph = executableNetwork.GetExecGraphInfo();
+    const auto function = execGraph.getFunction();
+
+    for (const auto& op : function->get_ops()) {
+        const auto& rtInfo = op->get_rt_info();
+
+        const auto& typeIt = rtInfo.find("layerType");
+        const auto type = ngraph::as_type_ptr<ngraph::VariantWrapper<std::string>>(typeIt->second)->get();
+
+        const auto& it = rtInfo.find("runtimePrecision");
+        const auto rtPrecisionPtr = ngraph::as_type_ptr<ngraph::VariantWrapper<std::string>>(it->second);
+
+        std::cout << type << ": " << rtPrecisionPtr->get() << std::endl;
+    }
+}
+#endif
+
 void LayerTestsCommon::SetRefMode(RefMode mode) {
     refMode = mode;
 }

View File

@@ -152,7 +152,6 @@ TEST(LPT_ReshapeTransformation, canBeTransformed_4D_to_2D_perSpacial_TRUE) {
         ngraph::Shape({ 1, 9216 })));
 }

-// TODO: story 38439
 TEST(LPT_ReshapeTransformation, canBeTransformed_5D_to_5D_perBatch) {
     ASSERT_FALSE(ngraph::pass::low_precision::ReshapeTransformation::canBeTransformed(
         ngraph::Shape({ 1, 16, 1, 1, 1 }),