[CPU] MatMul dummy shapes creation fixed (#9858)

This commit is contained in:
Maxim Andronov 2022-01-28 21:21:02 +03:00 committed by GitHub
parent 97a78d0059
commit b5ea943267
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 207 additions and 8 deletions

View File

@ -72,13 +72,15 @@ public:
*/
static InferenceEngine::TensorDesc convertToTensorDesc(const MemoryDesc& desc);
static constexpr Dim DEFAULT_DUMMY_VAL = 64;
/**
* @brief Makes a dummy descriptor where all undefined values are replaced with the smallest value between the parameter and the upper bound dim
* @param desc MemoryDesc from which the new descriptor is generated
* @param dummyVal Dim value to replace undefined dimensions
* @return a new MemoryDesc with dummy values instead of undefined dims
*/
static std::shared_ptr<MemoryDesc> makeDummyDesc(const MemoryDesc& desc, Dim dummyVal = 64);
static std::shared_ptr<MemoryDesc> makeDummyDesc(const MemoryDesc& desc, Dim dummyVal = DEFAULT_DUMMY_VAL);
/**
* @brief Makes a static dummy shape where all undefined values are replaced with the smallest value between the parameter and the upper bound dim
@ -86,7 +88,7 @@ public:
* @param dummyVal Dim value to replace undefined dimensions
* @return a new Shape with dummy values instead of undefined dims
*/
static Shape makeDummyShape(const Shape& shape, Dim dummyVal = 64);
static Shape makeDummyShape(const Shape& shape, Dim dummyVal = DEFAULT_DUMMY_VAL);
/**
* @brief Converts dim to string, undefined dim represented as ?

View File

@ -717,7 +717,9 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
const auto shape = inputNodeItr->second->get_output_partial_shape(0);
const bool isDynamic = shape.is_dynamic();
if (!shape.compatible(ov::PartialShape(data->getTensorDesc().getDims()))) {
IE_THROW() << "Can't SetBlob with name: " << name << ", because model input and blob are incompatible";
IE_THROW() << "Can't SetBlob with name: " << name
<< ", because model input (shape=" << shape
<< ") and blob (shape=" << vec2str(data->getTensorDesc().getDims()) << ") are incompatible";
}
if (!isDynamic && ngraph::shape_size(shape.to_shape()) != data->size()) {

View File

@ -291,9 +291,10 @@ void MKLDNNMatMulNode::getSupportedDescriptors() {
}
}
std::vector<Shape> staticInputShapes(2);
staticInputShapes[0] = inputShape0.isStatic() ? inputShape0 : MemoryDescUtils::makeDummyShape(inputShape0);
staticInputShapes[1] = inputShape1.isStatic() ? inputShape1 : MemoryDescUtils::makeDummyShape(inputShape1);
std::vector<Shape> staticInputShapes{inputShape0, inputShape1};
if (inputShape0.isDynamic() || inputShape1.isDynamic()) {
std::tie(staticInputShapes[0], staticInputShapes[1]) = makeDummyInputShapes(inputShape0, inputShape1);
}
auto staticOutputShape = outputShape.isStatic() ? outputShape : Shape(shapeInferGeneric(staticInputShapes).front());
@ -307,6 +308,80 @@ void MKLDNNMatMulNode::getSupportedDescriptors() {
createDescriptor({inDataDesc[0], inDataDesc[1]}, {outDataDesc});
}
std::pair<Shape, Shape> MKLDNNMatMulNode::makeDummyInputShapes(const Shape& in0, const Shape& in1) const {
if (in0.getRank() < 2 || in1.getRank() < 2) {
IE_THROW() << "Can't create dummy inputs with rank less 2";
}
if (in0.getRank() != in1.getRank()) {
IE_THROW() << "Can't create dummy inputs if input's rank not equal";
}
auto swapTranspDims = [&](VectorDims& in0, VectorDims& in1) {
if (transposeIn[0]) {
std::swap(in0[in0.size() - 1], in0[in0.size() - 2]);
}
if (transposeIn[1]) {
std::swap(in1[in1.size() - 1], in1[in1.size() - 2]);
}
};
auto inDims0 = in0.getDims();
auto inDims1 = in1.getDims();
auto minDims0 = in0.getMinDims();
auto maxDims0 = in0.getMaxDims();
auto minDims1 = in1.getMinDims();
auto maxDims1 = in1.getMaxDims();
swapTranspDims(inDims0, inDims1);
swapTranspDims(minDims0, minDims1);
swapTranspDims(maxDims0, maxDims1);
auto fillDummy = [&](size_t idx0, size_t idx1) {
if (inDims0[idx0] == Shape::UNDEFINED_DIM && inDims1[idx1] == Shape::UNDEFINED_DIM) {
inDims0[idx0] = inDims1[idx1] = std::min(std::min(maxDims0[idx0], maxDims1[idx1]),
std::max(std::max(minDims0[idx0], minDims1[idx1]), static_cast<Dim>(MemoryDescUtils::DEFAULT_DUMMY_VAL)));
} else {
if (inDims0[idx0] == Shape::UNDEFINED_DIM && inDims1[idx1] != Shape::UNDEFINED_DIM) {
if (inDims1[idx1] == 1 && minDims0[idx0] != Shape::UNDEFINED_DIM) {
inDims0[idx0] = std::max<Dim>(minDims0[idx0], 1);
} else {
inDims0[idx0] = inDims1[idx1];
}
} else if (inDims0[idx0] != Shape::UNDEFINED_DIM && inDims1[idx1] == Shape::UNDEFINED_DIM) {
if (inDims0[idx0] == 1 && minDims1[idx1] != Shape::UNDEFINED_DIM) {
inDims1[idx1] = std::max<Dim>(minDims1[idx1], 1);
} else {
inDims1[idx1] = inDims0[idx0];
}
}
}
};
// fill k
fillDummy(inDims0.size() - 1, inDims1.size() - 2);
// fill m, n
if (inDims0[inDims0.size() - 2] == Shape::UNDEFINED_DIM) {
inDims0[inDims0.size() - 2] = std::min(maxDims0[inDims0.size() - 2],
std::max(minDims0[inDims0.size() - 2], static_cast<Dim>(MemoryDescUtils::DEFAULT_DUMMY_VAL)));
}
if (inDims1[inDims1.size() - 1] == Shape::UNDEFINED_DIM) {
inDims1[inDims1.size() - 1] = std::min(maxDims1[inDims1.size() - 1],
std::max(minDims1[inDims1.size() - 1], static_cast<Dim>(MemoryDescUtils::DEFAULT_DUMMY_VAL)));
}
// fill batches
for (size_t i = 0; i < inDims0.size() - 2; i++) {
fillDummy(i, i);
}
swapTranspDims(inDims0, inDims1);
return {Shape(inDims0), Shape(inDims1)};
}
void MKLDNNMatMulNode::createDescriptor(const std::vector<MemoryDescPtr>& inputDesc,
const std::vector<MemoryDescPtr>& outputDesc) {
std::shared_ptr<mkldnn::matmul::desc> matmul_desc;

View File

@ -47,6 +47,7 @@ protected:
private:
mkldnn::memory::desc getBiasDescFrom(const DnnlMemoryDescCPtr outMemDesc);
std::pair<Shape, Shape> makeDummyInputShapes(const Shape& in0, const Shape& in1) const;
bool withBiases;

View File

@ -748,11 +748,116 @@ const std::vector<ShapeRelatedParams> IS_Dynamic = {
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{{1, 15}, {1, 15}, {1, 15}}, {{10, 10, 10}, {5, 5, 5}}}, // input 0
{{{1, 15}, {1, 15}, {1, 15}}, {{10, 10, 10}, {5, 5, 5}}} // input 1
{{ -1, 16 }, {{ 4, 16 }, { 2, 16 }}}, // input 0
{{ {1, 5}, 12, -1, 4 }, {{ 1, 12, 16, 4 }, { 1, 12, 16, 4 }}} // input 1
},
{true, true}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ -1, 12, -1, 16 }, {{ 1, 12, 4, 16 }, { 2, 12, 2, 16 }}}, // input 0
{{ {1, 5}, 12, -1, 4 }, {{ 1, 12, 16, 4 }, { 1, 12, 16, 4 }}} // input 1
},
{false, false}
},
};
// Extended dynamic-shape MatMul cases executed only in the nightly CI run
// (see nightly_MM_Dynamic below). Each entry pairs, per input, a dynamic shape
// description ({interval}/-1 bounds) with the static shapes used at inference
// time, followed by the {transposeA, transposeB} flags.
const std::vector<ShapeRelatedParams> IS_Dynamic_nightly = {
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{{5, 15}, {1, 12}, {4, 15}}, {{10, 10, 10}, {5, 5, 5}}}, // input 0
{{{1, 13}, {3, 15}, {1, 10}}, {{10, 10, 10}, {5, 5, 5}}} // input 1
},
{true, true}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ {2, 10}, {3, 15}, -1, 16 }, {{ 2, 12, 4, 16 }, { 3, 12, 2, 16 }}}, // input 0
{{ 1, 1, -1, 4 }, {{ 1, 1, 16, 4 }, { 1, 1, 16, 4 }}} // input 1
},
{true, true}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ 1, 1, -1, 16 }, {{ 1, 1, 4, 16 }, { 1, 1, 2, 16 }}}, // input 0
{{ {2, 5}, {3, 15}, -1, 4 }, {{ 2, 12, 16, 4 }, { 2, 12, 16, 4 }}} // input 1
},
{false, false}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ -1, 16 }, {{ 4, 16 }, { 2, 16 }}}, // input 0
{{ {1, 5}, 12, -1, 4 }, {{ 1, 12, 16, 4 }, { 1, 12, 16, 4 }}} // input 1
},
{false, false}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ -1, {2, 15}, -1, 16 }, {{ 1, 12, 4, 16 }, { 2, 12, 2, 16 }}}, // input 0
{{ -1, 4 }, {{ 16, 4 }, { 16, 4 }}} // input 1
},
{true, true}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ -1, {1, 15}, -1, 16 }, {{ 1, 12, 4, 16 }, { 2, 12, 2, 16 }}}, // input 0
{{ -1, 4 }, {{ 16, 4 }, { 16, 4 }}} // input 1
},
{false, false}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ {1, 3}, {1, 9}, {1, 5}, {1, 10} }, {{ 1, 7, 4, 5 }, { 1, 7, 4, 4 }}}, // input 0
{{ {1, 5}, {1, 7}, {1, 8}, {1, 5} }, {{ 1, 7, 5, 4 }, { 1, 7, 4, 4 }}} // input 1
},
{true, true}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ {1, 3}, {1, 9}, {1, 5}, {1, 10} }, {{ 1, 7, 4, 5 }, { 1, 7, 4, 4 }}}, // input 0
{{ {1, 5}, {1, 7}, {1, 8}, {1, 5} }, {{ 1, 7, 5, 4 }, { 1, 7, 4, 4 }}} // input 1
},
{false, false}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ 1, 7, 4, -1 }, {{ 1, 7, 4, 5 }, { 1, 7, 4, 4 }}}, // input 0
{{ 1, 7, -1, 4 }, {{ 1, 7, 5, 4 }, { 1, 7, 4, 4 }}} // input 1
},
{true, true}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ 1, 7, 4, -1 }, {{ 1, 7, 4, 5 }, { 1, 7, 4, 4 }}}, // input 0
{{ 1, 7, -1, 4 }, {{ 1, 7, 5, 4 }, { 1, 7, 4, 4 }}} // input 1
},
{false, false}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ -1, 12, -1, 16 }, {{ 1, 12, 4, 16 }, { 2, 12, 2, 16 }}}, // input 0
{{ {1, 5}, 12, -1, 4 }, {{ 1, 12, 16, 4 }, { 1, 12, 16, 4 }}} // input 1
},
{true, true}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ -1, 12, -1, 16 }, {{ 1, 12, 4, 16 }, { 2, 12, 2, 16 }}}, // input 0
{{ {1, 5}, 12, -1, 4 }, {{ 1, 12, 16, 4 }, { 1, 12, 16, 4 }}} // input 1
},
{true, false}
},
{
{ //dynamic case description each pair per each input has {{dynamic shape}, {{static shape case1}, {static shape case2}, ...}
{{ -1, 12, -1, 16 }, {{ 1, 12, 4, 16 }, { 2, 12, 2, 16 }}}, // input 0
{{ {1, 5}, 12, -1, 4 }, {{ 1, 12, 16, 4 }, { 1, 12, 16, 4 }}} // input 1
},
{false, true}
},
};
std::vector<fusingSpecificParams> matmulFusingParams {
@ -801,6 +906,20 @@ const auto testParamsDynamic = ::testing::Combine(matMulParamsDynamic,
INSTANTIATE_TEST_SUITE_P(smoke_MM_Dynamic, MatMulLayerCPUTest, testParamsDynamic, MatMulLayerCPUTest::getTestCaseName);
// Nightly-only parameter set: same axes as the smoke dynamic run above, but
// drawing shapes from the larger IS_Dynamic_nightly list.
const auto matMulParamsDynamic_nightly = ::testing::Combine(::testing::ValuesIn(IS_Dynamic_nightly),
::testing::ValuesIn(netPRCs),
::testing::Values(ElementType::undefined),
::testing::Values(ElementType::undefined),
::testing::Values(helpers::InputLayerType::PARAMETER),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::ValuesIn(additionalConfig));
// Full test tuple: shape params x MatMul node type, no fusing, filtered
// CPU-specific params.
const auto testParamsDynamic_nightly = ::testing::Combine(matMulParamsDynamic_nightly,
::testing::Values(MatMulNodeType::MatMul),
::testing::Values(emptyFusingSpec),
::testing::ValuesIn(filterSpecificParams()));
// "nightly_" prefix keeps this suite out of the smoke/pre-commit runs.
INSTANTIATE_TEST_SUITE_P(nightly_MM_Dynamic, MatMulLayerCPUTest, testParamsDynamic_nightly, MatMulLayerCPUTest::getTestCaseName);
const std::vector<ShapeRelatedParams> IS_Dynamic_Fusing = {
{