[CPU] Generic JIT Eltwise implementation (#1464)

This commit is contained in:
Gorokhov Dmitriy
2020-10-28 09:16:28 +03:00
committed by GitHub
parent e3ed796b2e
commit abb8817cf6
54 changed files with 4855 additions and 5096 deletions

View File

@@ -261,655 +261,6 @@ std::string select_op(eltwise_test_params::opType op) {
return str_op;
}
// Test fixture for the MKLDNN Eltwise node with exactly three inputs.
// Builds an IR v3 network from the XML template below, creates an MKLDNN
// graph from it, runs inference on sine-filled inputs, and compares the
// output against the ref_eltwise reference implementation.
class MKLDNNGraphEltwise3InputsTests: public TestsCommon,
public WithParamInterface<eltwise_test_params> {
// IR template. The placeholders __SRC_DIMS_1__/2__/3__, __SRC_DIMS__,
// _OP_ and _COEFF_ are substituted in getModel(). Do not edit the raw
// string contents: it is parsed by Core::ReadNetwork at runtime.
std::string model_t = R"V0G0N(
<net name="EltwiseOnly" version="3" precision="FP32" batch="1">
<layers>
<layer name="in1" type="Input" precision="FP32" id="1">
<output>
<port id="1">__SRC_DIMS_1__
</port>
</output>
</layer>
<layer name="in2" type="Input" precision="FP32" id="2">
<output>
<port id="2">__SRC_DIMS_2__
</port>
</output>
</layer>
<layer name="in3" type="Input" precision="FP32" id="3">
<output>
<port id="3">__SRC_DIMS_3__
</port>
</output>
</layer>
<layer name="con" id="4" type="Eltwise" precision="FP32">
<data operation="_OP_" _COEFF_/>
<input>
<port id="1">__SRC_DIMS_1__
</port>
<port id="2">__SRC_DIMS_2__
</port>
<port id="3">__SRC_DIMS_3__
</port>
</input>
<output>
<port id="4">__SRC_DIMS__
</port>
</output>
</layer>
</layers>
<edges>
<edge from-layer="1" from-port="1" to-layer="4" to-port="1"/>
<edge from-layer="2" from-port="2" to-layer="4" to-port="2"/>
<edge from-layer="3" from-port="3" to-layer="4" to-port="3"/>
</edges>
</net>
)V0G0N";
protected:
// Renders model_t for the given test params: fills in the three input
// dim lists, the output dims, the operation name and optional coeffs.
// Returns the complete IR XML string.
std::string getModel(eltwise_test_params p) {
std::string model = model_t;
std::string op = select_op(p.op);
std::string src_dims1;
for (auto &dim : p.dims1) {
src_dims1 += "\n <dim>";
src_dims1 += std::to_string(dim) + "</dim>";
}
REPLACE_WITH_STR(model, "__SRC_DIMS_1__", src_dims1);
std::string src_dims2;
for (auto &dim : p.dims2) {
src_dims2 += "\n <dim>";
src_dims2 += std::to_string(dim) + "</dim>";
}
REPLACE_WITH_STR(model, "__SRC_DIMS_2__", src_dims2);
std::string src_dims3;
for (auto &dim : p.dims3) {
src_dims3 += "\n <dim>";
src_dims3 += std::to_string(dim) + "</dim>";
}
REPLACE_WITH_STR(model, "__SRC_DIMS_3__", src_dims3);
std::string src_dims;
// Output dims = per-axis max of the three inputs. NOTE(review): this
// indexes dims2/dims3 with dims1's indices, so it assumes all three
// inputs have the same rank — true for every param set in this file.
std::vector<size_t> dims = p.dims1;
for (int i = 0; i < dims.size(); i++) {
dims[i] = std::max(p.dims1[i], p.dims2[i]);
dims[i] = std::max(dims[i], p.dims3[i]);
}
for (auto &dim : dims) {
src_dims += "\n <dim>";
src_dims += std::to_string(dim) + "</dim>";
}
REPLACE_WITH_STR(model, "__SRC_DIMS__", src_dims);
std::string scale;
// coeff attribute is emitted only when scales are supplied (Sum op).
if (!p.scales.empty()) {
scale = std::string("coeff=\"") + to_string_c_locale(p.scales) + std::string("\"");
}
REPLACE_WITH_STR(model, "_OP_", op);
REPLACE_WITH_STR(model, "_COEFF_", scale);
return model;
}
virtual void TearDown() {
}
virtual void SetUp() {
try {
TestsCommon::SetUp();
eltwise_test_params p = ::testing::WithParamInterface<eltwise_test_params>::GetParam();
std::string model = getModel(p);
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()));
MKLDNNGraphTestClass graph;
graph.CreateGraph(network);
// Verify the Eltwise node's supported primitive descriptors: expected
// count, per-descriptor checks from p.comp, and the selected impl type.
auto& nodes = graph.getNodes();
for (int i = 0; i < nodes.size(); i++) {
if (nodes[i]->getType() == MKLDNNPlugin::Eltwise) {
ASSERT_EQ(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
}
ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
ASSERT_EQ(p.selectedType, nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType());
}
}
// Pick a layout per input from its rank: 4D -> NCHW, 5D -> NCDHW,
// anything else stays ANY.
InferenceEngine::SizeVector dims_src1 = p.dims1;
InferenceEngine::Layout layout1 = InferenceEngine::ANY;
switch (p.dims1.size()) {
case 4:
layout1 = InferenceEngine::NCHW;
break;
case 5:
layout1 = InferenceEngine::NCDHW;
break;
}
InferenceEngine::SizeVector dims_src2 = p.dims2;
InferenceEngine::Layout layout2 = InferenceEngine::ANY;
switch (p.dims2.size()) {
case 4:
layout2 = InferenceEngine::NCHW;
break;
case 5:
layout2 = InferenceEngine::NCDHW;
break;
}
InferenceEngine::SizeVector dims_src3 = p.dims3;
InferenceEngine::Layout layout3 = InferenceEngine::ANY;
switch (p.dims3.size()) {
case 4:
layout3 = InferenceEngine::NCHW;
break;
case 5:
layout3 = InferenceEngine::NCDHW;
break;
}
// Allocate the three input blobs and fill each with deterministic sine
// data; the differing last argument (1/2/3) gives each input distinct values.
InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float>({InferenceEngine::Precision::FP32, dims_src1, layout1});
src1->allocate();
InferenceEngine::TBlob<float>* srcPtr1 = dynamic_cast<InferenceEngine::TBlob<float>*>(src1.get());
if (srcPtr1 == nullptr)
FAIL() << "Cannot cast blob to TBlob<float>.";
CommonTestUtils::fill_data_sine(src1->buffer(), src1->size(), 0.1, 0.9, 1);
InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float>({InferenceEngine::Precision::FP32, dims_src2, layout2});
src2->allocate();
InferenceEngine::TBlob<float>* srcPtr2 = dynamic_cast<InferenceEngine::TBlob<float>*>(src2.get());
if (srcPtr2 == nullptr)
FAIL() << "Cannot cast blob to TBlob<float>.";
CommonTestUtils::fill_data_sine(src2->buffer(), src2->size(), 0.1, 0.9, 2);
InferenceEngine::Blob::Ptr src3 = InferenceEngine::make_shared_blob<float>({InferenceEngine::Precision::FP32, dims_src3, layout3});
src3->allocate();
InferenceEngine::TBlob<float>* srcPtr3 = dynamic_cast<InferenceEngine::TBlob<float>*>(src3.get());
if (srcPtr3 == nullptr)
FAIL() << "Cannot cast blob to TBlob<float>.";
CommonTestUtils::fill_data_sine(src3->buffer(), src3->size(), 0.1, 0.9, 3);
InferenceEngine::BlobMap srcs;
srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src1));
srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in2", src2));
srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in3", src3));
InferenceEngine::OutputsDataMap out;
out = network.getOutputsInfo();
InferenceEngine::BlobMap outputBlobs;
std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
InferenceEngine::TBlob<float>::Ptr output;
output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
output->allocate();
outputBlobs[item.first] = output;
graph.Infer(srcs, outputBlobs);
// Compute the reference result and compare with a 5e-4 absolute tolerance.
InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
dst_ref.allocate();
std::vector<InferenceEngine::TBlob<float>> src_vec = {*srcPtr1, *srcPtr2, *srcPtr3};
ref_eltwise(src_vec, dst_ref, p);
compare(*output, dst_ref, 0.0005f);
} catch (const InferenceEngine::details::InferenceEngineException &e) {
FAIL() << e.what();
}
}
};
TEST_P(MKLDNNGraphEltwise3InputsTests, TestsEltwise) {}
// 3-input cases: Sum (with/without coeffs), Prod, Max on 4D shapes, a 5D Sum,
// plus ops whose param sets carry no per-descriptor checkers. Each lambda
// validates one supported primitive descriptor (ref impl, 3 in / 1 out, layouts).
INSTANTIATE_TEST_CASE_P(
TestsEltwise, MKLDNNGraphEltwise3InputsTests,
::testing::Values(
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "", 3, MKLDNNPlugin::impl_desc_type::ref, {
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
ASSERT_EQ(3, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(2).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
}
} },
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "1.0,1.0,1.0", 3, MKLDNNPlugin::impl_desc_type::ref, {
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
ASSERT_EQ(3, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(2).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
}
} },
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "1.5,0.5,-2.0", 3, MKLDNNPlugin::impl_desc_type::ref, {
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
ASSERT_EQ(3, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(2).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
}
} },
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Prod, "", 3, MKLDNNPlugin::impl_desc_type::ref, {
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
ASSERT_EQ(3, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(2).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
}
} },
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Max, "", 3, MKLDNNPlugin::impl_desc_type::ref, {
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
ASSERT_EQ(3, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(1).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(2).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
}
} },
eltwise_test_params{{1, 32, 16, 16, 16},{1, 32, 16, 16, 16},{1, 32, 16, 16, 16}, eltwise_test_params::opType::Sum, "", 3, MKLDNNPlugin::impl_desc_type::ref, {
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::ref, impl.getImplementationType());
ASSERT_EQ(3, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::NCDHW, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCDHW, impl.getConfig().inConfs.at(1).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCDHW, impl.getConfig().inConfs.at(2).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCDHW, impl.getConfig().outConfs.at(0).desc.getLayout());
}
} },
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Min, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Sub, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Div, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Logical_AND, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Logical_OR, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Logical_XOR, "", 3, MKLDNNPlugin::impl_desc_type::ref}
));
// Test fixture for the MKLDNN Eltwise node with two inputs. Unlike the
// 3-input fixture, the output shape is computed with numpy-style trailing-axis
// alignment, so it also covers broadcasting and rank-mismatched inputs.
class MKLDNNGraphEltwise2InputsTests: public TestsCommon,
public WithParamInterface<eltwise_test_params> {
// IR v2 template; placeholders are substituted in getModel().
std::string model_t = R"V0G0N(
<net name="EltwiseOnly" version="2" precision="FP32">
<layers>
<layer name="in1" type="Input" precision="FP32" id="1">
<output>
<port id="1">__SRC_DIMS_1__
</port>
</output>
</layer>
<layer name="in2" type="Input" precision="FP32" id="2">
<output>
<port id="2">__SRC_DIMS_2__
</port>
</output>
</layer>
<layer name="con" id="3" type="Eltwise" precision="FP32">
<data operation="_OP_" _COEFF_/>
<input>
<port id="1">__SRC_DIMS_1__
</port>
<port id="2">__SRC_DIMS_2__
</port>
</input>
<output>
<port id="3">__SRC_DIMS__
</port>
</output>
</layer>
</layers>
<edges>
<edge from-layer="1" from-port="1" to-layer="3" to-port="1"/>
<edge from-layer="2" from-port="2" to-layer="3" to-port="2"/>
</edges>
</net>
)V0G0N";
protected:
// Renders model_t for params p: input dim lists, broadcast output dims,
// op name and optional coeffs. Returns the complete IR XML string.
std::string getModel(eltwise_test_params p) {
std::string model = model_t;
std::string op = select_op(p.op);
std::string src_dims1 = "";
for (auto &dim : p.dims1) {
src_dims1 += "\n <dim>";
src_dims1 += std::to_string(dim) + "</dim>";
}
REPLACE_WITH_STR(model, "__SRC_DIMS_1__", src_dims1);
std::string src_dims2 = "";
for (auto &dim : p.dims2) {
src_dims2 += "\n <dim>";
src_dims2 += std::to_string(dim) + "</dim>";
}
REPLACE_WITH_STR(model, "__SRC_DIMS_2__", src_dims2);
std::string src_dims;
// Output dims: start from the higher-rank input, then take the per-axis
// max while aligning both shapes from their trailing (innermost) axes —
// leading axes of the longer shape pass through unchanged.
std::vector<size_t> dims = (p.dims1.size() >= p.dims2.size()) ? p.dims1 : p.dims2;
int i = dims.size() - 1, j = p.dims1.size() - 1, k = p.dims2.size() - 1;
for (; j >= 0 && k >= 0; i--, j--, k-- ) {
dims[i] = std::max(p.dims1[j], p.dims2[k]);
}
for (auto &dim : dims) {
src_dims += "\n <dim>";
src_dims += std::to_string(dim) + "</dim>";
}
REPLACE_WITH_STR(model, "__SRC_DIMS__", src_dims);
std::string scale;
// coeff attribute is emitted only when scales are supplied (Sum op).
if (!p.scales.empty()) {
scale = std::string("coeff=\"") + to_string_c_locale(p.scales) + std::string("\"");
}
REPLACE_WITH_STR(model, "_OP_", op);
REPLACE_WITH_STR(model, "_COEFF_", scale);
return model;
}
virtual void TearDown() {
}
virtual void SetUp() {
try {
TestsCommon::SetUp();
eltwise_test_params p = ::testing::WithParamInterface<eltwise_test_params>::GetParam();
std::string model = getModel(p);
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()));
MKLDNNGraphTestClass graph;
graph.CreateGraph(network);
// Verify the Eltwise node's primitive descriptors and selected impl type.
auto& nodes = graph.getNodes();
for (int i = 0; i < nodes.size(); i++) {
if (nodes[i]->getType() == MKLDNNPlugin::Eltwise) {
ASSERT_EQ(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
}
ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
ASSERT_EQ(p.selectedType, nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType());
}
}
// Layouts are derived from the dims here (supports arbitrary ranks),
// unlike the 3-input fixture's explicit 4D/5D switch.
InferenceEngine::SizeVector dims_src1 = p.dims1;
InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float>({InferenceEngine::Precision::FP32, dims_src1, InferenceEngine::TensorDesc::getLayoutByDims(p.dims1) });
src1->allocate();
InferenceEngine::TBlob<float>* srcPtr1 = dynamic_cast<InferenceEngine::TBlob<float>*>(src1.get());
if (srcPtr1 == nullptr)
FAIL() << "Cannot cast blob to TBlob<float>.";
CommonTestUtils::fill_data_sine(src1->buffer(), src1->size(), 0.1, 0.9, 1);
InferenceEngine::SizeVector dims_src2 = p.dims2;
InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float>({InferenceEngine::Precision::FP32, dims_src2, InferenceEngine::TensorDesc::getLayoutByDims(p.dims2) });
src2->allocate();
InferenceEngine::TBlob<float>* srcPtr2 = dynamic_cast<InferenceEngine::TBlob<float>*>(src2.get());
if (srcPtr2 == nullptr)
FAIL() << "Cannot cast blob to TBlob<float>.";
CommonTestUtils::fill_data_sine(src2->buffer(), src2->size(), 0.1, 0.9, 2);
InferenceEngine::BlobMap srcs;
srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src1));
srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in2", src2));
InferenceEngine::OutputsDataMap out;
out = network.getOutputsInfo();
InferenceEngine::BlobMap outputBlobs;
std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
InferenceEngine::TBlob<float>::Ptr output;
output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
output->allocate();
outputBlobs[item.first] = output;
graph.Infer(srcs, outputBlobs);
// Compare against the reference with a 5e-4 absolute tolerance.
InferenceEngine::TBlob<float> dst_ref(item.second->getTensorDesc());
dst_ref.allocate();
std::vector<InferenceEngine::TBlob<float>> src_vec = {*srcPtr1, *srcPtr2};
ref_eltwise(src_vec, dst_ref, p);
compare(*output, dst_ref, 0.0005f);
} catch (const InferenceEngine::details::InferenceEngineException &e) {
FAIL() << e.what();
}
}
};
TEST_P(MKLDNNGraphEltwise2InputsTests, TestsEltwise) {}
// Equal-shape 2-input cases: one param set per supported operation.
INSTANTIATE_TEST_CASE_P(
TestsEltwise, MKLDNNGraphEltwise2InputsTests,
::testing::Values(
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Sum, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Prod, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Max, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Min, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Sub, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Div, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Squared_diff, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Logical_AND, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Logical_OR, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Logical_XOR, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Less, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Less_equal, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Greater, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Greater_equal, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Equal, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Not_equal, "", 3, MKLDNNPlugin::impl_desc_type::ref}
));
// Broadcasting cases: same rank, axes of size 1 broadcast against size 3.
INSTANTIATE_TEST_CASE_P(
TestsBroadcasting, MKLDNNGraphEltwise2InputsTests,
::testing::Values(
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Prod, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Max, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Min, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Sub, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Div, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Squared_diff, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Logical_AND, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Logical_OR, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 1, 3},{1, 1, 3, 3},{}, eltwise_test_params::opType::Logical_XOR, "", 1, MKLDNNPlugin::impl_desc_type::ref},
// batch broadcasting
eltwise_test_params{{1, 3, 224},{224, 3, 1},{}, eltwise_test_params::opType::Sum, "", 2, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{2, 3, 1, 2},{1, 3, 2, 1},{}, eltwise_test_params::opType::Sub, "", 1, MKLDNNPlugin::impl_desc_type::ref}
));
// Rank-mismatch cases: scalar ({}) vs N-D, and lower-rank vs higher-rank inputs.
INSTANTIATE_TEST_CASE_P(
TestsDiffDims, MKLDNNGraphEltwise2InputsTests,
::testing::Values(
eltwise_test_params{{},{1, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3},{},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3},{3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{},{1, 3, 3},{}, eltwise_test_params::opType::Sum, "", 2, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3},{},{}, eltwise_test_params::opType::Sum, "", 2, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3},{3},{}, eltwise_test_params::opType::Sum, "", 2, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3},{1, 3, 3},{}, eltwise_test_params::opType::Sum, "", 2, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3},{1, 3},{}, eltwise_test_params::opType::Sum, "", 2, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{},{1, 3, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{},{1, 3, 3, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3, 3},{},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3},{1, 3, 3, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3, 3},{1, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3},{1, 3, 3, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3, 3},{1, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3, 3},{1, 3, 3, 3},{}, eltwise_test_params::opType::Sum, "", 1, MKLDNNPlugin::impl_desc_type::ref}
));
// Dynamic-batch variant of the 3-input Eltwise test: reuses the parent's
// getModel(), reshapes the network to batch >= 2, enables the plugin's
// dynamic-batch mode, and checks inference at full and reduced batch sizes.
class MKLDNNGraphEltwiseDynBatchTests: public MKLDNNGraphEltwise3InputsTests {
protected:
virtual void SetUp() {
try {
TestsCommon::SetUp();
eltwise_test_params p = ::testing::WithParamInterface<eltwise_test_params>::GetParam();
std::string model = getModel(p);
// Force a batch of at least 2 so shrinking to batch 1 is meaningful.
size_t MB = p.dims1[0];
if (MB < 2)
MB = 2;
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()));
// setBatchSizeReshape is only available on the CNNNetworkImpl backing class.
auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
InferenceEngine::ResponseDesc resp;
InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
MKLDNNGraphTestClass graph;
// Build the graph with dynamic batch support enabled.
graph.setProperty({{InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES}});
graph.CreateGraph(network);
// Layout per input from its rank: 4D -> NCHW, 5D -> NCDHW, else ANY.
InferenceEngine::SizeVector dims_src1 = p.dims1;
InferenceEngine::Layout layout1 = InferenceEngine::ANY;
switch (p.dims1.size()) {
case 4:
layout1 = InferenceEngine::NCHW;
break;
case 5:
layout1 = InferenceEngine::NCDHW;
break;
}
InferenceEngine::SizeVector dims_src2 = p.dims2;
InferenceEngine::Layout layout2 = InferenceEngine::ANY;
switch (p.dims2.size()) {
case 4:
layout2 = InferenceEngine::NCHW;
break;
case 5:
layout2 = InferenceEngine::NCDHW;
break;
}
InferenceEngine::SizeVector dims_src3 = p.dims3;
InferenceEngine::Layout layout3 = InferenceEngine::ANY;
switch (p.dims3.size()) {
case 4:
layout3 = InferenceEngine::NCHW;
break;
case 5:
layout3 = InferenceEngine::NCDHW;
break;
}
// NOTE(review): blob dims come from the original params, not the reshaped
// batch MB; checkDynBatch drives the actual batch values below.
InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float>({InferenceEngine::Precision::FP32, dims_src1, layout1});
src1->allocate();
InferenceEngine::TBlob<float>* srcPtr1 = dynamic_cast<InferenceEngine::TBlob<float>*>(src1.get());
if (srcPtr1 == nullptr)
FAIL() << "Cannot cast blob to TBlob<float>.";
fill_data(src1->buffer(), src1->size());
InferenceEngine::Blob::Ptr src2 = InferenceEngine::make_shared_blob<float>({InferenceEngine::Precision::FP32, dims_src2, layout2});
src2->allocate();
InferenceEngine::TBlob<float>* srcPtr2 = dynamic_cast<InferenceEngine::TBlob<float>*>(src2.get());
if (srcPtr2 == nullptr)
FAIL() << "Cannot cast blob to TBlob<float>.";
fill_data(src2->buffer(), src2->size());
InferenceEngine::Blob::Ptr src3 = InferenceEngine::make_shared_blob<float>({InferenceEngine::Precision::FP32, dims_src3, layout3});
src3->allocate();
InferenceEngine::TBlob<float>* srcPtr3 = dynamic_cast<InferenceEngine::TBlob<float>*>(src3.get());
if (srcPtr3 == nullptr)
FAIL() << "Cannot cast blob to TBlob<float>.";
fill_data(src3->buffer(), src3->size());
InferenceEngine::BlobMap srcs;
srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in1", src1));
srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in2", src2));
srcs.insert(std::pair<std::string, InferenceEngine::Blob::Ptr>("in3", src3));
InferenceEngine::OutputsDataMap out;
out = network.getOutputsInfo();
InferenceEngine::BlobMap outputBlobs;
std::pair<std::string, InferenceEngine::DataPtr> item = *out.begin();
InferenceEngine::TBlob<float>::Ptr output;
output = InferenceEngine::make_shared_blob<float>(item.second->getTensorDesc());
output->allocate();
outputBlobs[item.first] = output;
// Predicate selecting the node whose dynamic-batch behavior is checked.
auto checkDepthwise = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
return node->getType() == MKLDNNPlugin::Eltwise;
};
// Run at the full batch and at batch 1 to exercise dynamic batching.
graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkDepthwise);
graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkDepthwise);
} catch (const InferenceEngine::details::InferenceEngineException &e) {
FAIL() << e.what();
}
}
};
TEST_P(MKLDNNGraphEltwiseDynBatchTests, TestsDynBatchEltwise) {}
// TODO: rewrite to ngraph to have reshape functionality
// Disabled until the dynamic-batch reshape path is ported to ngraph.
INSTANTIATE_TEST_CASE_P(
DISABLED_TestsDynBatchEltwise, MKLDNNGraphEltwiseDynBatchTests,
::testing::Values(
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "1.0,1.0,1.0", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Sum, "1.5,0.5,-2.0", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Prod, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Max, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Sub, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Min, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Div, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Pow, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Logical_AND, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Logical_OR, "", 3, MKLDNNPlugin::impl_desc_type::ref},
eltwise_test_params{{1, 3, 3, 3},{1, 3, 3, 3},{1, 3, 3, 3}, eltwise_test_params::opType::Logical_XOR, "", 3, MKLDNNPlugin::impl_desc_type::ref}
));
struct precisions_test_2params {
struct {
std::string precision0;
@@ -1022,7 +373,7 @@ INSTANTIATE_TEST_CASE_P(
TestsEltwise2Precisions, MKLDNNGraphEltwise2PrecisionsTests,
::testing::Values(
precisions_test_2params{ {"FP32", "FP32"}, 4, 0 },
precisions_test_2params{ { "U8", "FP32"}, 5, 1 },
precisions_test_2params{ {"FP32", "U8"}, 5, 1 },
precisions_test_2params{ { "U8", "U8"}, 6, 2 }
precisions_test_2params{ { "U8", "FP32"}, 4, 0 },
precisions_test_2params{ {"FP32", "U8"}, 4, 0 },
precisions_test_2params{ { "U8", "U8"}, 4, 0 }
));

View File

@@ -116,13 +116,12 @@ protected:
graph.CreateGraph(network);
auto& nodes = graph.getNodes();
for (int i = 0; i < nodes.size(); i++) {
if (nodes[i]->getType() == MKLDNNPlugin::Power) {
if (nodes[i]->getType() == MKLDNNPlugin::Eltwise) {
ASSERT_EQ(p.num_prim_desc, nodes[i]->getSupportedPrimitiveDescriptors().size());
for (size_t j = 0; j < p.num_prim_desc && j < p.comp.size(); j++) {
p.comp.at(j)(nodes[i]->getSupportedPrimitiveDescriptors().at(j));
}
ASSERT_NE(nullptr, nodes[i]->getSelectedPrimitiveDescriptor());
ASSERT_EQ(p.selectedType, nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType());
}
}
@@ -174,25 +173,16 @@ INSTANTIATE_TEST_CASE_P(
power_test_params{
{1, 3, 13, 13}, 1, 2, 0.5f, 3, MKLDNNPlugin::impl_desc_type::unknown, {
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
ASSERT_EQ(1, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
},
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
ASSERT_EQ(1, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
},
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
ASSERT_EQ(1, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
}}},
power_test_params{{1, 1, 23, 23}, 3, 8, 2, 3 },
power_test_params{{1, 8, 23, 23}, 8, 2, 1, 3 },
@@ -306,7 +296,7 @@ protected:
outputBlobs[item.first] = output;
auto checkPower = [](const MKLDNNPlugin::MKLDNNNodePtr& node) {
return node->getType() == MKLDNNPlugin::Power;
return node->getType() == MKLDNNPlugin::Eltwise;
};
graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkPower);
graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkPower);
@@ -325,25 +315,16 @@ INSTANTIATE_TEST_CASE_P(
power_test_params{
{1, 3, 13, 13}, 1, 2, 0.5f, 3, MKLDNNPlugin::impl_desc_type::unknown, {
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
ASSERT_EQ(1, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::NCHW, impl.getConfig().outConfs.at(0).desc.getLayout());
},
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
ASSERT_EQ(1, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
},
[](MKLDNNPlugin::PrimitiveDescInfo impl) {
ASSERT_EQ(MKLDNNPlugin::impl_desc_type::unknown, impl.getImplementationType());
ASSERT_EQ(1, impl.getConfig().inConfs.size());
ASSERT_EQ(1, impl.getConfig().outConfs.size());
ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().inConfs.at(0).desc.getLayout());
ASSERT_EQ(InferenceEngine::Layout::BLOCKED, impl.getConfig().outConfs.at(0).desc.getLayout());
}}},
power_test_params{{1, 1, 23, 23}, 3, 8, 2, 3 },
power_test_params{{1, 8, 23, 23}, 8, 2, 1, 3 },

View File

@@ -257,14 +257,14 @@ protected:
ASSERT_EQ(nodes.size(), 3);
ASSERT_EQ(nodes[0].get()->getType(), MKLDNNPlugin::Type::Input);
ASSERT_EQ(nodes[1].get()->getType(), MKLDNNPlugin::Type::Convolution);
ASSERT_TRUE(nodes[1].get()->isFusedWith(MKLDNNPlugin::Type::Depthwise));
ASSERT_TRUE(nodes[1].get()->isFusedWith(MKLDNNPlugin::Type::Eltwise));
ASSERT_EQ(nodes[2].get()->getType(), MKLDNNPlugin::Type::Output);
} else {
ASSERT_EQ(nodes.size(), 5);
ASSERT_EQ(nodes[0].get()->getType(), MKLDNNPlugin::Type::Input);
ASSERT_EQ(nodes[1].get()->getType(), MKLDNNPlugin::Type::Reorder);
ASSERT_EQ(nodes[2].get()->getType(), MKLDNNPlugin::Type::Convolution);
ASSERT_TRUE(nodes[2].get()->isFusedWith(MKLDNNPlugin::Type::Depthwise));
ASSERT_TRUE(nodes[2].get()->isFusedWith(MKLDNNPlugin::Type::Eltwise));
ASSERT_EQ(nodes[3].get()->getType(), MKLDNNPlugin::Type::Reorder);
ASSERT_EQ(nodes[4].get()->getType(), MKLDNNPlugin::Type::Output);
}

View File

@@ -186,10 +186,9 @@ TEST_F(MKLDNNGraphStructureTests, TestNoRedundantReorders) {
for (auto &node : nodes) {
if (node->getType() == MKLDNNPlugin::Reorder) {
reorders_num++;
ASSERT_EQ(MKLDNNPlugin::Output, node->getChildEdgeAt(0)->getChild()->getType());
}
}
ASSERT_EQ(reorders_num, 1);
ASSERT_EQ(reorders_num, 3);
}
TEST_F(MKLDNNGraphStructureTests, TestRedundantReorderBeforeConvWithC_3) {
@@ -3781,7 +3780,7 @@ TEST_F(MKLDNNGraphStructureTests, TestNoRedundantReordersForXceptionTopology) {
weights->allocate();
fill_data((float *) weights->buffer(), weights->size() / sizeof(float));
InferenceEngine::TBlob<uint8_t>::Ptr weights_ptr = InferenceEngine::TBlob<uint8_t>::Ptr(weights);
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
@@ -4020,7 +4019,7 @@ TEST_F(MKLDNNGraphStructureTests, TestFailedPartPlateRecognitionBarrier0001) {
fill_data((float *) weights->buffer(), weights->size() / sizeof(float));
InferenceEngine::TBlob<uint8_t>::Ptr weights_ptr = InferenceEngine::TBlob<uint8_t>::Ptr(weights);
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
@@ -4629,7 +4628,7 @@ TEST_F(MKLDNNGraphStructureTests, TestConvolutionDWConvolutionSumFusing) {
memset((float *) weights->buffer(), 0, weights->size());
InferenceEngine::TBlob<uint8_t>::Ptr weights_ptr = InferenceEngine::TBlob<uint8_t>::Ptr(weights);
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
network = core.ReadNetwork(model, weights_ptr);
@@ -5127,7 +5126,7 @@ TEST_F(MKLDNNGraphStructureTests, TestGemmConvolutionWithConcat) {
weights->allocate();
fill_data((float *) weights->buffer(), weights->size() / sizeof(float));
InferenceEngine::TBlob<uint8_t>::Ptr weights_ptr = InferenceEngine::TBlob<uint8_t>::Ptr(weights);
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
@@ -5412,7 +5411,7 @@ TEST_F(MKLDNNGraphStructureTests, TestRefPoolingWithConcat) {
weights->allocate();
fill_data((float *) weights->buffer(), weights->size() / sizeof(float));
InferenceEngine::TBlob<uint8_t>::Ptr weights_ptr = InferenceEngine::TBlob<uint8_t>::Ptr(weights);
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
@@ -5566,7 +5565,7 @@ TEST_F(MKLDNNGraphStructureTests, TestConvolutionWith2DepthwiseOpFusing) {
ASSERT_EQ(nodes[0].get()->getType(), MKLDNNPlugin::Type::Input);
ASSERT_EQ(nodes[1].get()->getType(), MKLDNNPlugin::Type::Reorder);
ASSERT_EQ(nodes[2].get()->getType(), MKLDNNPlugin::Type::Convolution);
ASSERT_TRUE(nodes[2].get()->isFusedWith(MKLDNNPlugin::Type::Depthwise));
ASSERT_TRUE(nodes[2].get()->isFusedWith(MKLDNNPlugin::Type::Eltwise));
ASSERT_EQ(nodes[3].get()->getType(), MKLDNNPlugin::Type::Reorder);
ASSERT_EQ(nodes[4].get()->getType(), MKLDNNPlugin::Type::Output);
@@ -5704,7 +5703,7 @@ TEST_F(MKLDNNGraphStructureTests, TestConvolutionWith2EltwiseOpFusing) {
ASSERT_EQ(nodes.size(), 4);
ASSERT_EQ(nodes[0].get()->getType(), MKLDNNPlugin::Type::Input);
ASSERT_EQ(nodes[1].get()->getType(), MKLDNNPlugin::Type::Convolution);
ASSERT_TRUE(nodes[1].get()->isFusedWith(MKLDNNPlugin::Type::Activation));
ASSERT_TRUE(nodes[1].get()->isFusedWith(MKLDNNPlugin::Type::Eltwise));
ASSERT_EQ(nodes[2].get()->getType(), MKLDNNPlugin::Type::Reorder);
ASSERT_EQ(nodes[3].get()->getType(), MKLDNNPlugin::Type::Output);
@@ -5846,7 +5845,7 @@ TEST_F(MKLDNNGraphStructureTests, TestGemmConvolutionWith2DepthwiseOpFusing) {
ASSERT_EQ(nodes.size(), 3);
ASSERT_EQ(nodes[0].get()->getType(), MKLDNNPlugin::Type::Input);
ASSERT_EQ(nodes[1].get()->getType(), MKLDNNPlugin::Type::Convolution);
ASSERT_TRUE(nodes[1].get()->isFusedWith(MKLDNNPlugin::Type::Depthwise));
ASSERT_TRUE(nodes[1].get()->isFusedWith(MKLDNNPlugin::Type::Eltwise));
ASSERT_EQ(nodes[2].get()->getType(), MKLDNNPlugin::Type::Output);
InferenceEngine::TensorDesc src_desc(InferenceEngine::Precision::FP32, {1, 8, 300, 600}, InferenceEngine::NCHW);

View File

@@ -27,6 +27,7 @@
#include <nodes/mkldnn_input_node.h>
#include <functional>
#include <cmath>
#include <legacy/details/ie_cnn_network_tools.h>
#define GARB_VAL(x) ((x + 100.0f + sin(x)) / (x + 150.f))
@@ -212,13 +213,66 @@ public:
return graphNodes;
}
void MoveInternalBlobsToConstLayers(InferenceEngine::details::CNNNetworkImpl* netImpl) {
auto createConstInputTo = [&](InferenceEngine::CNNLayerPtr layer, InferenceEngine::Blob::Ptr blob, std::string name) {
InferenceEngine::LayerParams attrs = {layer.get()->name + "_const_" + name, "Const", InferenceEngine::Precision::FP32};
auto constLayer = std::make_shared<InferenceEngine::CNNLayer>(attrs);
constLayer->blobs["custom"] = blob;
std::vector<size_t> constDims(layer->insData[0].lock()->getDims().size(), 1);
if (constDims.size() > 1)
constDims[1] = blob.get()->size();
else
constDims[0] = blob.get()->size();
const InferenceEngine::TensorDesc& td = {InferenceEngine::Precision::FP32, constDims, InferenceEngine::TensorDesc::getLayoutByDims(constDims)};
InferenceEngine::DataPtr newEdgeAfterLayer(new InferenceEngine::Data(constLayer->name, td));
newEdgeAfterLayer->setName(constLayer->name);
getCreatorLayer(newEdgeAfterLayer) = constLayer;
getInputTo(newEdgeAfterLayer).clear();
netImpl->addData(constLayer->name.c_str(), newEdgeAfterLayer);
IE_SUPPRESS_DEPRECATED_START
netImpl->addLayer(constLayer);
IE_SUPPRESS_DEPRECATED_END
constLayer->outData.push_back(newEdgeAfterLayer);
getInputTo(newEdgeAfterLayer)[layer->name] = layer;
layer->insData.push_back(newEdgeAfterLayer);
};
auto all_layers = InferenceEngine::details::CNNNetSortTopologically(*netImpl);
for (auto &layer : all_layers) {
if (layer->type == "ScaleShift" && layer->insData.size() == 1) {
InferenceEngine::Blob::Ptr scalesBlob = layer->blobs["weights"];
if (scalesBlob != nullptr)
createConstInputTo(layer, scalesBlob, "weights");
InferenceEngine::Blob::Ptr shiftBlob = layer->blobs["biases"];
if (shiftBlob != nullptr)
createConstInputTo(layer, shiftBlob, "biases");
} else if (layer->type == "PReLU" && layer->insData.size() == 1) {
InferenceEngine::Blob::Ptr scalesBlob = layer->blobs["weights"];
if (scalesBlob != nullptr)
createConstInputTo(layer, scalesBlob, "weights");
}
}
}
void CreateGraph(InferenceEngine::ICNNNetwork &network, const MKLDNNPlugin::MKLDNNExtensionManager::Ptr& extMgr,
MKLDNNPlugin::MKLDNNWeightsSharing::Ptr cache = {}) {
if (network.getFunction()) {
auto convertedNetwork = std::make_shared<InferenceEngine::details::CNNNetworkImpl>(network);
MoveInternalBlobsToConstLayers(convertedNetwork.get());
MKLDNNGraph::CreateGraph(static_cast<InferenceEngine::ICNNNetwork&>(*convertedNetwork),
extMgr, cache);
extMgr, cache);
} else {
InferenceEngine::details::CNNNetworkImpl* netImpl = dynamic_cast<InferenceEngine::details::CNNNetworkImpl*>(&network);
if (netImpl == nullptr) {
THROW_IE_EXCEPTION << "unexpected network type";
}
MoveInternalBlobsToConstLayers(netImpl);
MKLDNNGraph::CreateGraph(network, extMgr, cache);
}
}
@@ -227,9 +281,15 @@ public:
MKLDNNPlugin::MKLDNNWeightsSharing::Ptr cache;
if (network.getFunction()) {
auto convertedNetwork = std::make_shared<InferenceEngine::details::CNNNetworkImpl>(network);
MoveInternalBlobsToConstLayers(convertedNetwork.get());
MKLDNNGraph::CreateGraph(static_cast<InferenceEngine::ICNNNetwork&>(*convertedNetwork),
extensionManager, cache);
} else {
InferenceEngine::details::CNNNetworkImpl* netImpl = dynamic_cast<InferenceEngine::details::CNNNetworkImpl*>(&network);
if (netImpl == nullptr) {
THROW_IE_EXCEPTION << "unexpected network type";
}
MoveInternalBlobsToConstLayers(netImpl);
MKLDNNGraph::CreateGraph(network, extensionManager, cache);
}
}