[IE CLDNN] Added LogSoftmax-5 operation (#2945)

Author: Roman Lyamin, 2020-11-11 08:53:30 +03:00 (committed by GitHub)
parent 252b99dc01
commit 6b09d5769f
4 changed files with 113 additions and 1 deletion
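LogSoftmax-5 computes the element-wise logarithm of the softmax of its input along a single axis. As a reference for what the plugin has to reproduce, the standalone sketch below (illustrative only; plain C++, no Inference Engine or clDNN dependencies, not part of this commit) evaluates one slice using the numerically stable form (x_i - m) - log(sum_j exp(x_j - m)) with m = max(x), which equals log(softmax(x)_i).

// Illustrative reference for LogSoftmax over one slice; not part of the commit.
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

std::vector<double> log_softmax(const std::vector<double>& x) {
    // Numerically stable form: (x_i - m) - log(sum_j exp(x_j - m)), m = max(x).
    // Mathematically equal to log(softmax(x)_i).
    const double m = *std::max_element(x.begin(), x.end());
    double sum = 0.0;
    for (double v : x) sum += std::exp(v - m);
    const double log_sum = std::log(sum);

    std::vector<double> out(x.size());
    for (size_t i = 0; i < x.size(); ++i) out[i] = (x[i] - m) - log_sum;
    return out;
}

int main() {
    for (double v : log_softmax({1.0, 2.0, 3.0}))
        std::printf("%.6f\n", v);  // -2.407606, -1.407606, -0.407606
    return 0;
}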


@@ -136,7 +136,8 @@ InferenceEngine::ICNNNetwork::Ptr clDNNEngine::CloneAndTransformNetwork(const In
                std::dynamic_pointer_cast<const ::ngraph::opset4::HSwish>(node) ||
                std::dynamic_pointer_cast<const ::ngraph::opset4::ReduceL1>(node) ||
                std::dynamic_pointer_cast<const ::ngraph::opset4::ReduceL2>(node) ||
                std::dynamic_pointer_cast<const ::ngraph::opset4::SoftPlus>(node);
                std::dynamic_pointer_cast<const ::ngraph::opset4::SoftPlus>(node) ||
                std::dynamic_pointer_cast<const ::ngraph::opset5::LogSoftmax>(node);
        };
        auto nGraphFunc = clonedNetwork->getFunction();
        // Disable shape inference (WA for generic operations)
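In CloneAndTransformNetwork, the callback above marks node types the clDNN plugin handles natively. This hunk replaces the line that used to terminate the chain at opset4::SoftPlus with two lines that continue it through opset5::LogSoftmax, so the common transformation pipeline now leaves LogSoftmax nodes intact for the plugin. For node types not listed here, the pipeline would normally lower LogSoftmax into simpler ops. The sketch below shows roughly what such a lowering looks like when built with the public nGraph opset5 builders; it is illustrative only (the function name, shapes, and exact formulation are assumptions, not code from this commit), and production decompositions typically also subtract the per-axis maximum first for numerical stability.

// Illustrative sketch of a generic LogSoftmax lowering; not from this commit.
#include <memory>
#include <ngraph/ngraph.hpp>
#include <ngraph/opsets/opset5.hpp>

std::shared_ptr<ngraph::Function> decomposed_log_softmax(const ngraph::Shape& shape, int64_t axis) {
    using namespace ngraph;
    auto data = std::make_shared<opset5::Parameter>(element::f32, shape);
    auto axes = opset5::Constant::create(element::i64, Shape{1}, std::vector<int64_t>{axis});

    // x - log(reduce_sum(exp(x), axis)); keep_dims = true so the result broadcasts back over x.
    auto exp = std::make_shared<opset5::Exp>(data);
    auto sum = std::make_shared<opset5::ReduceSum>(exp, axes, true);
    auto log = std::make_shared<opset5::Log>(sum);
    auto out = std::make_shared<opset5::Subtract>(data, log);

    return std::make_shared<Function>(OutputVector{out}, ParameterVector{data});
}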


@@ -761,6 +761,7 @@ Program::LayerType Program::LayerTypeFromStr(const std::string &str) {
        { "Pooling" , Pooling },
        { "FullyConnected" , FullyConnected },
        { "SoftMax" , SoftMax },
        { "LogSoftmax", LogSoftmax },
        { "Power" , Power },
        { "Split" , Split },
        { "VariadicSplit", VariadicSplit },
@@ -1442,6 +1443,8 @@ void Program::CreateSingleLayerPrimitive(cldnn::topology& topology, InferenceEng
            break;
        case SoftMax: CreateSoftMaxPrimitive(topology, layer);
            break;
        case LogSoftmax: CreateLogSoftmaxPrimitive(topology, layer);
            break;
        case Power: CreatePowerPrimitive(topology, layer);
            break;
        case Split: CreateSplitPrimitive(topology, layer);
@@ -2922,6 +2925,36 @@ void Program::CreateSoftMaxPrimitive(cldnn::topology& topology, InferenceEngine:
    AddPrimitiveToProfiler(softmaxLayerName, layer);
}

void Program::CreateLogSoftmaxPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer) {
    ValidateLayer(layer, 1);
    auto inputPrimitives = GetPrevLayersPrimitives(layer);
    auto logSoftmaxLayer = as<InferenceEngine::GenericLayer*>(layer);

    auto sz = logSoftmaxLayer->input().get()->getTensorDesc().getDims().size();

    auto axis = logSoftmaxLayer->GetParamAsInt("axis", 1);
    if (axis < 0) axis += sz;

    cldnn::softmax::dimension_t softmax_axis;
    switch (axis) {
    case 0: softmax_axis = cldnn::softmax::normalize_all; break;
    case 1: softmax_axis = cldnn::softmax::normalize_f; break;
    case 2: softmax_axis = sz > 4 ? cldnn::softmax::normalize_z : cldnn::softmax::normalize_y; break;
    case 3: softmax_axis = sz > 4 ? cldnn::softmax::normalize_y : cldnn::softmax::normalize_x; break;
    case 4: softmax_axis = cldnn::softmax::normalize_x; break;
    default: THROW_CLDNN_EXCEPTION("Unsupported logsoftmax axis " << axis);
    }

    std::string softmaxLayerName = "softMax";
    auto softmaxPrim = cldnn::softmax(softmaxLayerName, inputPrimitives[0], softmax_axis);
    topology.add(softmaxPrim);
    AddPrimitiveToProfiler(softmaxLayerName, layer);

    std::string logSoftmaxLayerName = layer_type_name_ID(layer);
    auto logPrim = cldnn::activation(logSoftmaxLayerName, softmaxLayerName, cldnn::activation_func::log);
    topology.add(logPrim);
    AddPrimitiveToProfiler(logSoftmaxLayerName, layer);
}

void Program::CreateFullyConnectedPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer) {
    ValidateLayer(layer, {1, 2, 3});
    auto inputPrimitives = GetPrevLayersPrimitives(layer);
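CreateLogSoftmaxPrimitive lowers LogSoftmax into two clDNN primitives: a softmax over the requested axis followed by an elementwise natural-log activation, relying on the identity log_softmax(x) = log(softmax(x)). The switch maps the already-normalized, non-negative IR axis onto clDNN's dimension order: axis 0 normalizes over everything, axis 1 over the feature dimension, and axes 2-4 over z/y/x depending on whether the input is 5D (bfzyx) or 4D (bfyx). The intermediate softmax primitive is added under its own name and profiler entry, while the final log activation takes the layer's ID so downstream primitives connect to it. The standalone check below (illustrative only, not part of the commit) confirms the identity this lowering relies on.

// Standalone check that softmax-then-log matches the direct log-softmax formula
// x_i - log(sum_j exp(x_j)) on a single slice. Illustrative only.
#include <cassert>
#include <cmath>
#include <vector>

int main() {
    const std::vector<double> x = {0.5, -1.0, 2.0, 0.0};

    double sum_exp = 0.0;
    for (double v : x) sum_exp += std::exp(v);

    for (double v : x) {
        const double via_softmax_then_log = std::log(std::exp(v) / sum_exp);  // cldnn::softmax + log activation
        const double direct = v - std::log(sum_exp);                          // LogSoftmax definition
        assert(std::fabs(via_softmax_then_log - direct) < 1e-12);
    }
    return 0;
}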


@@ -138,6 +138,7 @@ public:
        Pooling,
        FullyConnected,
        SoftMax,
        LogSoftmax,
        Power,
        Split,
        VariadicSplit,
@@ -337,6 +338,7 @@ private:
    void CreateFusedSplitConvMergePrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer, bool useGroups = true);
    void CreatePowerPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
    void CreateSoftMaxPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
    void CreateLogSoftmaxPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
    void CreateFullyConnectedPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
    void CreatePoolingPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
    void CreateLRNPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);


@@ -0,0 +1,76 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <vector>

#include "single_layer_tests/log_softmax.hpp"
#include "common_test_utils/test_constants.hpp"

using namespace LayerTestsDefinitions;

namespace {

const std::vector<InferenceEngine::Precision> netPrecisions = {
    InferenceEngine::Precision::FP32,
};

const std::vector<InferenceEngine::SizeVector> inputShapes2D = {
    InferenceEngine::SizeVector {1, 100},
    InferenceEngine::SizeVector {100, 1},
    InferenceEngine::SizeVector {10, 10},
};

const std::vector<int64_t> axis2D = {
    -1, 1
};

const auto params2D = testing::Combine(
    testing::ValuesIn(netPrecisions),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Layout::ANY),
    testing::Values(InferenceEngine::Layout::ANY),
    testing::ValuesIn(inputShapes2D),
    testing::ValuesIn(axis2D),
    testing::Values(CommonTestUtils::DEVICE_GPU),
    testing::Values(std::map<std::string, std::string>())
);

INSTANTIATE_TEST_CASE_P(
    smoke_LogSoftmax2D,
    LogSoftmaxLayerTest,
    params2D,
    LogSoftmaxLayerTest::getTestCaseName
);

const std::vector<InferenceEngine::SizeVector> inputShapes4D = {
    InferenceEngine::SizeVector {1, 100, 1, 1},
    InferenceEngine::SizeVector {1, 3, 4, 3},
    InferenceEngine::SizeVector {2, 3, 4, 5},
};

const std::vector<int64_t> axis4D = {
    -3, -2, -1, 1, 2, 3
};

const auto params4D = testing::Combine(
    testing::ValuesIn(netPrecisions),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Layout::ANY),
    testing::Values(InferenceEngine::Layout::ANY),
    testing::ValuesIn(inputShapes4D),
    testing::ValuesIn(axis4D),
    testing::Values(CommonTestUtils::DEVICE_GPU),
    testing::Values(std::map<std::string, std::string>())
);

INSTANTIATE_TEST_CASE_P(
    smoke_LogSoftmax4D,
    LogSoftmaxLayerTest,
    params4D,
    LogSoftmaxLayerTest::getTestCaseName
);

} // namespace
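Each INSTANTIATE_TEST_CASE_P expands the testing::Combine arguments as a cartesian product, so this file adds 1 precision × 3 shapes × 2 axes = 6 smoke cases for the 2D suite and 1 × 3 × 6 = 18 for the 4D suite, all pinned to CommonTestUtils::DEVICE_GPU. The new cases can be listed or run selectively with the standard gtest flag --gtest_filter=*smoke_LogSoftmax* on the GPU functional-test binary (the binary name depends on the build, so it is not spelled out here).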