Try to adopt the softmax tests to the new OV test infrastructure

This commit is contained in:
Efode, Irina
2021-10-11 21:20:58 +03:00
parent c66d8626e4
commit 4e32737bc2
7 changed files with 62 additions and 55 deletions

View File

@@ -11,12 +11,8 @@ using namespace LayerTestsDefinitions;
namespace {
-const std::vector<InferenceEngine::Precision> netPrecisions = {
-        InferenceEngine::Precision::FP32,
-};
-const std::vector<InferenceEngine::Layout> inputLayouts2D = {
-        InferenceEngine::Layout::NC,
+const std::vector<ov::element::Type_t> netPrecisions = {
+        ov::element::Type_t::f32,
+};
const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputStaticShape2D = {
@@ -38,10 +34,8 @@ const std::vector<size_t> axis2D = {
const auto params2D_static = testing::Combine(
        testing::ValuesIn(netPrecisions),
-        testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        testing::ValuesIn(inputLayouts2D),
-        testing::Values(InferenceEngine::Layout::ANY),
+        // testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+        // testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        testing::ValuesIn(inputStaticShape2D),
        testing::ValuesIn(axis2D),
        testing::Values(CommonTestUtils::DEVICE_CPU),
@@ -50,10 +44,8 @@ const auto params2D_static = testing::Combine(
const auto params2D_dynamic = testing::Combine(
        testing::ValuesIn(netPrecisions),
-        testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        testing::ValuesIn(inputLayouts2D),
-        testing::Values(InferenceEngine::Layout::ANY),
+        // testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+        // testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        testing::ValuesIn(inputDynamicShape2D),
        testing::ValuesIn(axis2D),
        testing::Values(CommonTestUtils::DEVICE_CPU),
@@ -90,10 +82,8 @@ const std::vector<size_t> axis4D = {0, 1, 2, 3};
const auto params4Dstatic = testing::Combine(
        testing::ValuesIn(netPrecisions),
-        testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        testing::Values(InferenceEngine::Layout::NCHW),
-        testing::Values(InferenceEngine::Layout::ANY),
+        // testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+        // testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        testing::ValuesIn(inputStaticShape4D),
        testing::ValuesIn(axis4D),
        testing::Values(CommonTestUtils::DEVICE_CPU),
@@ -102,10 +92,8 @@ const auto params4Dstatic = testing::Combine(
const auto params4Ddynamic = testing::Combine(
        testing::ValuesIn(netPrecisions),
-        testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-        testing::Values(InferenceEngine::Layout::NCHW),
-        testing::Values(InferenceEngine::Layout::ANY),
+        // testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+        // testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        testing::ValuesIn(inputDynamicShape4D),
        testing::ValuesIn(axis4D),
        testing::Values(CommonTestUtils::DEVICE_CPU),
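
For orientation: after this change each Combine must line up with the trimmed five-field softMaxLayerTestParams tuple (element type, shape pair, axis, device, config) defined later in this commit. A hedged sketch of how such a parameter set is typically wired into the fixture; the suite name and exact arguments below are assumptions and are not part of this diff:

    // Hypothetical instantiation, shown only to illustrate how params2D_static
    // would feed the SoftMaxLayerTest fixture.
    INSTANTIATE_TEST_SUITE_P(smoke_SoftMax2D_static,
                             SoftMaxLayerTest,
                             params2D_static,
                             SoftMaxLayerTest::getTestCaseName);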

View File

@@ -9,7 +9,7 @@
namespace LayerTestsDefinitions {
TEST_P(SoftMaxLayerTest, CompareWithRefs) {
-    Run();
+    run();
}
} // namespace LayerTestsDefinitions

View File

@@ -78,6 +78,9 @@ protected:
    virtual void infer();
    //
    virtual void validate();
+    void init_input_shapes(const std::pair<std::vector<ov::PartialShape>, std::vector<std::vector<ov::Shape>>>& shapes);
+    void init_input_shapes(const std::pair<ov::PartialShape, std::vector<ov::Shape>>& shapes);
    //
    //

View File

@@ -10,26 +10,26 @@
#include <tuple>
#include <vector>
#include "shared_test_classes/base/layer_test_utils.hpp"
#include "shared_test_classes/base/ov_subgraph.hpp"
#include "ngraph_functions/builders.hpp"
#include "ngraph_functions/utils/ngraph_helpers.hpp"
namespace LayerTestsDefinitions {
using softMaxLayerTestParams = std::tuple<
-        InferenceEngine::Precision,                                   // netPrecision
-        InferenceEngine::Precision,                                   // Input precision
-        InferenceEngine::Precision,                                   // Output precision
-        InferenceEngine::Layout,                                      // Input layout
-        InferenceEngine::Layout,                                      // Output layout
-        std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>,  // Dynamic shape + Target static shapes
+        ngraph::element::Type_t,                                      // netPrecision
+        // ngraph::element::Type,                                     // Input precision
+        // ngraph::element::Type,                                     // Output precision
+        // InferenceEngine::Layout,                                   // Input layout
+        // InferenceEngine::Layout,                                   // Output layout
+        std::pair<ov::PartialShape, std::vector<ov::Shape>>,          // Dynamic shape + Target static shapes
size_t, // axis
std::string, // targetDevice
std::map<std::string, std::string> // config
>;
class SoftMaxLayerTest : public testing::WithParamInterface<softMaxLayerTestParams>,
-                         virtual public LayerTestsUtils::LayerTestsCommon {
+                         virtual public ov::test::SubgraphBaseTest {
public:
static std::string getTestCaseName(const testing::TestParamInfo<softMaxLayerTestParams>& obj);
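
To make the new tuple layout concrete, here is a hedged sketch of a single parameter entry; the shapes, axis, and device values are invented purely for illustration:

    // One hypothetical softMaxLayerTestParams value:
    // (element type, {dynamic shape, target static shapes}, axis, device, config).
    softMaxLayerTestParams example = std::make_tuple(
            ov::element::Type_t::f32,
            std::pair<ov::PartialShape, std::vector<ov::Shape>>{
                    ov::PartialShape{-1, 10},               // dynamic batch dimension
                    {ov::Shape{1, 10}, ov::Shape{4, 10}}},  // target static shapes
            size_t{1},                                      // softmax axis
            std::string{CommonTestUtils::DEVICE_CPU},
            std::map<std::string, std::string>{});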

View File

@@ -37,7 +37,7 @@ void SubgraphBaseTest::run() {
summary.updateOPsStats(function, status);
SKIP_IF_CURRENT_TEST_IS_DISABLED();
-    OPENVINO_ASSERT(targetStaticShapes.empty(), "Target Static Shape is empty!!!");
+    OPENVINO_ASSERT(!targetStaticShapes.empty(), "Target Static Shape is empty!!!");
std::string errorMessage;
try {
compile_model();
@@ -300,7 +300,30 @@ void SubgraphBaseTest::resize_ngraph_function(const std::vector<ngraph::Shape>&
shapes.insert({*params[i]->get_output_tensor(0).get_names().begin(), targetInputStaticShapes[i]});
}
function->reshape(shapes);
-    functionRefs->reshape(shapes);
+    // functionRefs->reshape(shapes);
}
+void SubgraphBaseTest::init_input_shapes(const std::pair<std::vector<ov::PartialShape>, std::vector<std::vector<ov::Shape>>>& shapes) {
+    targetStaticShapes = shapes.second;
+    if (!shapes.first.empty()) {
+        inputDynamicShapes = shapes.first;
+    } else {
+        OPENVINO_ASSERT(targetStaticShapes.size() == 1, "Incorrect size of targetStaticShapes for static scenario");
+        for (const auto& targetStaticShape : targetStaticShapes.front()) {
+            inputDynamicShapes.emplace_back(targetStaticShape);
+        }
+    }
+}
+void SubgraphBaseTest::init_input_shapes(const std::pair<ov::PartialShape, std::vector<ov::Shape>>& shapes) {
+    std::pair<std::vector<ov::PartialShape>, std::vector<std::vector<ov::Shape>>> tmpShapeObj;
+    if (shapes.first.rank() != 0) {
+        tmpShapeObj.first = {shapes.first};
+    } else {
+        tmpShapeObj.first = {};
+    }
+    tmpShapeObj.second = {shapes.second};
+    init_input_shapes(tmpShapeObj);
+}
} // namespace test
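
A short usage sketch of the new helper, assuming it is called from a test's SetUp() the way the SoftMax change below does; the shapes are made up, and the two calls are independent examples starting from fresh state:

    // Dynamic case: the PartialShape is kept as the single dynamic input shape.
    init_input_shapes({ov::PartialShape{-1, 10}, {ov::Shape{2, 10}}});
    // inputDynamicShapes == { {-1, 10} }
    // targetStaticShapes == { { {2, 10} } }

    // Static case: a rank-0 (default-constructed) PartialShape takes the else
    // branch, so each target static shape is reused as a fully static
    // "dynamic" shape.
    init_input_shapes({ov::PartialShape{}, {ov::Shape{2, 10}}});
    // inputDynamicShapes == { {2, 10} }
    // targetStaticShapes == { { {2, 10} } }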

View File

@@ -7,19 +7,18 @@
namespace LayerTestsDefinitions {
std::string SoftMaxLayerTest::getTestCaseName(const testing::TestParamInfo<softMaxLayerTestParams>& obj) {
-    InferenceEngine::Precision netPrecision;
-    InferenceEngine::Precision inPrc, outPrc;
-    InferenceEngine::Layout inLayout, outLayout;
+    ngraph::element::Type_t netPrecision;
+    // InferenceEngine::Precision inPrc, outPrc;
    std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>> shapes;
    size_t axis;
    std::string targetDevice;
    std::map<std::string, std::string> config;
-    std::tie(netPrecision, inPrc, outPrc, inLayout, outLayout, shapes, axis, targetDevice, config) = obj.param;
+    std::tie(netPrecision, shapes, axis, targetDevice, config) = obj.param;
    std::ostringstream result;
-    result << "netPRC=" << netPrecision.name() << "_";
-    result << "inPRC=" << inPrc.name() << "_";
-    result << "outPRC=" << outPrc.name() << "_";
+    result << "netPRC=" << netPrecision << "_";
+    // result << "inPRC=" << inPrc.name() << "_";
+    // result << "outPRC=" << outPrc.name() << "_";
result << "IS=" << CommonTestUtils::partialShape2str({shapes.first}) << "_";
result << "TS=";
for (const auto& item : shapes.second) {
@@ -32,21 +31,14 @@ std::string SoftMaxLayerTest::getTestCaseName(const testing::TestParamInfo<softM
}
void SoftMaxLayerTest::SetUp() {
-    std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>> shapes;
-    InferenceEngine::Precision netPrecision;
+    std::pair<ov::PartialShape, std::vector<ov::Shape>> shapes;
+    ngraph::element::Type_t ngPrc;
    size_t axis;
-    std::tie(netPrecision, inPrc, outPrc, inLayout, outLayout, shapes, axis, targetDevice, configuration) = GetParam();
-    outLayout = inLayout;
+    std::tie(ngPrc, shapes, axis, targetDevice, configuration) = GetParam();
+    init_input_shapes(shapes);
-    targetStaticShapes.reserve(shapes.second.size());
-    for (const auto& staticShape : shapes.second) {
-        targetStaticShapes.push_back({staticShape});
-    }
-    inputDynamicShapes = {shapes.first};
-    const auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
-    const auto params = ngraph::builder::makeParams(ngPrc, {targetStaticShapes.front().front()});
+    const auto params = ngraph::builder::makeDynamicParams(ngPrc, inputDynamicShapes);
const auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
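
The hunk is cut off after paramOuts, so the rest of SetUp is not visible here. Purely as a hedged sketch of a plausible continuation under the new API (the op version, result wiring, and function name are assumptions, not taken from this diff):

    // Plausible continuation, not part of the shown diff:
    const auto softMax = std::make_shared<ngraph::opset1::Softmax>(paramOuts.at(0), axis);
    const ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(softMax)};
    function = std::make_shared<ngraph::Function>(results, params, "softMax");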

View File

@@ -241,9 +241,10 @@ std::vector<ov::runtime::Tensor>
// outputTensors[resultIndex]->read(output.second.data(), output.second.size());
// }
for (const auto& outTensor : outputTensors) {
-        ov::runtime::Tensor a;
-        outTensor->read(&a, outTensor->get_size_in_bytes());
-        outputs.push_back(a);
+        std::shared_ptr<ov::runtime::Tensor> a = std::dynamic_pointer_cast<ov::runtime::Tensor>(outTensor);
+        ov::runtime::Tensor b = dynamic_cast<ov::runtime::Tensor>(*a);
+        ov::runtime::Tensor c(b);
+        outputs.emplace_back(c);
}
return outputs;
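
Side note on the new loop: `dynamic_cast<ov::runtime::Tensor>(*a)` casts to a plain class type rather than a pointer or reference, which is not valid C++, so this hunk reads as work in progress. If outputTensors really holds std::shared_ptr<ov::runtime::Tensor> (an assumption), a simpler hedged sketch that relies on the Tensor wrapper's copy semantics would be:

    // Assumes outputTensors elements are std::shared_ptr<ov::runtime::Tensor>;
    // copying the Tensor wrapper is enough, no cast chain needed.
    for (const auto& outTensor : outputTensors) {
        if (outTensor) {
            outputs.push_back(*outTensor);  // the copy shares the underlying data
        }
    }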