From 9b85d67cce0404a7a05d6f2cb01bbefb9dbccb12 Mon Sep 17 00:00:00 2001 From: Krzysztof Bruniecki Date: Mon, 23 Nov 2020 14:18:17 +0100 Subject: [PATCH] Add test for RESULT_NOT_READY returned from Wait() in async mode (#2793) * Add shared test for RESULT_NOT_READY return from Wait() in async mode * Instantiate test for RESULT_NOT_READY for GNA Plugin only * Fix compile error * Increase model size for the RESULT_NOT_READY test * Reuse most of the test * Apply review - Fix typo * Make the test deterministic * Use callback timestamp * Apply review * Use promise and future --- .../behavior/infer_request.cpp | 7 +++ .../shared/include/behavior/infer_request.hpp | 40 ++++++++++++- .../include/subgraph_tests/basic_lstm.hpp | 6 +- .../shared/src/subgraph_tests/basic_lstm.cpp | 60 +++++++++++-------- 4 files changed, 87 insertions(+), 26 deletions(-) diff --git a/inference-engine/tests/functional/plugin/gna/shared_tests_instances/behavior/infer_request.cpp b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/behavior/infer_request.cpp index 952b093b423..14c748e17ec 100644 --- a/inference-engine/tests/functional/plugin/gna/shared_tests_instances/behavior/infer_request.cpp +++ b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/behavior/infer_request.cpp @@ -17,4 +17,11 @@ INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestTests, ::testing::Values(std::map({}))), InferRequestTests::getTestCaseName); +INSTANTIATE_TEST_CASE_P(smoke_BehaviorTests, InferRequestTestsResultNotReady, + ::testing::Combine( + ::testing::ValuesIn(netPrecisions), + ::testing::Values(CommonTestUtils::DEVICE_GNA), + ::testing::Values(std::map({}))), + InferRequestTests::getTestCaseName); + } // namespace diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/infer_request.hpp b/inference-engine/tests/functional/plugin/shared/include/behavior/infer_request.hpp index ac5c1b5e680..f97eec8eed7 100644 --- 
a/inference-engine/tests/functional/plugin/shared/include/behavior/infer_request.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/behavior/infer_request.hpp @@ -4,6 +4,8 @@ #pragma once +#include +#include #include #include #include @@ -23,6 +25,7 @@ #include "functional_test_utils/plugin_cache.hpp" #include "functional_test_utils/blob_utils.hpp" #include "ngraph_functions/subgraph_builders.hpp" +#include "subgraph_tests/basic_lstm.hpp" namespace BehaviorTestsDefinitions { using InferRequestTests = BehaviorTestsUtils::BehaviorTestsBasic; @@ -623,4 +626,39 @@ TEST_P(InferRequestTests, returnDeviceBusyOnGetPerformanceCountAfterAsyncInfer) std::cout << "Exception" << e.what() << std::endl; } } -} // namespace BehaviorTestsDefinitions \ No newline at end of file + +class InferRequestTestsResultNotReady : public InferRequestTests { +}; + +TEST_P(InferRequestTestsResultNotReady, ReturnResultNotReadyFromWaitInAsyncModeForTooSmallTimeout) { + // Skip test according to plugin specific disabledTestPatterns() (if any) + SKIP_IF_CURRENT_TEST_IS_DISABLED() + // Create CNNNetwork from ngraph::Function + // return function which computes around 20ms on GNA SW + function = LayerTestsDefinitions::Basic_LSTM_S::GetNetwork(3000, 380); + InferenceEngine::CNNNetwork cnnNet(function); + // Load CNNNetwork to target plugins + auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + // Create InferRequest + InferenceEngine::InferRequest req; + ASSERT_NO_THROW(req = execNet.CreateInferRequest()); + InferenceEngine::ResponseDesc response; + InferenceEngine::StatusCode sts = InferenceEngine::StatusCode::OK; + std::promise callbackTimeStamp; + auto callbackTimeStampFuture = callbackTimeStamp.get_future(); + // add a callback to the request and capture the timestamp + req.SetCompletionCallback([&]() { + callbackTimeStamp.set_value(std::chrono::system_clock::now()); + }); + req.StartAsync(); + ASSERT_NO_THROW(sts = 
req.Wait(InferenceEngine::IInferRequest::WaitMode::STATUS_ONLY)); + // get timestamp taken AFTER return from the Wait(STATUS_ONLY) + const auto afterWaitTimeStamp = std::chrono::system_clock::now(); + // IF the callback timestamp is larger than the afterWaitTimeStamp + // then we should observe RESULT_NOT_READY + if (afterWaitTimeStamp < callbackTimeStampFuture.get()) { + ASSERT_TRUE(sts == InferenceEngine::StatusCode::RESULT_NOT_READY); + } + ASSERT_NO_THROW(req.Wait(InferenceEngine::IInferRequest::WaitMode::RESULT_READY)); +} +} // namespace BehaviorTestsDefinitions diff --git a/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/basic_lstm.hpp b/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/basic_lstm.hpp index daa8ca358fe..959ad481421 100644 --- a/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/basic_lstm.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/basic_lstm.hpp @@ -27,7 +27,11 @@ public: static std::string getTestCaseName(testing::TestParamInfo obj); void Run() override; - + static std::shared_ptr GetNetwork(uint64_t thirdDimOut, + uint64_t hiddenSize, + const InferenceEngine::Precision& netPrecission = InferenceEngine::Precision::FP32, + std::vector* hidden_memory_init_out = nullptr, + std::vector* cell_memory_init_out = nullptr); protected: size_t hidden_size; std::vector hidden_memory_init; diff --git a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/basic_lstm.cpp b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/basic_lstm.cpp index ebb66d750db..350ecfc2cdc 100644 --- a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/basic_lstm.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/basic_lstm.cpp @@ -47,52 +47,64 @@ void Basic_LSTM_S::SetUp() { InferenceEngine::Precision netPrecision; std::tie(netPrecision, targetDevice, configuration) = this->GetParam(); auto ngPrc = 
FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision); - - auto params = ngraph::builder::makeParams(ngPrc, { {1, 490} }); - hidden_size = 118; - const size_t batch_size = 1; - outPrc = InferenceEngine::Precision::FP32; - //Reshape_1 [1,490] -> [1, 10, 49] - std::vector outFormShapes1 = { batch_size, 10, 49 }; - auto pattern1 = std::make_shared(ngraph::element::Type_t::i64, ngraph::Shape{3}, outFormShapes1); + function = GetNetwork(49, hidden_size, netPrecision, &hidden_memory_init, &cell_memory_init); +} + +std::shared_ptr Basic_LSTM_S::GetNetwork(uint64_t thirdDimOut, + uint64_t hiddenSize, + const InferenceEngine::Precision& netPrecission, + std::vector* hidden_memory_init_out, + std::vector* cell_memory_init_out) { + auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecission); + + auto params = ngraph::builder::makeParams(ngPrc, { {1, 10 * thirdDimOut} }); + + const size_t batch_size = 1; + + //Reshape_1 [1,thirdDimOut*10] -> [1, 10, thirdDimOut] + std::vector outFormShapes1 = { batch_size, 10, thirdDimOut }; + auto pattern1 = std::make_shared(ngraph::element::Type_t::i64, ngraph::Shape{ 3 }, outFormShapes1); auto reshape1 = std::make_shared(params[0], pattern1, false); auto reshape1_shape = reshape1->output(0).get_shape(); - auto H_init = ngraph::builder::makeConstant(ngPrc, { batch_size, hidden_size }, {}, true); - auto C_init = ngraph::builder::makeConstant(ngPrc, { batch_size, hidden_size }, {}, true); - hidden_memory_init = std::static_pointer_cast(H_init)->cast_vector(); - cell_memory_init = std::static_pointer_cast(C_init)->cast_vector(); - - auto H_t = std::make_shared(ngPrc, ngraph::Shape{ batch_size, hidden_size }); - auto C_t = std::make_shared(ngPrc, ngraph::Shape{ batch_size, hidden_size }); + auto H_init = ngraph::builder::makeConstant(ngPrc, { batch_size, hiddenSize }, {}, true); + auto C_init = ngraph::builder::makeConstant(ngPrc, { batch_size, hiddenSize }, {}, true); + if (hidden_memory_init_out != nullptr) { + 
*hidden_memory_init_out = std::static_pointer_cast(H_init)->cast_vector(); + } + if (cell_memory_init_out != nullptr) { + *cell_memory_init_out = std::static_pointer_cast(C_init)->cast_vector(); + } + auto H_t = std::make_shared(ngPrc, ngraph::Shape{ batch_size, hiddenSize }); + auto C_t = std::make_shared(ngPrc, ngraph::Shape{ batch_size, hiddenSize }); H_t->set_friendly_name("hidden_state_1"); C_t->set_friendly_name("cell_state_1"); //Body auto X = std::make_shared(ngPrc, ngraph::Shape{ batch_size, 1, reshape1_shape[2] }); - auto weightsNode = ngraph::builder::makeConstant(ngPrc, { 4 * hidden_size, reshape1_shape[2] }, {}, true); - auto reccurrenceWeightsNode = ngraph::builder::makeConstant(ngPrc, { 4 * hidden_size, hidden_size }, {}, true); + auto weightsNode = ngraph::builder::makeConstant(ngPrc, { 4 * hiddenSize, reshape1_shape[2] }, {}, true); + auto reccurrenceWeightsNode = ngraph::builder::makeConstant(ngPrc, { 4 * hiddenSize, hiddenSize }, {}, true); //lstm [1, 10], [1, 118], [1, 118] -> [1, 118], [1, 118] outFormShapes1 = { batch_size, reshape1_shape[2] }; - auto constantX = std::make_shared(ngraph::element::i64, ngraph::Shape{2}, outFormShapes1); + auto constantX = std::make_shared(ngraph::element::i64, ngraph::Shape{ 2 }, outFormShapes1); auto lstm1 = std::make_shared(std::make_shared(X, constantX, false), H_t, C_t, - weightsNode, reccurrenceWeightsNode, hidden_size); + weightsNode, reccurrenceWeightsNode, hiddenSize); auto H_o = lstm1->output(0); auto C_o = lstm1->output(1); - //TensorIterator [1, 10, 49] [1, 118], [1, 118] -> [1, 118] + //TensorIterator [1, 10, thirdDimOut] [1, hiddenSize], [1, hiddenSize] -> [1, hiddenSize] auto body = std::make_shared( ngraph::OutputVector{ H_o, C_o }, ngraph::ParameterVector{ X, H_t, C_t }); auto tensor_iterator = std::make_shared(); tensor_iterator->set_body(body); - //input tensor shape: [1, 10, 49] chunk shape: [1, 1, 49] + //input tensor shape: [1, 10, thirdDimOut] chunk shape: [1, 1, thirdDimOut] 
tensor_iterator->set_sliced_input(X, reshape1, 0, 1, 1, -1, 1); tensor_iterator->set_merged_input(H_t, H_init, H_o); tensor_iterator->set_merged_input(C_t, C_init, C_o); @@ -100,10 +112,10 @@ void Basic_LSTM_S::SetUp() { auto out0 = tensor_iterator->get_iter_value(H_o, -1); const size_t output_size = 12; - auto fc1 = ngraph::builder::makeFullyConnected(out0, ngPrc, output_size, true, { hidden_size, output_size }, { 1 }, { 1 }); + auto fc1 = ngraph::builder::makeFullyConnected(out0, ngPrc, output_size, true, { hiddenSize, output_size }, { 1 }, { 1 }); - ngraph::ResultVector results {std::make_shared(fc1)}; - function = std::make_shared(results, params, "Basic_LSTM_S"); + ngraph::ResultVector results{ std::make_shared(fc1) }; + return std::make_shared(results, params, "Basic_LSTM_S"); } void Basic_LSTM_S::Run() {