Auto plugin async infer request implementation (#5707)

* Async auto-request: now that SetCallback has been revamped (after https://github.com/openvinotoolkit/openvino/pull/5645 was merged), it is safe to set the callback.
Also modified the test to verify that the callback is invoked on the same (user's) request, and not, e.g., on the actual device's request.

* Override CreateInferRequestImpl() instead of CreateInferRequest()

Signed-off-by: Shoujiang Ma <shoujiang.ma@intel.com>

Co-authored-by: myshevts <maxim.y.shevtsov@intel.com>
This commit is contained in:
Shoujiang Ma
2021-05-20 20:05:37 +08:00
committed by GitHub
parent dcd27f0776
commit 370617d909
7 changed files with 43 additions and 5 deletions

View File

@@ -217,6 +217,18 @@ public:
* @return true if current InferRequest object is initialized, false - otherwise
*/
explicit operator bool() const noexcept;
/**
* @brief Compares whether this request wraps the same impl underneath
* @return true if current InferRequest object doesn't wrap the same impl as the operator's arg
*/
bool operator!=(const InferRequest&) const noexcept;
/**
* @brief Compares whether this request wraps the same impl underneath
* @return true if current InferRequest object wraps the same impl as the operator's arg
*/
bool operator==(const InferRequest&) const noexcept;
};
template<>

View File

@@ -21,10 +21,10 @@ AutoExecutableNetwork::AutoExecutableNetwork(const SoExecutableNetworkInternal&
AutoExecutableNetwork::~AutoExecutableNetwork() = default;
IInferRequestInternal::Ptr AutoExecutableNetwork::CreateInferRequestImpl(InputsDataMap networkInputs,
InferenceEngine::IInferRequestInternal::Ptr AutoExecutableNetwork::CreateInferRequestImpl(InputsDataMap networkInputs,
OutputsDataMap networkOutputs) {
SoIInferRequestInternal inferRequest = { _network, _network->CreateInferRequest() };
return std::make_shared<AutoInferRequest>(networkInputs, networkOutputs, inferRequest);
SoIInferRequestInternal inferRequest = {_network, _network->CreateInferRequest()};
return std::make_shared<AutoInferRequest>(_networkInputs, _networkOutputs, inferRequest);
}
void AutoExecutableNetwork::Export(std::ostream& networkModel) {

View File

@@ -24,7 +24,7 @@ struct DeviceInformation {
std::map<std::string, std::string> config;
};
class AutoExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
class AutoExecutableNetwork : public InferenceEngine::ExecutableNetworkInternal {
public:
using Ptr = std::shared_ptr<AutoExecutableNetwork>;
@@ -38,6 +38,7 @@ public:
InferenceEngine::Parameter GetConfig(const std::string& name) const override;
InferenceEngine::IInferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
InferenceEngine::OutputsDataMap networkOutputs) override;
~AutoExecutableNetwork() override;
private:

View File

@@ -36,4 +36,16 @@ void AutoInferRequest::Cancel() {
_inferRequest->Cancel();
}
// Starts asynchronous inference. AutoInferRequest is a thin proxy:
// the call is forwarded verbatim to the actual device's request.
void AutoInferRequest::StartAsync() {
_inferRequest->StartAsync();
}
// Waits for the forwarded asynchronous inference to complete.
// @param millis_timeout forwarded verbatim to the device request — presumably
//        milliseconds or an IInferRequest::WaitMode sentinel; confirm against API
// @return the status code reported by the underlying device request
InferenceEngine::StatusCode AutoInferRequest::Wait(int64_t millis_timeout) {
return _inferRequest->Wait(millis_timeout);
}
// Installs the completion callback on the underlying device request so it
// fires when the forwarded async inference finishes.
void AutoInferRequest::SetCallback(Callback callback) {
_inferRequest->SetCallback(callback);
}
} // namespace AutoPlugin

View File

@@ -30,6 +30,10 @@ public:
void SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr& data) override;
InferenceEngine::Blob::Ptr GetBlob(const std::string& name) override;
void Cancel() override;
//async impl
void StartAsync() override;
InferenceEngine::StatusCode Wait(int64_t millis_timeout) override;
void SetCallback(Callback callback) override;
private:
InferenceEngine::SoIInferRequestInternal _inferRequest;

View File

@@ -206,4 +206,12 @@ bool InferRequest::operator!() const noexcept {
// Checks whether this InferRequest is initialized, i.e. wraps a valid impl.
// True iff _impl double-negates to true — presumably a non-null smart/raw
// pointer to the implementation; same test as !operator!().
InferRequest::operator bool() const noexcept {
return !!_impl;
}
// Compares whether this request wraps the same impl underneath.
// Returns true when the two InferRequest objects wrap different impls;
// defined as the logical negation of operator==.
bool InferRequest::operator!=(const InferRequest& r) const noexcept {
    return !this->operator==(r);
}
// Compares whether this request wraps the same impl underneath.
// Two InferRequest objects compare equal iff they share the same _impl.
bool InferRequest::operator==(const InferRequest& r) const noexcept {
    return _impl == r._impl;
}
} // namespace InferenceEngine

View File

@@ -36,8 +36,9 @@ TEST_P(CallbackTests, canCallSyncAndAsyncWithCompletionCallback) {
// Create InferRequest
InferenceEngine::InferRequest req = execNet.CreateInferRequest();
bool isCalled = false;
req.SetCompletionCallback<std::function<void(InferenceEngine::InferRequest, InferenceEngine::StatusCode)>>(
req.SetCompletionCallback<std::function<void(InferenceEngine::InferRequest r, InferenceEngine::StatusCode)>>(
[&](InferenceEngine::InferRequest request, InferenceEngine::StatusCode status) {
ASSERT_TRUE(req == request); //the callback is called on the same impl of the request
// HSD_1805940120: Wait on starting callback return HDDL_ERROR_INVAL_TASK_HANDLE
if (targetDevice != CommonTestUtils::DEVICE_HDDL) {
ASSERT_EQ(static_cast<int>(InferenceEngine::StatusCode::OK), status);