Bell/fix lifecycle coredump (#11934)

* enable binder schedule

Signed-off-by: fishbell <bell.song@intel.com>

* add cases

Signed-off-by: fishbell <bell.song@intel.com>

* refine

Signed-off-by: fishbell <bell.song@intel.com>

* fix build failure

Signed-off-by: fishbell <bell.song@intel.com>

* fix coredump

Signed-off-by: fishbell <bell.song@intel.com>

* do not return hw requests directly, potential issues

Signed-off-by: fishbell <bell.song@intel.com>

* fix bug

Signed-off-by: fishbell <bell.song@intel.com>

typo

Signed-off-by: fishbell <bell.song@intel.com>

* optimize memory

Signed-off-by: fishbell <bell.song@intel.com>

* hold the hw plugin

Signed-off-by: fishbell <bell.song@intel.com>

* Revert "hold the hw plugin"

This reverts commit 5b537f5b6f.

* apply the fix

Signed-off-by: fishbell <bell.song@intel.com>

apply the fix

Signed-off-by: fishbell <bell.song@intel.com>

* hold the plugin library for destructing tensor

Signed-off-by: fishbell <bell.song@intel.com>

* solve the virtual plugin GetBlob life cycle issue

Signed-off-by: fishbell <bell.song@intel.com>

* remove log

Signed-off-by: fishbell <bell.song@intel.com>

* refine interface

Signed-off-by: fishbell <bell.song@intel.com>

* fix build failure

Signed-off-by: fishbell <bell.song@intel.com>

* fix for hetero plugin

Signed-off-by: fishbell <bell.song@intel.com>

* replace with vector

* enable life time tests for virtual plugins

Signed-off-by: fishbell <bell.song@intel.com>

rework cases due to vpux build issue

Signed-off-by: fishbell <bell.song@intel.com>

disable context test for now

Signed-off-by: fishbell <bell.song@intel.com>

Co-authored-by: Chen Peter <peter.chen@intel.com>
This commit is contained in:
yanlan song
2022-07-06 13:21:17 +08:00
committed by GitHub
parent 7a50ce2491
commit e718e51a85
31 changed files with 177 additions and 42 deletions

View File

@@ -345,7 +345,7 @@ class InferencePlugin;
* @brief This class represents an object to work with different types
*/
class OPENVINO_API Any {
std::shared_ptr<void> _so;
std::vector<std::shared_ptr<void>> _so;
template <typename T>
using decay_t = typename std::decay<T>::type;
@@ -634,7 +634,7 @@ class OPENVINO_API Any {
friend class ::ov::RemoteTensor;
friend class ::ov::InferencePlugin;
Any(const Any& other, const std::shared_ptr<void>& so);
Any(const Any& other, const std::vector<std::shared_ptr<void>>& so);
void impl_check() const;

View File

@@ -36,7 +36,7 @@ class VariableState;
class OPENVINO_API Tensor {
protected:
std::shared_ptr<InferenceEngine::Blob> _impl; //!< Shared pointer to internal tensor representation
std::shared_ptr<void> _so; //!< Reference to dynamically loaded library
std::vector<std::shared_ptr<void>> _so; //!< Reference to dynamically loaded library
/**
* @brief Constructs Tensor from the initialized std::shared_ptr
@@ -44,7 +44,7 @@ protected:
* @param so Plugin to use. This is required to ensure that Tensor can work properly even if plugin object is
* destroyed.
*/
Tensor(const std::shared_ptr<InferenceEngine::Blob>& impl, const std::shared_ptr<void>& so);
Tensor(const std::shared_ptr<InferenceEngine::Blob>& impl, const std::vector<std::shared_ptr<void>>& so);
friend class ov::Core;
friend class ov::InferRequest;

View File

@@ -72,7 +72,7 @@ Any::~Any() {
_impl = {};
}
Any::Any(const Any& other, const std::shared_ptr<void>& so) : _impl{other._impl}, _so{so} {}
Any::Any(const Any& other, const std::vector<std::shared_ptr<void>>& so) : _impl{other._impl}, _so{so} {}
Any::Any(const char* str) : Any(std::string{str}) {}

View File

@@ -28,7 +28,9 @@ Tensor::~Tensor() {
_impl = {};
}
Tensor::Tensor(const std::shared_ptr<ie::Blob>& impl, const std::shared_ptr<void>& so) : _impl{impl}, _so{so} {
Tensor::Tensor(const std::shared_ptr<ie::Blob>& impl, const std::vector<std::shared_ptr<void>>& so)
: _impl{impl},
_so{so} {
OPENVINO_ASSERT(_impl != nullptr, "Tensor was not initialized.");
}

View File

@@ -122,6 +122,13 @@ public:
*/
virtual void SetPointerToPlugin(const std::shared_ptr<IInferencePlugin>& plugin);
/**
* @brief Gets the pointer to plugin so.
* @note Needed to correctly handle ownership between objects.
* @return A shared pointer to the plugin so
*/
virtual std::shared_ptr<void> GetPointerToSo();
/**
* @brief Sets configuration for current executable network
* @param config Map of pairs: (config parameter name, config parameter value)
@@ -183,6 +190,12 @@ protected:
* @note Needed to correctly handle ownership between objects.
*/
std::shared_ptr<IInferencePlugin> _plugin;
/**
* @brief A pointer to a plugin library.
* @note Needed to correctly handle ownership between objects.
*/
std::shared_ptr<void> _so;
};
/**

View File

@@ -224,6 +224,18 @@ public:
*/
std::shared_ptr<IExecutableNetworkInternal> getPointerToExecutableNetworkInternal() const;
/**
* @brief Sets the pointer to so when needed.
* @note Needed to correctly handle ownership between objects.
* @param[in] so The library so
*/
void setPointerToSo(const std::shared_ptr<void>& so);
/**
* @brief Returns the pointer to so.
* @returns The library
*/
std::shared_ptr<void> getPointerToSo() const;
/**
* @brief Gets the pointer to userData.
* @return Pointer to user data
@@ -338,6 +350,11 @@ protected:
* @note Needed to correctly handle ownership between objects.
*/
std::shared_ptr<IExecutableNetworkInternal> _exeNetwork;
/**
* @brief A shared pointer to loaded library
* @note Needed to correctly handle ownership between objects.
*/
std::shared_ptr<void> _so;
Callback _callback; //!< A callback
private:

View File

@@ -84,7 +84,7 @@ struct SoPtr {
std::shared_ptr<T> _ptr;
/**
* @brief The shared object or dinamic loaded library
* @brief The shared object or dynamic loaded library
*/
std::shared_ptr<void> _so;
};

View File

@@ -37,7 +37,7 @@ class CompiledModel;
class OPENVINO_RUNTIME_API RemoteContext {
protected:
std::shared_ptr<InferenceEngine::RemoteContext> _impl; //!< Pointer to the remote context implementation.
std::shared_ptr<void> _so; //!< Reference to the shared object that loaded implementation.
std::vector<std::shared_ptr<void>> _so; //!< Reference to the shared object that loaded implementation.
/**
* @brief Constructs RemoteContext from the initialized std::shared_ptr.
@@ -45,7 +45,8 @@ protected:
* @param so Plugin to use. This is required to ensure that RemoteContext can work properly even if a plugin
* object is destroyed.
*/
RemoteContext(const std::shared_ptr<InferenceEngine::RemoteContext>& impl, const std::shared_ptr<void>& so);
RemoteContext(const std::shared_ptr<InferenceEngine::RemoteContext>& impl,
const std::vector<std::shared_ptr<void>>& so);
friend class ov::Core;
friend class ov::CompiledModel;

View File

@@ -29,7 +29,7 @@ class InferRequest;
*/
class OPENVINO_RUNTIME_API VariableState {
std::shared_ptr<InferenceEngine::IVariableStateInternal> _impl;
std::shared_ptr<void> _so;
std::vector<std::shared_ptr<void>> _so;
/**
* @brief Constructs VariableState from the initialized std::shared_ptr.
@@ -38,7 +38,7 @@ class OPENVINO_RUNTIME_API VariableState {
* plugin object is destroyed.
*/
VariableState(const std::shared_ptr<InferenceEngine::IVariableStateInternal>& impl,
const std::shared_ptr<void>& so);
const std::vector<std::shared_ptr<void>>& so);
friend class ov::InferRequest;

View File

@@ -96,11 +96,11 @@ void ExecutableNetwork::SetConfig(const std::map<std::string, Parameter>& config
}
Parameter ExecutableNetwork::GetConfig(const std::string& name) const {
EXEC_NET_CALL_STATEMENT(return {_impl->GetConfig(name), _so});
EXEC_NET_CALL_STATEMENT(return {_impl->GetConfig(name), {_so}});
}
Parameter ExecutableNetwork::GetMetric(const std::string& name) const {
EXEC_NET_CALL_STATEMENT(return {_impl->GetMetric(name), _so});
EXEC_NET_CALL_STATEMENT(return {_impl->GetMetric(name), {_so}});
}
RemoteContext::Ptr ExecutableNetwork::GetContext() const {
@@ -243,15 +243,15 @@ Any CompiledModel::get_property(const std::string& name) const {
}
}
try {
return {_impl->GetMetric(name), _so};
return {_impl->GetMetric(name), {_so}};
} catch (ie::Exception&) {
return {_impl->GetConfig(name), _so};
return {_impl->GetConfig(name), {_so}};
}
});
}
RemoteContext CompiledModel::get_context() const {
OV_EXEC_NET_CALL_STATEMENT(return {_impl->GetContext(), _so});
OV_EXEC_NET_CALL_STATEMENT(return {_impl->GetContext(), {_so}});
}
bool CompiledModel::operator!() const noexcept {

View File

@@ -357,6 +357,7 @@ void InferRequest::set_output_tensor(const Tensor& tensor) {
}
Tensor InferRequest::get_tensor(const ov::Output<const ov::Node>& port) {
std::vector<std::shared_ptr<void>> soVec;
OV_INFER_REQ_CALL_STATEMENT({
const auto& name = get_legacy_name_from_port(port);
OPENVINO_ASSERT(!_impl->GetBlobs(name),
@@ -365,7 +366,9 @@ Tensor InferRequest::get_tensor(const ov::Output<const ov::Node>& port) {
name,
"'");
auto blob = _impl->GetBlob(name);
return {blob, _so};
soVec = {_so, _impl->getPointerToSo()};
Tensor tensor = {blob, soVec};
return tensor;
});
}
@@ -492,9 +495,11 @@ void InferRequest::set_callback(std::function<void(std::exception_ptr)> callback
std::vector<VariableState> InferRequest::query_state() {
std::vector<VariableState> variable_states;
std::vector<std::shared_ptr<void>> soVec;
OV_INFER_REQ_CALL_STATEMENT({
soVec = {_so, _impl->getPointerToSo()};
for (auto&& state : _impl->QueryState()) {
variable_states.emplace_back(VariableState{state, _so});
variable_states.emplace_back(VariableState{state, soVec});
}
})
return variable_states;

View File

@@ -106,19 +106,19 @@ struct InferencePlugin {
}
Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const {
PLUGIN_CALL_STATEMENT(return {_ptr->GetMetric(name, options), _so});
PLUGIN_CALL_STATEMENT(return {_ptr->GetMetric(name, options), {_so}});
}
ov::SoPtr<RemoteContext> CreateContext(const ParamMap& params) {
PLUGIN_CALL_STATEMENT(return {_ptr->CreateContext(params), _so});
PLUGIN_CALL_STATEMENT(return {_ptr->CreateContext(params), {_so}});
}
ov::SoPtr<RemoteContext> GetDefaultContext(const ParamMap& params) {
PLUGIN_CALL_STATEMENT(return {_ptr->GetDefaultContext(params), _so});
PLUGIN_CALL_STATEMENT(return {_ptr->GetDefaultContext(params), {_so}});
}
Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const {
PLUGIN_CALL_STATEMENT(return {_ptr->GetConfig(name, options), _so});
PLUGIN_CALL_STATEMENT(return {_ptr->GetConfig(name, options), {_so}});
}
};
} // namespace InferenceEngine
@@ -219,7 +219,7 @@ public:
}
Any get_metric(const std::string& name, const AnyMap& options) const {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetMetric(name, options), _so});
OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetMetric(name, options), {_so}});
}
SoPtr<ie::RemoteContext> create_context(const AnyMap& params) {
@@ -231,14 +231,14 @@ public:
}
Any get_config(const std::string& name, const AnyMap& options) const {
OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetConfig(name, options), _so});
OV_PLUGIN_CALL_STATEMENT(return {_ptr->GetConfig(name, options), {_so}});
}
Any get_property(const std::string& name, const AnyMap& arguments) const {
OV_PLUGIN_CALL_STATEMENT({
if (ov::supported_properties == name) {
try {
return {_ptr->GetMetric(name, arguments), _so};
return {_ptr->GetMetric(name, arguments), {_so}};
} catch (ie::Exception&) {
std::vector<ov::PropertyName> supported_properties;
try {
@@ -263,9 +263,9 @@ public:
}
}
try {
return {_ptr->GetMetric(name, arguments), _so};
return {_ptr->GetMetric(name, arguments), {_so}};
} catch (ie::Exception&) {
return {_ptr->GetConfig(name, arguments), _so};
return {_ptr->GetConfig(name, arguments), {_so}};
}
});
}

View File

@@ -51,7 +51,7 @@ RemoteContext::~RemoteContext() {
_impl = {};
}
RemoteContext::RemoteContext(const ie::RemoteContext::Ptr& impl, const std::shared_ptr<void>& so)
RemoteContext::RemoteContext(const ie::RemoteContext::Ptr& impl, const std::vector<std::shared_ptr<void>>& so)
: _impl{impl},
_so{so} {
OPENVINO_ASSERT(_impl != nullptr, "RemoteContext was not initialized.");
@@ -67,7 +67,7 @@ RemoteTensor RemoteContext::create_tensor(const element::Type& type, const Shape
{ie::details::convertPrecision(type), shape, ie::TensorDesc::getLayoutByRank(shape.size())},
params);
blob->allocate();
return {blob, _so};
return {blob, {_so}};
});
}
@@ -76,7 +76,7 @@ Tensor RemoteContext::create_host_tensor(const element::Type element_type, const
auto blob = _impl->CreateHostBlob(
{ie::details::convertPrecision(element_type), shape, ie::TensorDesc::getLayoutByRank(shape.size())});
blob->allocate();
return {blob, _so};
return {blob, {_so}};
});
}

View File

@@ -65,7 +65,7 @@ VariableState::~VariableState() {
_impl = {};
}
VariableState::VariableState(const ie::IVariableStateInternal::Ptr& impl, const std::shared_ptr<void>& so)
VariableState::VariableState(const ie::IVariableStateInternal::Ptr& impl, const std::vector<std::shared_ptr<void>>& so)
: _impl{impl},
_so{so} {
OPENVINO_ASSERT(_impl != nullptr, "VariableState was not initialized.");
@@ -80,7 +80,7 @@ std::string VariableState::get_name() const {
}
Tensor VariableState::get_state() const {
OV_VARIABLE_CALL_STATEMENT(return {std::const_pointer_cast<ie::Blob>(_impl->GetState()), _so});
OV_VARIABLE_CALL_STATEMENT(return {std::const_pointer_cast<ie::Blob>(_impl->GetState()), {_so}});
}
void VariableState::set_state(const Tensor& state) {

View File

@@ -91,6 +91,10 @@ void IExecutableNetworkInternal::SetPointerToPlugin(const std::shared_ptr<IInfer
_plugin = plugin;
}
std::shared_ptr<void> IExecutableNetworkInternal::GetPointerToSo() {
return _so;
}
void IExecutableNetworkInternal::SetConfig(const std::map<std::string, Parameter>&) {
IE_THROW(NotImplemented);
}

View File

@@ -576,6 +576,14 @@ std::shared_ptr<IExecutableNetworkInternal> IInferRequestInternal::getPointerToE
return _exeNetwork;
}
void IInferRequestInternal::setPointerToSo(const std::shared_ptr<void>& so) {
_so = so;
}
std::shared_ptr<void> IInferRequestInternal::getPointerToSo() const {
return _so;
}
bool IInferRequestInternal::preProcessingRequired(const InputInfo::Ptr& info,
const Blob::Ptr& userBlob,
const Blob::Ptr& deviceBlob) {

View File

@@ -1954,7 +1954,7 @@ RemoteContext Core::create_context(const std::string& deviceName, const AnyMap&
OV_CORE_CALL_STATEMENT({
auto parsed = parseDeviceNameIntoConfig(deviceName, flatten_sub_properties(deviceName, params));
auto remoteContext = _impl->GetCPPPluginByName(parsed._deviceName).create_context(parsed._config);
return {remoteContext._ptr, remoteContext._so};
return {remoteContext._ptr, {remoteContext._so}};
});
}
@@ -1966,7 +1966,7 @@ RemoteContext Core::get_default_context(const std::string& deviceName) {
OV_CORE_CALL_STATEMENT({
auto parsed = parseDeviceNameIntoConfig(deviceName, AnyMap{});
auto remoteContext = _impl->GetCPPPluginByName(parsed._deviceName).get_default_context(parsed._config);
return {remoteContext._ptr, remoteContext._so};
return {remoteContext._ptr, {remoteContext._so}};
});
}

View File

@@ -549,6 +549,19 @@ IInferPtr AutoSchedule::CreateInferRequest() {
if (!syncRequestImpl)
syncRequestImpl = CreateInferRequestImpl(execNetwork->_networkInputs, execNetwork->_networkOutputs);
syncRequestImpl->setPointerToExecutableNetworkInternal(execNetwork);
if (_passthroughExeNet) {
std::string perfmode;
try {
perfmode = _passthroughExeNet->GetConfig(
CONFIG_KEY(PERFORMANCE_HINT)).as<std::string>();
} catch(...) {
LOG_INFO("query perf hint from passthrough network failed");
}
if (_autoSContext->_batchingDisabled || perfmode != CONFIG_VALUE(THROUGHPUT))
syncRequestImpl->setPointerToSo(_passthroughExeNet._so);
else
syncRequestImpl->setPointerToSo(_passthroughExeNet._ptr->GetPointerToSo());
}
return std::make_shared<AsyncInferRequest>(shared_from_this(),
syncRequestImpl,
execNetwork->_callbackExecutor);

View File

@@ -121,6 +121,7 @@ public:
DeviceMap<SoExecNetwork> _networksPerDevice;
std::mutex _mutex;
bool _needPerfCounters;
bool _batchingDisabled = {false};
bool _bindBuffer = false;
virtual ~MultiScheduleContext() = default;
};
@@ -133,7 +134,6 @@ public:
IE::CNNNetwork _network;
std::string _strDevices;
unsigned int _modelPriority = 0;
bool _batchingDisabled = {false};
std::string _performanceHint;
std::mutex _confMutex;
MultiDeviceInferencePlugin* _plugin;

View File

@@ -303,6 +303,19 @@ IInferPtr MultiSchedule::CreateInferRequest() {
if (!syncRequestImpl)
syncRequestImpl = CreateInferRequestImpl(execNetwork->_networkInputs, execNetwork->_networkOutputs);
syncRequestImpl->setPointerToExecutableNetworkInternal(execNetwork);
if (_passthroughExeNet) {
std::string perfmode;
try {
perfmode = _passthroughExeNet->GetConfig(
CONFIG_KEY(PERFORMANCE_HINT)).as<std::string>();
} catch(...) {
LOG_INFO("query perf hint from passthrough network failed");
}
if (_multiSContext->_batchingDisabled || perfmode != CONFIG_VALUE(THROUGHPUT))
syncRequestImpl->setPointerToSo(_passthroughExeNet._so);
else
syncRequestImpl->setPointerToSo(_passthroughExeNet._ptr->GetPointerToSo());
}
return std::make_shared<AsyncInferRequest>(shared_from_this(),
syncRequestImpl,
execNetwork->_callbackExecutor);

View File

@@ -437,7 +437,7 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
metaDevices = ParseMetaDevices(priorities->second, fullConfig);
multiNetworkConfig.insert(*priorities);
}
auto multiSContext = std::make_shared<MultiScheduleContext>();
DeviceMap<SoExecutableNetworkInternal> executableNetworkPerDevice;
std::mutex load_mutex;
std::vector<Task> loads;
@@ -445,8 +445,11 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
for (auto& p : metaDevices) {
loads.push_back([&]() {
auto tmpiter = fullConfig.find(CONFIG_KEY(ALLOW_AUTO_BATCHING));
if (tmpiter != fullConfig.end())
if (tmpiter != fullConfig.end()) {
if (tmpiter->second == PluginConfigParams::NO)
multiSContext->_batchingDisabled = true;
p.config.insert({tmpiter->first, tmpiter->second});
}
const auto& deviceName = p.deviceName;
const auto& deviceConfig = p.config;
SoExecutableNetworkInternal exec_net;
@@ -487,7 +490,6 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
}
// MULTI can enable the perf counters only if all devices support/enable that
bool enablePerfCounters = num_plugins_supporting_perf_counters == executableNetworkPerDevice.size();
auto multiSContext = std::make_shared<MultiScheduleContext>();
multiSContext->_devicePriorities = metaDevices;
multiSContext->_devicePrioritiesInitial = metaDevices;
multiSContext->_networksPerDevice = executableNetworkPerDevice;

View File

@@ -577,6 +577,8 @@ InferenceEngine::IInferRequestInternal::Ptr AutoBatchExecutableNetwork::CreateIn
if (!_network) {
auto res = _networkWithoutBatch->CreateInferRequest();
res->setPointerToExecutableNetworkInternal(shared_from_this());
res->setPointerToSo(_networkWithoutBatch._so);
_so = _networkWithoutBatch._so;
return res;
}
// trying to create the new API request first

View File

@@ -57,6 +57,13 @@ StatusCode HeteroAsyncInferRequest::Wait(int64_t millis_timeout) {
return waitStatus;
}
InferenceEngine::Blob::Ptr HeteroAsyncInferRequest::GetBlob(const std::string& name) {
CheckState();
auto blob = _heteroInferRequest->GetBlob(name);
setPointerToSo(_heteroInferRequest->getPointerToSo());
return blob;
}
HeteroAsyncInferRequest::~HeteroAsyncInferRequest() {
StopAndWait();
}

View File

@@ -20,6 +20,7 @@ public:
const InferenceEngine::ITaskExecutor::Ptr& callbackExecutor);
~HeteroAsyncInferRequest();
InferenceEngine::StatusCode Wait(int64_t millis_timeout) override;
InferenceEngine::Blob::Ptr GetBlob(const std::string& name) override;
private:
HeteroInferRequest::Ptr _heteroInferRequest;

View File

@@ -53,14 +53,14 @@ void HeteroInferRequest::CreateInferRequest(
}
if (output) {
if (InferenceEngine::details::contains(_networkOutputs, blobName)) {
_subRequestFromBlobName.emplace(blobName, r._ptr.get());
_subRequestFromBlobName.emplace(blobName, r);
} else {
auto blob = r->GetBlob(blobName);
_blobs.emplace(intermediateBlobName, r->GetBlob(blobName));
}
} else {
if (InferenceEngine::details::contains(_networkInputs, blobName)) {
_subRequestFromBlobName.emplace(blobName, r._ptr.get());
_subRequestFromBlobName.emplace(blobName, r);
} else {
r->SetBlob(blobName, _blobs.at(intermediateBlobName));
}
@@ -98,6 +98,7 @@ InferenceEngine::Blob::Ptr HeteroInferRequest::GetBlob(const std::string& name)
if (itRequest == _subRequestFromBlobName.end()) {
IE_THROW() << "There is no infer requests binded to blob with name: " << name;
}
setPointerToSo(itRequest->second._so);
return itRequest->second->GetBlob(name);
}

View File

@@ -56,7 +56,7 @@ public:
SubRequestsList _inferRequests;
std::map<std::string, InferenceEngine::Blob::Ptr> _blobs;
std::map<std::string, InferenceEngine::IInferRequestInternal*> _subRequestFromBlobName;
std::map<std::string, InferenceEngine::SoIInferRequestInternal> _subRequestFromBlobName;
private:
void CreateInferRequest(const std::unordered_map<std::string, std::string>& subgraphInputToOutputBlobNames);

View File

@@ -0,0 +1,24 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/ov_plugin/life_time.hpp"
using namespace ov::test::behavior;
namespace {
INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, OVHoldersTest,
::testing::Values(CommonTestUtils::DEVICE_CPU),
OVHoldersTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_VirtualPlugin_BehaviorTests, OVHoldersTest,
::testing::Values("AUTO:CPU",
"MULTI:CPU",
//CommonTestUtils::DEVICE_BATCH,
"HETERO:CPU"),
OVHoldersTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, OVHoldersTestOnImportedNetwork,
::testing::Values(CommonTestUtils::DEVICE_CPU),
OVHoldersTestOnImportedNetwork::getTestCaseName);
} // namespace

View File

@@ -0,0 +1,19 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "behavior/ov_plugin/life_time.hpp"
using namespace ov::test::behavior;
namespace {
INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, OVHoldersTest,
::testing::Values(CommonTestUtils::DEVICE_GPU),
OVHoldersTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_VirtualPlugin_BehaviorTests, OVHoldersTest,
::testing::Values("AUTO:GPU",
"MULTI:GPU",
//CommonTestUtils::DEVICE_BATCH,
"HETERO:GPU"),
OVHoldersTest::getTestCaseName);
} // namespace

View File

@@ -91,5 +91,7 @@ std::vector<std::string> disabledTestPatterns() {
R"(.*smoke_RemoteBlob.*canInferOnUserQueue.*)",
// Issue: CVS-76980
R"(.*smoke_Auto_BehaviorTests.*InferDynamicNetwork/.*)",
// Issue: CVS-86976
R"(.*smoke_VirtualPlugin_BehaviorTests.*LoadedRemoteContext.*)",
};
}

View File

@@ -36,7 +36,6 @@ public:
std::string targetDevice;
std::string deathTestStyle;
};
} // namespace behavior
} // namespace test
} // namespace ov

View File

@@ -92,6 +92,8 @@ TEST_P(OVHoldersTest, LoadedAny) {
}
TEST_P(OVHoldersTest, LoadedRemoteContext) {
// Skip test according to plugin specific disabledTestPatterns() (if any)
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::RemoteContext ctx;
{
ov::Core core = createCoreWithTemplate();