Removed IInferencePluginAPI interface (#1497)

* Removed legacy library includes from plugin API headers

* Removed IInferencePluginAPI interface; merged with IInferencePlugin

* Removed pluginAPIInterface usage in Core implementation
Ilya Lavrenov 2020-07-28 11:08:45 +03:00 committed by GitHub
parent 534fe35c0a
commit a19a8645e8
21 changed files with 257 additions and 262 deletions
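
The heart of the change is visible in the ie_core.cpp hunks below: Core used to reach the extended API (SetName, SetCore, GetConfig, GetMetric, the remote-context and ImportNetwork overloads) by dynamic_cast-ing every plugin to IInferencePluginAPI and throwing when the cast failed; with the interfaces merged, those calls dispatch directly. A self-contained sketch of the before/after pattern, with illustrative stand-in types rather than the real Inference Engine headers:

#include <stdexcept>
#include <string>

// Before: extended calls lived on a side interface found via dynamic_cast.
struct IPluginBase { virtual ~IPluginBase() = default; };
struct IPluginExtended {                                  // plays the IInferencePluginAPI role
    virtual std::string GetMetric(const std::string& name) const = 0;
    virtual ~IPluginExtended() = default;
};

std::string GetMetricOld(IPluginBase* plugin, const std::string& name) {
    auto* ext = dynamic_cast<IPluginExtended*>(plugin);   // runtime discovery
    if (ext == nullptr)
        throw std::runtime_error("plugin does not implement the GetMetric method");
    return ext->GetMetric(name);
}

// After: the method is part of the one plugin interface, so the cast and its
// "does not implement" failure mode disappear.
struct IPlugin {                                          // plays the merged IInferencePlugin role
    virtual std::string GetMetric(const std::string& name) const = 0;
    virtual ~IPlugin() = default;
};

std::string GetMetricNew(IPlugin& plugin, const std::string& name) {
    return plugin.GetMetric(name);                        // direct virtual dispatch
}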


@@ -73,8 +73,6 @@ InferenceEngine::ExecutableNetworkInternal::Ptr Plugin::LoadExeNetworkImpl(const
}
auto clonedNetwork = cloneNet(network);
ConstTransformer transformator(clonedNetwork.get());
transformator.fullTrim();
return std::make_shared<ExecutableNetwork>(*clonedNetwork, cfg);
}


@@ -32,6 +32,8 @@
#include <transformations/convert_opset2_to_opset1/convert_opset2_to_opset1.hpp>
#include <transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.hpp>
#include "convert_function_to_cnn_network.hpp"
#include <ie_util_internal.hpp>
#include <graph_transformer.h>
#undef min
#undef max


@@ -11,6 +11,7 @@
#include <cpp_interfaces/impl/ie_executable_network_internal.hpp>
#include "gna_executable_network.hpp"
#include "gna_plugin_config.hpp"
#include <ie_util_internal.hpp>
namespace GNAPluginNS {


@@ -8,6 +8,7 @@
#include "ie_util_internal.hpp"
#include "hetero_graph_splitter.hpp"
#include "xml_parse_utils.h"
#include <details/caseless.hpp>
#include <vector>
#include <deque>


@@ -13,7 +13,7 @@
#include <unordered_map>
#include <vector>
#include <utility>
#include <ie_util_internal.hpp>
namespace HeteroPlugin {


@@ -2,50 +2,33 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "ie_core.hpp"
#include <unordered_set>
#include <functional>
#include <limits>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include <istream>
#include <mutex>
#include <ie_core.hpp>
#include <multi-device/multi_device_config.hpp>
#include <ngraph/opsets/opset.hpp>
#include "ie_plugin_cpp.hpp"
#include "cpp_interfaces/base/ie_plugin_base.hpp"
#include "details/ie_exception_conversion.hpp"
#include "details/ie_so_pointer.hpp"
#include "ie_icore.hpp"
#include "ie_plugin_config.hpp"
#include "ie_profiling.hpp"
#include "ie_util_internal.hpp"
#include "file_utils.h"
#include "ie_network_reader.hpp"
#include "multi-device/multi_device_config.hpp"
#include "xml_parse_utils.h"
using namespace InferenceEngine::PluginConfigParams;
namespace InferenceEngine {
IInferencePlugin::~IInferencePlugin() {}
namespace {
IInferencePluginAPI* getInferencePluginAPIInterface(IInferencePlugin* iplugin) {
return dynamic_cast<IInferencePluginAPI*>(iplugin);
}
IInferencePluginAPI* getInferencePluginAPIInterface(InferenceEnginePluginPtr iplugin) {
return getInferencePluginAPIInterface(static_cast<IInferencePlugin*>(iplugin.operator->()));
}
IInferencePluginAPI* getInferencePluginAPIInterface(InferencePlugin plugin) {
return getInferencePluginAPIInterface(static_cast<InferenceEnginePluginPtr>(plugin));
}
template <typename T>
struct Parsed {
std::string _deviceName;
@@ -293,13 +276,7 @@ public:
networkModel.seekg(currentPos, networkModel.beg);
}
auto cppPlugin = GetCPPPluginByName(parsed._deviceName);
auto pluginAPIInterface = getInferencePluginAPIInterface(cppPlugin);
if (pluginAPIInterface == nullptr) {
THROW_IE_EXCEPTION << parsed._deviceName << " does not implement the ImportNetwork method";
}
return pluginAPIInterface->ImportNetwork(networkModel, parsed._config);
return GetCPPPluginByName(parsed._deviceName).ImportNetwork(networkModel, parsed._config);
}
QueryNetworkResult QueryNetwork(const ICNNNetwork& network, const std::string& deviceName,
@@ -330,17 +307,11 @@ public:
}
auto parsed = parseDeviceNameIntoConfig(deviceName);
InferencePlugin cppPlugin = GetCPPPluginByName(parsed._deviceName);
auto pluginAPIInterface = getInferencePluginAPIInterface(cppPlugin);
if (pluginAPIInterface == nullptr) {
THROW_IE_EXCEPTION << parsed._deviceName << " does not implement the GetMetric method";
}
// we need to return a copy of the Parameter object which is created on the Core side,
// not on the InferenceEngine plugin side, which can be unloaded from Core in a parallel thread
// TODO: remove this WA after *-31417 is resolved
return copyParameterValue(pluginAPIInterface->GetMetric(name, parsed._config));
return copyParameterValue(GetCPPPluginByName(parsed._deviceName).GetMetric(name, parsed._config));
}
/**
@@ -364,15 +335,13 @@ public:
try {
InferenceEnginePluginPtr plugin(desc.libraryLocation);
IInferencePlugin* pplugin = static_cast<IInferencePlugin*>(plugin.operator->());
IInferencePluginAPI* iplugin_api_ptr = dynamic_cast<IInferencePluginAPI*>(pplugin);
if (iplugin_api_ptr != nullptr) {
iplugin_api_ptr->SetName(deviceName);
{
plugin->SetName(deviceName);
// Set Inference Engine class reference to plugins
ICore* mutableCore = const_cast<ICore*>(static_cast<const ICore*>(this));
iplugin_api_ptr->SetCore(mutableCore);
plugin->SetCore(mutableCore);
}
// Add registered extensions to new plugin
@@ -613,14 +582,7 @@ ExecutableNetwork Core::LoadNetwork(const CNNNetwork& network, RemoteContext::Pt
DeviceIDParser device(deviceName_);
std::string deviceName = device.getDeviceName();
auto cppPlugin = _impl->GetCPPPluginByName(deviceName);
auto pluginAPIInterface = getInferencePluginAPIInterface(cppPlugin);
if (pluginAPIInterface == nullptr) {
THROW_IE_EXCEPTION << deviceName << " does not implement the LoadNetwork method";
}
return pluginAPIInterface->LoadNetwork(network, config_, context);
return _impl->GetCPPPluginByName(deviceName).LoadNetwork(network, config_, context);
}
RemoteContext::Ptr Core::CreateContext(const std::string& deviceName_, const ParamMap& params) {
@@ -634,14 +596,7 @@ RemoteContext::Ptr Core::CreateContext(const std::string& deviceName_, const Par
DeviceIDParser device(deviceName_);
std::string deviceName = device.getDeviceName();
auto cppPlugin = _impl->GetCPPPluginByName(deviceName);
auto pluginAPIInterface = getInferencePluginAPIInterface(cppPlugin);
if (pluginAPIInterface == nullptr) {
THROW_IE_EXCEPTION << deviceName << " does not implement the CreateContext method";
}
return pluginAPIInterface->CreateContext(params);
return _impl->GetCPPPluginByName(deviceName).CreateContext(params);
}
RemoteContext::Ptr Core::GetDefaultContext(const std::string& deviceName_) {
@@ -655,14 +610,7 @@ RemoteContext::Ptr Core::GetDefaultContext(const std::string& deviceName_) {
DeviceIDParser device(deviceName_);
std::string deviceName = device.getDeviceName();
auto cppPlugin = _impl->GetCPPPluginByName(deviceName);
auto pluginAPIInterface = getInferencePluginAPIInterface(cppPlugin);
if (pluginAPIInterface == nullptr) {
THROW_IE_EXCEPTION << deviceName << " does not implement the CreateContext method";
}
return pluginAPIInterface->GetDefaultContext();
return _impl->GetCPPPluginByName(deviceName).GetDefaultContext();
}
void Core::AddExtension(IExtensionPtr extension, const std::string& deviceName_) {
@@ -710,14 +658,7 @@ ExecutableNetwork Core::ImportNetwork(std::istream& networkModel,
std::string deviceName = device.getDeviceName();
auto parsed = parseDeviceNameIntoConfig(deviceName, config);
auto cppPlugin = _impl->GetCPPPluginByName(deviceName);
auto pluginAPIInterface = getInferencePluginAPIInterface(cppPlugin);
if (pluginAPIInterface == nullptr) {
THROW_IE_EXCEPTION << deviceName << " does not implement the ImportNetwork method";
}
return pluginAPIInterface->ImportNetwork(networkModel, context, parsed._config);
return _impl->GetCPPPluginByName(deviceName).ImportNetwork(networkModel, context, parsed._config);
}
QueryNetworkResult Core::QueryNetwork(const ICNNNetwork& network, const std::string& deviceName,
@@ -769,17 +710,11 @@ Parameter Core::GetConfig(const std::string& deviceName, const std::string& name
}
auto parsed = parseDeviceNameIntoConfig(deviceName);
auto cppPlugin = _impl->GetCPPPluginByName(parsed._deviceName);
auto pluginAPIInterface = getInferencePluginAPIInterface(cppPlugin);
if (pluginAPIInterface == nullptr) {
THROW_IE_EXCEPTION << parsed._deviceName << " does not implement the GetConfig method";
}
// we need to return a copy of the Parameter object which is created on the Core side,
// not on the InferenceEngine plugin side, which can be unloaded from Core in a parallel thread
// TODO: remove this WA after *-31417 is resolved
return copyParameterValue(pluginAPIInterface->GetConfig(name, parsed._config));
return copyParameterValue(_impl->GetCPPPluginByName(parsed._deviceName).GetConfig(name, parsed._config));
}
Parameter Core::GetMetric(const std::string& deviceName, const std::string& name) const {


@@ -1,17 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <string>
#include "cpp_interfaces/base/ie_plugin_base.hpp"
IE_SUPPRESS_DEPRECATED_START
namespace InferenceEngine {
IInferencePlugin::~IInferencePlugin() {}
IInferencePluginAPI::~IInferencePluginAPI() {}
} // namespace InferenceEngine


@@ -18,6 +18,10 @@
#include "details/ie_exception_conversion.hpp"
#include "ie_plugin_ptr.hpp"
#define CALL_RETURN_FNC_NO_ARGS(function, ...) \
if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_RETURN_FNC_NO_ARGS was not initialized."; \
return actual->function(__VA_ARGS__);
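
(Despite the NO_ARGS in its name, the macro forwards its variadic arguments: a call such as CALL_RETURN_FNC_NO_ARGS(GetMetric, name, options) in the wrapper methods below expands to roughly the following.)

if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_RETURN_FNC_NO_ARGS was not initialized.";
return actual->GetMetric(name, options);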
namespace InferenceEngine {
/**
@@ -141,6 +145,38 @@ public:
if (res.rc != OK) THROW_IE_EXCEPTION << res.resp.msg;
}
ExecutableNetwork ImportNetwork(std::istream& networkModel,
const std::map<std::string, std::string> &config) {
CALL_RETURN_FNC_NO_ARGS(ImportNetwork, networkModel, config);
}
Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const {
CALL_RETURN_FNC_NO_ARGS(GetMetric, name, options);
}
ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) {
CALL_RETURN_FNC_NO_ARGS(LoadNetwork, network, config, context);
}
RemoteContext::Ptr CreateContext(const ParamMap& params) {
CALL_RETURN_FNC_NO_ARGS(CreateContext, params);
}
RemoteContext::Ptr GetDefaultContext() {
CALL_RETURN_FNC_NO_ARGS(GetDefaultContext);
}
ExecutableNetwork ImportNetwork(std::istream& networkModel,
const RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) {
CALL_RETURN_FNC_NO_ARGS(ImportNetwork, networkModel, context, config);
}
Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const {
CALL_RETURN_FNC_NO_ARGS(GetConfig, name, options);
}
/**
* @brief Converts InferenceEngine to InferenceEnginePluginPtr pointer
*


@@ -15,6 +15,8 @@
#include <ie_system_conf.h>
#include <generic_ie.hpp>
#include <nodes/list.hpp>
#include <ie_util_internal.hpp>
#include <graph_transformer.h>
#include "convert_function_to_cnn_network.hpp"
#include <transformations/common_optimizations/common_optimizations.hpp>


@@ -13,7 +13,7 @@
#include <unordered_set>
#include "ie_metric_helpers.hpp"
#include <ie_api.h>
#include <ie_util_internal.hpp>
#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include <cpp_interfaces/base/ie_infer_async_request_base.hpp>
#include <multi-device/multi_device_config.hpp>


@@ -1,142 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* \brief Inference Engine extended plugin API
* \file ie_inference_plugin_api.hpp
*/
#pragma once
#include <ie_api.h>
#include <cpp/ie_executable_network.hpp>
#include <ie_parameter.hpp>
#include <ie_remote_context.hpp>
#include <map>
#include <string>
#include <vector>
namespace InferenceEngine {
class ICore;
/**
* @brief Extends the Inference Engine Plugin API with new methods, without changing the public IInferencePlugin interface.
* It should be used together with the base IInferencePlugin, which provides the common interface, while this one only extends the API.
* @ingroup ie_dev_api_plugin_api
*/
class INFERENCE_ENGINE_API_CLASS(IInferencePluginAPI) {
public:
/**
* @brief Sets plugin name
* @param pluginName Plugin name to set
*/
virtual void SetName(const std::string& pluginName) noexcept = 0;
/**
* @brief Returns plugin name
* @return Plugin name
*/
virtual std::string GetName() const noexcept = 0;
/**
* @brief Sets pointer to ICore interface
* @param core Pointer to Core interface
*/
virtual void SetCore(ICore* core) noexcept = 0;
/**
* @brief Gets reference to ICore interface
* @return Reference to core interface
*/
virtual const ICore& GetCore() const = 0;
/**
* @brief Gets configuration dedicated to plugin behaviour
* @param name - value of config corresponding to config key
* @param options - configuration details for config
* @return Value of config corresponding to config key
*/
virtual Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const = 0;
/**
* @brief Gets general runtime metric for dedicated hardware
* @param name - metric name to request
* @param options - configuration details for metric
* @return Metric value corresponding to metric key
*/
virtual Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const = 0;
/**
* @brief Creates a remote context instance based on a map of parameters
* @param[in] params The map of parameters
* @return A remote context object
*/
virtual RemoteContext::Ptr CreateContext(const ParamMap& params) = 0;
/**
* @brief Provides a default remote context instance if supported by a plugin
* @return The default context.
*/
virtual RemoteContext::Ptr GetDefaultContext() = 0;
/**
* @brief Wraps original method
* IInferencePlugin::LoadNetwork
* @param network - a network object acquired from InferenceEngine::Core::ReadNetwork
* @param config string-string map of config parameters relevant only for this load operation
* @param context - a pointer to plugin context derived from RemoteContext class used to
* execute the network
* @return Created Executable Network object
*/
virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) = 0;
/**
* @brief Creates an executable network from a previously exported network using plugin implementation
* and removes Inference Engine magic and plugin name
* @param networkModel Reference to network model output stream
* @param config A string -> string map of parameters
* @return An Executable network
*/
virtual ExecutableNetwork ImportNetwork(std::istream& networkModel,
const std::map<std::string, std::string>& config) = 0;
/**
* @brief Creates an executable network from a previously exported network using plugin implementation
* and removes Inference Engine magic and plugin name
* @param networkModel Reference to network model output stream
* @param context - a pointer to plugin context derived from RemoteContext class used to
* execute the network
* @param config A string -> string map of parameters
* @return An Executable network
*/
virtual ExecutableNetwork ImportNetwork(std::istream& networkModel,
const RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) = 0;
/**
* @brief A virtual destructor
*/
virtual ~IInferencePluginAPI();
};
/**
* @private
*/
class INFERENCE_ENGINE_API_CLASS(DeviceIDParser) {
std::string deviceName;
std::string deviceID;
public:
explicit DeviceIDParser(const std::string& deviceNameWithID);
std::string getDeviceID() const;
std::string getDeviceName() const;
static std::vector<std::string> getHeteroDevices(std::string fallbackDevice);
static std::vector<std::string> getMultiDevices(std::string devicesList);
};
} // namespace InferenceEngine


@@ -7,7 +7,6 @@
#include <memory>
#include "cpp_interfaces/exception2status.hpp"
#include "details/ie_no_copy.hpp"
#include "ie_imemory_state.hpp"
namespace InferenceEngine {


@@ -14,10 +14,8 @@
#include <string>
#include "cpp_interfaces/interface/ie_plugin.hpp"
#include "cpp_interfaces/base/ie_inference_plugin_api.hpp"
#include "cpp_interfaces/exception2status.hpp"
#include "description_buffer.hpp"
#include "ie_common.h"
namespace InferenceEngine {
@@ -27,7 +25,7 @@ namespace InferenceEngine {
* @tparam T Minimal CPP implementation of IInferencePluginInternal (e.g. InferencePluginInternal)
*/
template <class T>
class PluginBase : public IInferencePluginAPI, public IInferencePlugin {
class PluginBase : public IInferencePlugin {
class VersionStore : public Version {
std::string _dsc;
std::string _buildNumber;


@@ -15,7 +15,6 @@
#include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp"
#include "cpp_interfaces/interface/ie_iinfer_request_internal.hpp"
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "ie_icore.hpp"
namespace InferenceEngine {


@@ -10,9 +10,7 @@
#pragma once
#include <ie_plugin_config.hpp>
#include <ie_util_internal.hpp>
#include <details/caseless.hpp>
#include <map>
#include <memory>
#include <string>
@@ -21,7 +19,6 @@
#include "cpp_interfaces/base/ie_executable_network_base.hpp"
#include "cpp_interfaces/impl/ie_executable_network_internal.hpp"
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "graph_transformer.h"
using namespace InferenceEngine;
using namespace InferenceEngine::details;


@@ -10,20 +10,16 @@
#pragma once
#include <ie_iextension.h>
#include <ie_icnn_network.hpp>
#include <ie_core.hpp>
#include <ie_icore.hpp>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <vector>
#include "details/ie_no_copy.hpp"
#include "ie_api.h"
#include "ie_core.hpp"
#include "ie_iexecutable_network.hpp"
#include "ie_version.hpp"
namespace InferenceEngine {
/**
@@ -98,6 +94,94 @@ public:
res.rc = InferenceEngine::NOT_IMPLEMENTED;
}
/**
* @brief Sets plugin name
* @param pluginName Plugin name to set
*/
virtual void SetName(const std::string& pluginName) noexcept = 0;
/**
* @brief Returns plugin name
* @return Plugin name
*/
virtual std::string GetName() const noexcept = 0;
/**
* @brief Sets pointer to ICore interface
* @param core Pointer to Core interface
*/
virtual void SetCore(ICore* core) noexcept = 0;
/**
* @brief Gets reference to ICore interface
* @return Reference to core interface
*/
virtual const ICore& GetCore() const = 0;
/**
* @brief Gets configuration dedicated to plugin behaviour
* @param name - value of config corresponding to config key
* @param options - configuration details for config
* @return Value of config corresponding to config key
*/
virtual Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const = 0;
/**
* @brief Gets general runtime metric for dedicated hardware
* @param name - metric name to request
* @param options - configuration details for metric
* @return Metric value corresponding to metric key
*/
virtual Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const = 0;
/**
* @brief Creates a remote context instance based on a map of parameters
* @param[in] params The map of parameters
* @return A remote context object
*/
virtual RemoteContext::Ptr CreateContext(const ParamMap& params) = 0;
/**
* @brief Provides a default remote context instance if supported by a plugin
* @return The default context.
*/
virtual RemoteContext::Ptr GetDefaultContext() = 0;
/**
* @brief Wraps original method
* IInferencePlugin::LoadNetwork
* @param network - a network object acquired from InferenceEngine::Core::ReadNetwork
* @param config string-string map of config parameters relevant only for this load operation
* @param context - a pointer to plugin context derived from RemoteContext class used to
* execute the network
* @return Created Executable Network object
*/
virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) = 0;
/**
* @brief Creates an executable network from a previously exported network using plugin implementation
* and removes Inference Engine magic and plugin name
* @param networkModel Reference to network model output stream
* @param config A string -> string map of parameters
* @return An Executable network
*/
virtual ExecutableNetwork ImportNetwork(std::istream& networkModel,
const std::map<std::string, std::string>& config) = 0;
/**
* @brief Creates an executable network from a previously exported network using plugin implementation
* and removes Inference Engine magic and plugin name
* @param networkModel Reference to network model output stream
* @param context - a pointer to plugin context derived from RemoteContext class used to
* execute the network
* @param config A string -> string map of parameters
* @return An Executable network
*/
virtual ExecutableNetwork ImportNetwork(std::istream& networkModel,
const RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) = 0;
/**
* @brief A default virtual destructor
*/


@@ -119,4 +119,20 @@ using ExportMagic = std::array<char, 4>;
*/
constexpr static const ExportMagic exportMagic = {{0x1, 0xE, 0xE, 0x1}};
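
(exportMagic is the four-byte marker that Core::ImportNetwork probes for at the head of an exported blob; the seekg call visible in the ie_core.cpp hunk above is the rewind for the no-magic case. A hedged sketch of the probe, with error handling omitted and the name-after-magic layout assumed:)

ExportMagic magic = {};
auto currentPos = networkModel.tellg();
networkModel.read(magic.data(), magic.size());
if (exportMagic == magic) {
    std::getline(networkModel, deviceName);            // plugin name assumed to follow the magic
} else {
    networkModel.seekg(currentPos, networkModel.beg);  // no IE header: rewind and let the plugin parse
}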
/**
* @private
*/
class INFERENCE_ENGINE_API_CLASS(DeviceIDParser) {
std::string deviceName;
std::string deviceID;
public:
explicit DeviceIDParser(const std::string& deviceNameWithID);
std::string getDeviceID() const;
std::string getDeviceName() const;
static std::vector<std::string> getHeteroDevices(std::string fallbackDevice);
static std::vector<std::string> getMultiDevices(std::string devicesList);
};
} // namespace InferenceEngine
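
A usage sketch for DeviceIDParser, which moves here from the deleted plugin-API header. The exact split rules are assumptions based on the conventional IE device notation (NAME.ID for a single device, comma-separated lists for HETERO/MULTI):

DeviceIDParser parser("GPU.1");
std::string name = parser.getDeviceName();   // assumed result: "GPU"
std::string id   = parser.getDeviceID();     // assumed result: "1"

// Compound device strings are split by the static helpers:
auto heteroDevices = DeviceIDParser::getHeteroDevices("CPU,GPU");   // assumed: {"CPU", "GPU"}
auto multiDevices  = DeviceIDParser::getMultiDevices("CPU,GPU.0");  // assumed: {"CPU", "GPU.0"}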


@@ -11,6 +11,7 @@
#include <cpp/ie_cnn_network.h>
#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include <cpp_interfaces/impl/ie_executable_network_internal.hpp>
#include <ie_util_internal.hpp>
#include <vpu/vpu_plugin_config.hpp>
#include <vpu/parsed_config.hpp>


@@ -12,8 +12,8 @@
using namespace std;
using namespace InferenceEngine;
#define ACTION_IF_NOT_NULL(action) (nullptr == _target) ? NOT_IMPLEMENTED : _target->action
#define IF_NOT_NULL(action) if (nullptr != _target) {_target->action;}
MockPlugin::MockPlugin(InferenceEngine::IInferencePlugin *target) {
_target = target;
@@ -48,6 +48,51 @@ MockPlugin::ImportNetwork(IExecutableNetwork::Ptr &ret, const std::string &model
return NOT_IMPLEMENTED;
}
void MockPlugin::SetName(const std::string& pluginName) noexcept {
}
std::string MockPlugin::GetName() const noexcept {
return {};
}
void MockPlugin::SetCore(ICore* core) noexcept {
}
const ICore& MockPlugin::GetCore() const {
static ICore * core = nullptr;
return *core;
}
Parameter MockPlugin::GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const {
return {};
}
Parameter MockPlugin::GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const {
return {};
}
RemoteContext::Ptr MockPlugin::CreateContext(const ParamMap& params) {
return {};
}
RemoteContext::Ptr MockPlugin::GetDefaultContext() {
return {};
}
ExecutableNetwork MockPlugin::LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) {
return {};
}
ExecutableNetwork MockPlugin::ImportNetwork(std::istream& networkModel,
const std::map<std::string, std::string>& config) {
return {};
}
ExecutableNetwork MockPlugin::ImportNetwork(std::istream& networkModel,
const RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) {
return {};
}
InferenceEngine::IInferencePlugin *__target = nullptr;
INFERENCE_PLUGIN_API(StatusCode) CreatePluginEngine(IInferencePlugin *&plugin, ResponseDesc *resp) noexcept {


@@ -35,5 +35,25 @@ public:
void Release() noexcept override;
void SetName(const std::string& pluginName) noexcept override;
std::string GetName() const noexcept override;
void SetCore(InferenceEngine::ICore* core) noexcept override;
const InferenceEngine::ICore& GetCore() const override;
InferenceEngine::Parameter
GetConfig(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const override;
InferenceEngine::Parameter
GetMetric(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const override;
InferenceEngine::RemoteContext::Ptr
CreateContext(const InferenceEngine::ParamMap& params) override;
InferenceEngine::RemoteContext::Ptr GetDefaultContext() override;
InferenceEngine::ExecutableNetwork
LoadNetwork(const InferenceEngine::ICNNNetwork& network, const std::map<std::string, std::string>& config,
InferenceEngine::RemoteContext::Ptr context) override;
InferenceEngine::ExecutableNetwork
ImportNetwork(std::istream& networkModel, const std::map<std::string, std::string>& config) override;
InferenceEngine::ExecutableNetwork
ImportNetwork(std::istream& networkModel, const InferenceEngine::RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) override;
std::map<std::string, std::string> config;
};


@@ -28,6 +28,26 @@ public:
const std::string &,
const std::map<std::string, std::string> &,
InferenceEngine::ResponseDesc *));
MOCK_QUALIFIED_METHOD2(SetConfig, noexcept, InferenceEngine::StatusCode(const std::map<std::string, std::string> &,
InferenceEngine::ResponseDesc *resp));
MOCK_QUALIFIED_METHOD2(SetConfig, noexcept, InferenceEngine::StatusCode(
const std::map<std::string, std::string> &, InferenceEngine::ResponseDesc *resp));
MOCK_QUALIFIED_METHOD1(SetName, noexcept, void(const std::string&));
MOCK_QUALIFIED_METHOD0(GetName, const noexcept, std::string(void));
MOCK_QUALIFIED_METHOD1(SetCore, noexcept, void(InferenceEngine::ICore*));
MOCK_QUALIFIED_METHOD0(GetCore, const, const InferenceEngine::ICore&(void));
MOCK_QUALIFIED_METHOD2(GetConfig, const, InferenceEngine::Parameter(
const std::string&, const std::map<std::string, InferenceEngine::Parameter>&));
MOCK_QUALIFIED_METHOD2(GetMetric, const, InferenceEngine::Parameter(
const std::string&, const std::map<std::string, InferenceEngine::Parameter>&));
MOCK_METHOD1(CreateContext,
InferenceEngine::RemoteContext::Ptr(const InferenceEngine::ParamMap&));
MOCK_METHOD0(GetDefaultContext, InferenceEngine::RemoteContext::Ptr(void));
MOCK_METHOD3(LoadNetwork, InferenceEngine::ExecutableNetwork(
const InferenceEngine::ICNNNetwork&, const std::map<std::string, std::string>&,
InferenceEngine::RemoteContext::Ptr));
MOCK_METHOD2(ImportNetwork, InferenceEngine::ExecutableNetwork(
std::istream&, const std::map<std::string, std::string>&));
MOCK_METHOD3(ImportNetwork, InferenceEngine::ExecutableNetwork(
std::istream&, const InferenceEngine::RemoteContext::Ptr&,
const std::map<std::string, std::string>&));
};
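
The newly mocked methods can be stubbed in tests in the usual gmock way. A minimal sketch — the mock class name is cut off in the hunk above, so MockIInferencePlugin below is a placeholder, and the stubbed values are illustrative:

using ::testing::Return;

MockIInferencePlugin mock;   // placeholder name for the mock class declared above
EXPECT_CALL(mock, GetName()).WillOnce(Return(std::string("MOCK")));
EXPECT_CALL(mock, GetDefaultContext())
    .WillOnce(Return(InferenceEngine::RemoteContext::Ptr{}));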