[IE][VPU]: Improves myriad plugin API (#2816)

LoadNetwork takes the network argument by constant reference.
The Myriad plugin implementation applies transformations to
the given network in order to get a compiled model. The
transformations take the network argument by non-constant
reference, so at some point a copy of the network must be
acquired. ICNNNetwork is neither copyable nor movable, so the
only way to get a network copy is a special utility that
returns std::shared_ptr.

The Myriad plugin does not expose any ownership strategy, so
it prefers to take the network argument by plain reference.
The plugin also requires an nGraph -> CNN conversion during
its LoadNetwork implementation. The conversion utility returns
std::shared_ptr, which forces the plugin into a workaround for
the lifetime of the converted object (keeping two "pointers"
to the network: a raw pointer to the input network and a smart
pointer to the converted network). Such workarounds make the
code more error-prone, because using the wrong pointer to what
is semantically the same object may lead to unexpected results.
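
For illustration only, a minimal standalone sketch of that
workaround pattern (toy types, not the real Inference Engine
API): a non-copyable network class, a conversion helper that
returns std::shared_ptr, and the resulting pair of handles
that every later line of code must keep straight.

    #include <memory>
    #include <string>

    // Toy stand-in for ICNNNetwork: neither copyable nor movable.
    struct Network {
        std::string name;
        explicit Network(std::string n) : name(std::move(n)) {}
        Network(const Network&) = delete;
        Network& operator=(const Network&) = delete;
    };

    // Toy stand-in for the conversion utility: returns std::shared_ptr.
    std::shared_ptr<Network> convert(const Network& n) {
        return std::make_shared<Network>(n.name + "/converted");
    }

    void compileOld(Network& network, bool needsConversion) {
        // Workaround: two handles to "the same" network.
        Network* originalOrConverted = &network;    // raw pointer to the input
        std::shared_ptr<Network> converted;         // keeps the converted copy alive
        if (needsConversion) {
            converted = convert(*originalOrConverted);
            originalOrConverted = converted.get();
        }
        // From here on, accidentally using `network` instead of
        // `*originalOrConverted` silently picks the unconverted object.
    }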

To overcome these issues, the API has been changed to make the
interfaces clearer (they do not expose an ownership strategy
or mutability) and to get rid of unnecessary workarounds.
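
Continuing the toy sketch above, the shape of the new internal
API (again an illustration, not the exact plugin signatures;
see the diff below for those): the public entry point clones
once, and the internal pass takes the std::shared_ptr by
value, so a conversion simply replaces the one and only handle.

    std::shared_ptr<Network> clone(const Network& n) {   // toy clone utility
        return std::make_shared<Network>(n.name);
    }

    void compileNewImpl(std::shared_ptr<Network> network, bool needsConversion) {
        if (needsConversion) {
            network = convert(*network);   // single handle, overwritten in place
        }
        // All later code uses `network`; there is no second pointer to misuse.
    }

    void compileNew(const Network& network, bool needsConversion) {
        compileNewImpl(clone(network), needsConversion);  // copy acquired at the boundary
    }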

Signed-off-by: Gladilov, Gleb <gleb.gladilov@intel.com>
Author:       Gladilov, Gleb
Date:         2020-10-28 11:14:14 +03:00
Committed by: GitHub
Commit:       18f7e4f4f0
Parent:       91afa14901
7 changed files with 43 additions and 79 deletions

@@ -32,9 +32,9 @@ public:
     explicit FrontEnd(StageBuilder::Ptr stageBuilder, const ie::ICore* core);
-    ModelPtr buildInitialModel(ie::ICNNNetwork& network);
+    ModelPtr buildInitialModel(const ie::ICNNNetwork& network);
-    std::set<std::string> checkSupportedLayers(ie::ICNNNetwork& network);
+    std::set<std::string> checkSupportedLayers(const ie::ICNNNetwork& network);
     const std::vector<ie::CNNLayerPtr>& origLayers() const {
         return _ieParsedNetwork.orderedLayers;
@@ -45,11 +45,11 @@ public:
     //
 private:
+    ModelPtr runCommonPasses(const ie::ICNNNetwork& network);
     using SupportedLayerCallback = std::function<void(const ie::CNNLayerPtr&)>;
     using UnsupportedLayerCallback = std::function<void(const Model&, const ie::CNNLayerPtr&, const DataVector&, const DataVector&, const std::string&)>;
-    ModelPtr runCommonPasses(ie::ICNNNetwork& network);
-    ModelPtr runCommonPasses(ie::ICNNNetwork& network, const UnsupportedLayerCallback& unsupportedLayer,
+    ModelPtr runCommonPasses(ie::ICNNNetwork::Ptr network, const UnsupportedLayerCallback& unsupportedLayer,
         const SupportedLayerCallback& supportedLayer = nullptr);
     //

@@ -163,28 +163,17 @@ struct CompiledGraph final {
 // compileNetwork
 //
-CompiledGraph::Ptr compileNetwork(
-        ie::ICNNNetwork& network,
-        Platform platform,
-        const CompilationConfig& config,
-        const Logger::Ptr& log,
-        const ie::ICore* core);
+CompiledGraph::Ptr compileNetwork(const ie::ICNNNetwork& network, Platform platform, const CompilationConfig& config, const Logger::Ptr& log,
+        const ie::ICore* core);
-CompiledGraph::Ptr compileSubNetwork(
-        ie::ICNNNetwork& network,
-        const CompilationConfig& subConfig,
-        const ie::ICore* core);
+CompiledGraph::Ptr compileSubNetwork(const ie::ICNNNetwork& network, const CompilationConfig& subConfig, const ie::ICore* core);
 //
 // getSupportedLayers
 //
-std::set<std::string> getSupportedLayers(
-        const ie::ICNNNetwork& network,
-        Platform platform,
-        const CompilationConfig& config,
-        const Logger::Ptr& log,
-        const ie::ICore* core);
+std::set<std::string> getSupportedLayers(const ie::ICNNNetwork& network, Platform platform, const CompilationConfig& config, const Logger::Ptr& log,
+        const ie::ICore* core);
 //
 // Blob version and checks

@@ -35,6 +35,7 @@
 #include "vpu/ngraph/transformations/dynamic_to_static_shape.hpp"
 #include "vpu/ngraph/transformations/eliminate_shapeof_after_dsr.hpp"
 #include <vpu/ngraph/operations/dynamic_shape_resolver.hpp>
+#include <legacy/ie_util_internal.hpp>
 namespace vpu {
@@ -133,7 +134,7 @@ FrontEnd::FrontEnd(StageBuilder::Ptr stageBuilder, const ie::ICore* core)
     VPU_THROW_UNLESS(_core != nullptr, "Argument core is null");
 }
-ModelPtr FrontEnd::buildInitialModel(ie::ICNNNetwork& network) {
+ModelPtr FrontEnd::buildInitialModel(const ie::ICNNNetwork& network) {
     VPU_PROFILE(buildInitialModel);
     const auto& env = CompileEnv::get();
@@ -148,7 +149,6 @@ bool FrontEnd::isLayerSupported(const std::string& type) {
 }
 ie::ICNNNetwork::Ptr FrontEnd::convertNetwork(ie::ICNNNetwork& network) {
-    std::shared_ptr<ie::ICNNNetwork> convertedNetwork;
     // disable transformations for some cases
     const auto transformationsPredicate = [](const std::shared_ptr<const ngraph::Node>& node) -> bool {
         const bool casesWithDynamicOrStaticUsage =
@@ -185,11 +185,10 @@ ie::ICNNNetwork::Ptr FrontEnd::convertNetwork(ie::ICNNNetwork& network) {
     vpu::MergeSubsequentDSROperations().run_on_function(nGraphFunc);
-    convertedNetwork = InferenceEngine::details::convertFunctionToICNNNetwork(nGraphFunc, network);
-    return convertedNetwork;
+    return InferenceEngine::details::convertFunctionToICNNNetwork(nGraphFunc, network);
 }
-std::set<std::string> FrontEnd::checkSupportedLayers(ie::ICNNNetwork& network) {
+std::set<std::string> FrontEnd::checkSupportedLayers(const ie::ICNNNetwork& network) {
     VPU_PROFILE(checkSupportedLayers);
     const auto& env = CompileEnv::get();
@@ -212,7 +211,7 @@ std::set<std::string> FrontEnd::checkSupportedLayers(ie::ICNNNetwork& network) {
         _stageBuilder->addNoneStage(model, layer->name, layer, inputs, outputs);
     };
-    runCommonPasses(network, onUnsupportedLayer, onSupportedLayer);
+    runCommonPasses(cloneNetwork(network), onUnsupportedLayer, onSupportedLayer);
     return supportedLayers;
 }
@@ -367,22 +366,14 @@ void FrontEnd::defaultOnUnsupportedLayerCallback(const Model& model, const ie::C
     _stageBuilder->addNoneStage(model, layer->name, layer, inputs, outputs);
 }
-ModelPtr FrontEnd::runCommonPasses(ie::ICNNNetwork& network) {
-    return runCommonPasses(network, [this](const Model& model, const ie::CNNLayerPtr& layer,
-        const DataVector& inputs, const DataVector& outputs, const std::string& extraMessage)
-        { defaultOnUnsupportedLayerCallback(model, layer, inputs, outputs, extraMessage); });
+ModelPtr FrontEnd::runCommonPasses(const ie::ICNNNetwork& network) {
+    return runCommonPasses(cloneNetwork(network),
+        [this](const Model& model, const ie::CNNLayerPtr& layer, const DataVector& inputs, const DataVector& outputs, const std::string& extraMessage) {
+            defaultOnUnsupportedLayerCallback(model, layer, inputs, outputs, extraMessage);});
 }
-ModelPtr FrontEnd::runCommonPasses(ie::ICNNNetwork& network, const UnsupportedLayerCallback& unsupportedLayer, const SupportedLayerCallback& supportedLayer) {
-    // NGraph -> CNN conversion may be called in 2 different moments: at
-    // the beginning if conversion was forced by configuration or after detect
-    // network batch and precision conversions. Conversion utility
-    // returns std::shared_ptr. ICNNNetwork is neither copyable nor movable.
-    // As a result, it is impossible to overwrite given "network" argument.
-    // Do not use network parameter in this function to avoid using wrong network
-    // reference (e.g. original instead of converted).
-    auto* originalOrConvertNetwork = &network;
+ModelPtr FrontEnd::runCommonPasses(ie::ICNNNetwork::Ptr network,
+        const UnsupportedLayerCallback& unsupportedLayer, const SupportedLayerCallback& supportedLayer) {
     const auto& env = CompileEnv::get();
     //
@@ -416,7 +407,7 @@ ModelPtr FrontEnd::runCommonPasses(ie::ICNNNetwork& network, const UnsupportedLa
     // Create new VPU model
     //
-    const auto model = std::make_shared<ModelObj>(originalOrConvertNetwork->getName());
+    auto model = std::make_shared<ModelObj>(network->getName());
     model->attrs().set<int>("index", g_counter.fetch_add(1));
     model->attrs().set<Resources>("resources", env.resources);
@@ -425,39 +416,35 @@ ModelPtr FrontEnd::runCommonPasses(ie::ICNNNetwork& network, const UnsupportedLa
     // Update IE Network
     //
-    std::shared_ptr<ie::ICNNNetwork> convertedNetwork;
     {
         env.log->trace("Update IE Network");
         VPU_LOGGER_SECTION(env.log);
-        if (originalOrConvertNetwork->getFunction() && env.config.forceDeprecatedCnnConversion) {
-            convertedNetwork = convertNetwork(*originalOrConvertNetwork);
-            originalOrConvertNetwork = convertedNetwork.get();
+        if (network->getFunction() && env.config.forceDeprecatedCnnConversion) {
+            network = convertNetwork(*network);
         }
-        detectNetworkBatch(*originalOrConvertNetwork, model);
+        detectNetworkBatch(*network, model);
-        if (originalOrConvertNetwork->getFunction()) {
-            convertedNetwork = convertNetwork(*originalOrConvertNetwork);
-            originalOrConvertNetwork = convertedNetwork.get();
+        if (network->getFunction()) {
+            network = convertNetwork(*network);
         }
-        ie::NetPass::ConvertPrecision(*originalOrConvertNetwork, ie::Precision::I64, ie::Precision::I32);
-        ie::NetPass::ConvertPrecision(*originalOrConvertNetwork, ie::Precision::U32, ie::Precision::I32);
-        ie::NetPass::ConvertPrecision(*originalOrConvertNetwork, ie::Precision::U64, ie::Precision::I32);
-        ie::NetPass::ConvertPrecision(*originalOrConvertNetwork, ie::Precision::BOOL, ie::Precision::I32);
+        ie::NetPass::ConvertPrecision(*network, ie::Precision::I64, ie::Precision::I32);
+        ie::NetPass::ConvertPrecision(*network, ie::Precision::U32, ie::Precision::I32);
+        ie::NetPass::ConvertPrecision(*network, ie::Precision::U64, ie::Precision::I32);
+        ie::NetPass::ConvertPrecision(*network, ie::Precision::BOOL, ie::Precision::I32);
-        removeConstLayers(*originalOrConvertNetwork);
+        removeConstLayers(*network);
-        unrollLoops(*originalOrConvertNetwork);
+        unrollLoops(*network);
     }
     //
     // Parse IR Network
     //
-    _ieParsedNetwork = parseNetwork(*originalOrConvertNetwork);
+    _ieParsedNetwork = parseNetwork(*network);
     //
     // Process internal VPU Model

@@ -144,8 +144,7 @@ void CompileEnv::free() {
 namespace {
-CompiledGraph::Ptr compileImpl(ie::ICNNNetwork& network,
-        const ie::ICore* core) {
+CompiledGraph::Ptr compileImpl(const ie::ICNNNetwork& network, const ie::ICore* core) {
     const auto& env = CompileEnv::get();
     env.log->debug("Compile network [%s]", network.getName());
@@ -193,12 +192,8 @@ CompiledGraph::Ptr compileImpl(const Model& model) {
 } // namespace
-CompiledGraph::Ptr compileNetwork(
-        ie::ICNNNetwork& network,
-        Platform platform,
-        const CompilationConfig& config,
-        const Logger::Ptr& log,
-        const ie::ICore* core) {
+CompiledGraph::Ptr compileNetwork(const ie::ICNNNetwork& network, Platform platform, const CompilationConfig& config, const Logger::Ptr& log,
+        const ie::ICore* core) {
     CompileEnv::init(platform, config, log);
     AutoScope autoDeinit([] {
         CompileEnv::free();
@@ -224,10 +219,7 @@ CompiledGraph::Ptr compileModel(
     return compileImpl(model);
 }
-CompiledGraph::Ptr compileSubNetwork(
-        ie::ICNNNetwork& network,
-        const CompilationConfig& subConfig,
-        const ie::ICore* core) {
+CompiledGraph::Ptr compileSubNetwork(const ie::ICNNNetwork& network, const CompilationConfig& subConfig, const ie::ICore* core) {
     VPU_PROFILE(compileSubNetwork);
     const auto& env = CompileEnv::get();
@@ -261,10 +253,7 @@ std::set<std::string> getSupportedLayers(
     auto stageBuilder = std::make_shared<StageBuilder>();
     auto frontEnd = std::make_shared<FrontEnd>(stageBuilder, core);
-    auto clonedNetworkImpl = ie::cloneNet(network);
-    return frontEnd->checkSupportedLayers(*clonedNetworkImpl);
+    return frontEnd->checkSupportedLayers(network);
 }
 int DeviceResources::numShaves(const Platform& platform) {

@@ -51,7 +51,7 @@ ExecutableNetwork::ExecutableNetwork(
 }
 ExecutableNetwork::ExecutableNetwork(
-        ICNNNetwork& network,
+        const ICNNNetwork& network,
         std::shared_ptr<IMvnc> mvnc,
         std::vector<DevicePtr>& devicePool,
         const MyriadConfig& config,
@@ -84,7 +84,7 @@ ExecutableNetwork::ExecutableNetwork(
         return;
     }
-    auto networkName = network.getName();
+    const auto& networkName = network.getName();
     _executor->allocateGraph(_device, _graphDesc, _graphBlob, compiledGraph->blobHeader, compiledGraph->numActiveStages, networkName, _actualNumExecutors);
     if (_config.exclusiveAsyncRequests()) {
         ExecutorManager *executorManager = ExecutorManager::getInstance();

@@ -32,7 +32,7 @@ class ExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDef
 public:
     typedef std::shared_ptr<ExecutableNetwork> Ptr;
-    explicit ExecutableNetwork(InferenceEngine::ICNNNetwork &network,
+    explicit ExecutableNetwork(const InferenceEngine::ICNNNetwork& network,
         std::shared_ptr<IMvnc> mvnc,
         std::vector<DevicePtr> &devicePool,
         const MyriadConfig& config,

@@ -39,8 +39,7 @@ ExecutableNetworkInternal::Ptr Engine::LoadExeNetworkImpl(const ICNNNetwork& net
     auto parsedConfigCopy = _parsedConfig;
     parsedConfigCopy.update(config);
-    auto clonedNetwork = cloneNetwork(network);
-    return std::make_shared<ExecutableNetwork>(*clonedNetwork, _mvnc, _devicePool, parsedConfigCopy, GetCore());
+    return std::make_shared<ExecutableNetwork>(network, _mvnc, _devicePool, parsedConfigCopy, GetCore());
 }
 void Engine::SetConfig(const std::map<std::string, std::string> &config) {