Removed obsolete ie_profiling.hpp (#4043)

This commit is contained in:
Ilya Lavrenov 2021-01-28 20:04:52 +03:00 committed by GitHub
parent 117c04def1
commit 5f982ca6ac
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 19 additions and 257 deletions

View File

@@ -25,16 +25,19 @@ TemplateAsyncInferRequest::TemplateAsyncInferRequest(
if (remoteDevice) {
_pipeline = {
{cpuTaskExecutor, [this] {
IE_PROFILING_AUTO_SCOPE(PreprocessingAndStartPipeline)
OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin,
"TemplateAsyncInferRequest::PreprocessingAndStartPipeline");
_inferRequest->inferPreprocess();
_inferRequest->startPipeline();
}},
{_waitExecutor, [this] {
IE_PROFILING_AUTO_SCOPE(WaitPipeline)
OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin,
"TemplateAsyncInferRequest::WaitPipeline");
_inferRequest->waitPipeline();
}},
{cpuTaskExecutor, [this] {
IE_PROFILING_AUTO_SCOPE(Postprocessing)
OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin,
"TemplateAsyncInferRequest::Postprocessing");
_inferRequest->inferPostprocess();
}}
};

View File

@@ -10,6 +10,7 @@
#include "template/template_config.hpp"
#include "template_plugin.hpp"
#include "template_executable_network.hpp"
#include "template_itt.hpp"
using namespace TemplatePlugin;
@@ -61,7 +62,7 @@ TemplatePlugin::ExecutableNetwork::ExecutableNetwork(std::istream & model,
model.read(dataBlob->buffer(), dataSize);
}
// TODO: implement Import / Export of configuration options
// TODO: implement Import / Export of configuration options and merge with `cfg`
// TODO: implement Import / Export of network precisions, layouts, preprocessing info
auto cnnnetwork = _plugin->GetCore()->ReadNetwork(xmlString, std::move(dataBlob));
@@ -188,6 +189,8 @@ InferenceEngine::Parameter TemplatePlugin::ExecutableNetwork::GetMetric(const st
// ! [executable_network:export_impl]
void TemplatePlugin::ExecutableNetwork::ExportImpl(std::ostream& modelStream) {
OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin, "ExecutableNetwork::ExportImpl");
// Note: custom ngraph extensions are not supported
std::map<std::string, ngraph::OpSet> custom_opsets;
std::stringstream xmlFile, binFile;

View File

@@ -16,6 +16,7 @@
#include <transformations/rt_info/fused_names_attribute.hpp>
#include "template/template_config.hpp"
#include "template_itt.hpp"
#include "template_plugin.hpp"
#include "template_executable_network.hpp"
#include "template_infer_request.hpp"
@@ -74,6 +75,8 @@ std::shared_ptr<ngraph::Function> TransformNetwork(const std::shared_ptr<const n
// ! [plugin:load_exe_network_impl]
InferenceEngine::ExecutableNetworkInternal::Ptr Plugin::LoadExeNetworkImpl(const InferenceEngine::CNNNetwork & network,
const ConfigMap &config) {
OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin, "Plugin::LoadExeNetworkImpl");
auto cfg = Configuration{ config, _cfg };
InferenceEngine::InputsDataMap networkInputs = network.getInputsInfo();
InferenceEngine::OutputsDataMap networkOutputs = network.getOutputsInfo();
@@ -113,15 +116,11 @@ InferenceEngine::ExecutableNetworkInternal::Ptr Plugin::LoadExeNetworkImpl(const
// ! [plugin:import_network_impl]
InferenceEngine::ExecutableNetwork Plugin::ImportNetworkImpl(std::istream& model, const std::map<std::string, std::string>& config) {
// TODO: Import network from stream is not mandatory functionality;
// Can just throw an exception and remove the code below
Configuration exportedCfg;
OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin, "Plugin::ImportNetworkImpl");
// some code below which reads exportedCfg from `model` stream
// ..
auto cfg = Configuration(config, exportedCfg);
auto exec_network_impl = std::make_shared<ExecutableNetwork>(model, cfg, std::static_pointer_cast<Plugin>(shared_from_this()));
Configuration cfg(config);
auto exec_network_impl = std::make_shared<ExecutableNetwork>(model, cfg,
std::static_pointer_cast<Plugin>(shared_from_this()));
return make_executable_network(exec_network_impl);
}
@@ -129,6 +128,8 @@ InferenceEngine::ExecutableNetwork Plugin::ImportNetworkImpl(std::istream& model
// ! [plugin:query_network]
InferenceEngine::QueryNetworkResult Plugin::QueryNetwork(const InferenceEngine::CNNNetwork &network, const ConfigMap& config) const {
OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin, "Plugin::QueryNetwork");
InferenceEngine::QueryNetworkResult res;
Configuration cfg{config, _cfg, false};

View File

@@ -13,7 +13,6 @@
#include <cpp_interfaces/base/ie_variable_state_base.hpp>
#include "ie_iinfer_request.hpp"
#include "ie_preprocess.hpp"
#include "ie_profiling.hpp"
namespace InferenceEngine {

View File

@@ -1,242 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief [DEPRECATED] Defines API to profile your plugin using Intel VTune.
* @details This API is still available but deprecated. Use plugin_itt.hpp instead.
* @file ie_profiling.hpp
*/
#pragma once
#include <cfloat>
#include <chrono>
#include <cmath>
#include <deque>
#include <iomanip>
#include <iostream>
#include <limits>
#include <mutex>
#include <string>
#include <unordered_map>
#include <utility>
/**
* @cond
*/
#ifdef ENABLE_PROFILING_ITT
#include <ittnotify.h>
#endif
namespace InferenceEngine {
// Customization points implemented by each profiling backend. They are found
// via argument-dependent lookup when an Annotate scope begins/ends, so a
// backend only has to provide overloads for its own Static/Block types.
template <typename Static, typename Block>
void annotateBegin(Static&& static_, Block&& block_);
template <typename Static, typename Block>
void annotateEnd(Static&& static_, Block&& block_);
// RAII scope marker: calls annotateBegin() in its constructor and
// annotateEnd() in its destructor. `Static` state is constructed once per
// annotation site (one static instance per distinct `Local` tag type, built
// from Local::staticArgs()); `Block` state is constructed per scope entry.
template <typename Static, typename Block, typename Local>
struct Annotate {
struct Static_ {
// Hand-rolled compile-time index sequence (substitute for
// std::index_sequence, which requires C++14) used to unpack the
// argument tuple returned by Local::staticArgs().
template <std::size_t...>
struct idx {};
template <std::size_t N, std::size_t... S>
struct idx<N, S...> : idx<N - 1, N - 1, S...> {};
template <std::size_t... S>
struct idx<0, S...> {
using type = idx<S...>;
};
// Constructs the wrapped Static object from the unpacked tuple elements.
template <typename ArgTuple, std::size_t... I>
Static_(ArgTuple&& arg_tuple, idx<I...>): static_ {std::get<I>(std::forward<ArgTuple>(arg_tuple))...} {}
// Entry point: generates the index pack for the tuple's size and delegates.
template <typename ArgTuple>
explicit Static_(ArgTuple&& arg_tuple)
: Static_ {std::forward<ArgTuple>(arg_tuple), typename idx<std::tuple_size<ArgTuple>::value>::type {}} {}
Static static_;
};
static Static_ static_;
Block block_;
// Non-copyable: a scope marker must begin/end exactly once. Moves keep the
// type usable where the compiler needs a move (e.g. returning by value).
Annotate(const Annotate&) = delete;
Annotate& operator=(const Annotate&) = delete;
Annotate(Annotate&&) = default;
Annotate& operator=(Annotate&&) = default;
// Forwards its arguments to the per-scope Block state, then opens the scope.
template <typename... Ts>
inline explicit Annotate(Ts&&... xs): block_ {std::forward<Ts>(xs)...} {
annotateBegin(static_.static_, block_);
}
inline ~Annotate() {
annotateEnd(static_.static_, block_);
}
};
// Out-of-class definition of the per-instantiation static state, initialized
// from the tuple produced by the tag type's Local::staticArgs().
template <typename Static, typename Block, typename Local>
typename Annotate<Static, Block, Local>::Static_ Annotate<Static, Block, Local>::static_(Local::staticArgs());
// Token-pasting helpers; the extra EVAL level forces macro-argument expansion
// (so __LINE__ below is expanded before concatenation).
#define IE_ANNOTATE_CONCAT(x, y) IE_ANNOTATE_CONCAT_EVAL(x, y)
#define IE_ANNOTATE_CONCAT_EVAL(x, y) x##y
// Expands a parenthesized tuple `(a, b, ...)` into a bare argument list.
#define IE_ANNOTATE_UNPACK(tuple) IE_ANNOTATE_UNPACK_EVAL tuple
#define IE_ANNOTATE_UNPACK_EVAL(...) __VA_ARGS__
// Builds a per-site unique identifier by appending __LINE__, so several
// annotation scopes can coexist in one function without name clashes.
#define IE_ANNOTATE_MAKE_NAME(lib_name, postfix) \
IE_ANNOTATE_CONCAT(IE_ANNOTATE_CONCAT(IE_ANNOTATE_CONCAT(__intel_util_annotate_, lib_name), postfix), __LINE__)
#define IE_ANNOTATE_LOCAL_TYPE_NAME(lib_name) IE_ANNOTATE_MAKE_NAME(lib_name, _ctx)
#define IE_ANNOTATE_VARIABLE_NAME(lib_name) IE_ANNOTATE_MAKE_NAME(lib_name, _variable)
#define IE_ANNOTATE_FUNC_NAME(lib_name) IE_ANNOTATE_MAKE_NAME(lib_name, _func)
// Declares a local struct derived from Annotate<static_type, block_type, self>
// whose staticArgs() returns the given static-argument tuple; the struct
// itself is the `Local` tag type that keys the shared static state.
#define IE_ANNOTATE_MAKE_SCOPE_TYPE(lib_name, static_type, block_type, make_static_args_tuple) \
struct IE_ANNOTATE_LOCAL_TYPE_NAME(lib_name) \
: ::InferenceEngine::Annotate<static_type, block_type, IE_ANNOTATE_LOCAL_TYPE_NAME(lib_name)> { \
using ::InferenceEngine::Annotate<static_type, block_type, IE_ANNOTATE_LOCAL_TYPE_NAME(lib_name)>::Annotate; \
static auto staticArgs() -> decltype(std::make_tuple(IE_ANNOTATE_UNPACK(make_static_args_tuple))) { \
return std::make_tuple(IE_ANNOTATE_UNPACK(make_static_args_tuple)); \
} \
}
// Declares the scope type above and instantiates a uniquely named local
// variable of it; the variable's lifetime delimits the profiled region.
#define IE_ANNOTATE_MAKE_SCOPE(lib_name, static_type, block_type, make_static_args_tuple, make_block_args_tuple) \
IE_ANNOTATE_MAKE_SCOPE_TYPE(lib_name, static_type, block_type, make_static_args_tuple) \
IE_ANNOTATE_VARIABLE_NAME(lib_name) {IE_ANNOTATE_UNPACK(make_block_args_tuple)};
#ifdef ENABLE_PROFILING_ITT
// Static state for compile-time-named scopes: the ITT domain plus a string
// handle for the task name, created once per annotation site.
struct IttTaskHandles {
__itt_domain* const domain;
__itt_string_handle* const handle;
explicit IttTaskHandles(const char* taskName)
: domain {__itt_domain_create("InferenceEngine")}, handle {__itt_string_handle_create(taskName)} {}
};
// The compile-time-named variant needs no per-scope state.
struct IttBlock {};
// annotateBegin/annotateEnd overloads picked up by Annotate<> via ADL.
inline static void annotateBegin(IttTaskHandles& h, IttBlock&) {
__itt_task_begin(h.domain, __itt_null, __itt_null, h.handle);
}
inline static void annotateEnd(IttTaskHandles& h, IttBlock&) {
__itt_task_end(h.domain);
}
// Opens an ITT task, named `task_name`, that lasts until the end of the
// enclosing scope.
#define IE_ITT_SCOPE(task_name) \
IE_ANNOTATE_MAKE_SCOPE(InferenceEngineItt, ::InferenceEngine::IttTaskHandles, ::InferenceEngine::IttBlock, \
(task_name), ())
#else
// Profiling disabled at build time: the macro compiles away to nothing.
#define IE_ITT_SCOPE(task_name)
#endif
// Stringizing helpers; the two-level expansion quotes the argument AFTER
// macro expansion (so IE_STR(NAME) yields the expanded name as a literal).
#define IE_STR(x) IE_STR_(x)
#define IE_STR_(x) #x
struct ProfilingTask;
// Static/Block types for runtime-named scopes. No shared static state is
// needed; the per-scope state is just a pointer to the ProfilingTask that
// owns the ITT handles.
struct IttStatic {};
struct IttProfilingTask {
ProfilingTask* t;
};
static void annotateBegin(IttStatic&, IttProfilingTask& t);
static void annotateEnd(IttStatic&, IttProfilingTask& t);
/**
 * @class ProfilingTask
 * @ingroup ie_dev_profiling
 * @brief Used to annotate section of code which would be named at runtime
 */
struct ProfilingTask {
ProfilingTask() = default;
//! @private
ProfilingTask(const ProfilingTask&) = default;
//! @private
ProfilingTask& operator =(const ProfilingTask&) = default;
/**
 * @brief Construct ProfilingTask with runtime defined name
 */
inline explicit ProfilingTask(const std::string& taskName)
: name(taskName)
#ifdef ENABLE_PROFILING_ITT
// With ITT enabled, eagerly create the domain and name handle so each
// scope entry only has to issue __itt_task_begin/__itt_task_end.
,
domain(__itt_domain_create("InferenceEngine")),
handle(__itt_string_handle_create(taskName.c_str()))
#endif
{
}
private:
// Only the annotateBegin/annotateEnd overloads may touch the ITT handles.
friend void annotateBegin(IttStatic&, IttProfilingTask& t);
friend void annotateEnd(IttStatic&, IttProfilingTask& t);
std::string name;
#ifdef ENABLE_PROFILING_ITT
__itt_domain* domain;
__itt_string_handle* handle;
#endif
};
// annotateBegin/annotateEnd overloads for runtime-named scopes: forward to
// ITT using the handles stored in the referenced ProfilingTask. With ITT
// disabled they compile to no-ops ((void)t silences unused-parameter warnings).
inline static void annotateBegin(IttStatic&, IttProfilingTask& t) {
#ifdef ENABLE_PROFILING_ITT
__itt_task_begin(t.t->domain, __itt_null, __itt_null, t.t->handle);
#else
(void)t;
#endif
}
inline static void annotateEnd(IttStatic&, IttProfilingTask& t) {
#ifdef ENABLE_PROFILING_ITT
__itt_task_end(t.t->domain);
#else
(void)t;
#endif
}
#ifdef ENABLE_PROFILING_ITT
// Opens a scope for an existing ProfilingTask variable (runtime-defined name).
#define IE_ITT_TASK_SCOPE(profilingTask) \
IE_ANNOTATE_MAKE_SCOPE(InferenceEngineIttScopeTask, ::InferenceEngine::IttStatic, \
::InferenceEngine::IttProfilingTask, (), (&(profilingTask)))
#else
#define IE_ITT_TASK_SCOPE(profiling_task)
#endif
// Names the calling thread in the profiler UI; a no-op when ITT is disabled.
inline static void annotateSetThreadName(const char* name) {
#ifdef ENABLE_PROFILING_ITT
__itt_thread_set_name(name);
#else
(void)(name);
#endif
}
/**
 * @def IE_PROFILING_AUTO_SCOPE(NAME)
 * @ingroup ie_dev_profiling
 * @brief Annotate section of code till scope exit to be profiled using known at compile time @p NAME as section id
 * @param NAME Known at compile time name of section of code that is passed to profiling back end
 */
// Deprecated together with this header (see the file-level note above).
#define IE_PROFILING_AUTO_SCOPE(NAME) IE_ITT_SCOPE(IE_STR(NAME));
/**
 * @def IE_PROFILING_AUTO_SCOPE_TASK(PROFILING_TASK)
 * @ingroup ie_dev_profiling
 * @brief Annotate section of code till scope exit to be profiled runtime configured variable of ProfilingTask type.
 * ProfilingTask::name will be used as section id.
 * @param PROFILING_TASK variable of ProfilingTask type
 */
// Deprecated together with this header (see the file-level note above).
#define IE_PROFILING_AUTO_SCOPE_TASK(PROFILING_TASK) IE_ITT_TASK_SCOPE(PROFILING_TASK);
} // namespace InferenceEngine
/**
* @endcond
*/

View File

@@ -9,7 +9,6 @@
#include <memory>
#include <ie_blob.h>
#include <ie_profiling.hpp>
#include <file_utils.h>
#include <ie_preprocess.hpp>

View File

@@ -13,7 +13,6 @@
#include <opencv2/gapi/gcompiled.hpp>
#include <opencv2/gapi/gcomputation.hpp>
#include <opencv2/gapi/util/optional.hpp>
#include "ie_profiling.hpp"
#include <openvino/itt.hpp>
// FIXME: Move this definition back to ie_preprocess_data,