Remove some legacy headers from inference component (#19325)
* Remove some legacy headers from inference component * Fixed code style
This commit is contained in:
parent
aa53394c07
commit
dcfb6bb042
21
src/inference/dev_api/openvino/runtime/plugin_itt.hpp
Normal file
21
src/inference/dev_api/openvino/runtime/plugin_itt.hpp
Normal file
@@ -0,0 +1,21 @@
|
||||
// Copyright (C) 2018-2023 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
/**
|
||||
 * @brief Defines openvino domains for tracing
|
||||
 * @file plugin_itt.hpp
|
||||
 */
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "openvino/itt.hpp"
|
||||
|
||||
namespace ov {
|
||||
namespace itt {
|
||||
namespace domains {
|
||||
// ITT tracing domain for general plugin-side scoped tasks
// (used elsewhere in this commit via OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, ...)).
OV_ITT_DOMAIN(Plugin)
|
||||
// ITT tracing domain presumably dedicated to measuring plugin load time —
// no usage is visible in this view; confirm against callers.
OV_ITT_DOMAIN(PluginLoadTime)
|
||||
}  // namespace domains
|
||||
}  // namespace itt
|
||||
}  // namespace ov
|
@@ -5,7 +5,6 @@
|
||||
#include "openvino/runtime/core.hpp"
|
||||
|
||||
#include "any_copy.hpp"
|
||||
#include "cnn_network_ngraph_impl.hpp"
|
||||
#include "dev/converter_utils.hpp"
|
||||
#include "dev/core_impl.hpp"
|
||||
#include "itt.hpp"
|
||||
|
@@ -4,13 +4,6 @@
|
||||
|
||||
#include "openvino/runtime/iremote_tensor.hpp"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "ie_blob.h"
|
||||
#include "ie_ngraph_utils.hpp"
|
||||
#include "ie_remote_blob.hpp"
|
||||
#include "openvino/runtime/properties.hpp"
|
||||
|
||||
namespace ov {
|
||||
|
||||
IRemoteTensor::~IRemoteTensor() = default;
|
||||
|
@@ -7,7 +7,6 @@
|
||||
#include <memory>
|
||||
#include <unordered_map>
|
||||
|
||||
#include "cpp_interfaces/plugin_itt.hpp"
|
||||
#include "openvino/core/except.hpp"
|
||||
#include "openvino/core/layout.hpp"
|
||||
#include "openvino/core/parallel.hpp"
|
||||
@@ -16,6 +15,7 @@
|
||||
#include "openvino/runtime/iinfer_request.hpp"
|
||||
#include "openvino/runtime/iremote_context.hpp"
|
||||
#include "openvino/runtime/make_tensor.hpp"
|
||||
#include "openvino/runtime/plugin_itt.hpp"
|
||||
#include "openvino/runtime/tensor.hpp"
|
||||
|
||||
namespace {
|
||||
@@ -185,12 +185,12 @@ ov::SoPtr<ov::ITensor>& ov::ISyncInferRequest::get_tensor_ptr(const ov::Output<c
|
||||
}
|
||||
|
||||
ov::SoPtr<ov::ITensor> ov::ISyncInferRequest::get_tensor(const ov::Output<const ov::Node>& port) const {
|
||||
OV_ITT_SCOPED_TASK(InferenceEngine::itt::domains::Plugin, "get_tensor");
|
||||
OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, "get_tensor");
|
||||
return get_tensor_ptr(port);
|
||||
}
|
||||
|
||||
void ov::ISyncInferRequest::set_tensor(const ov::Output<const ov::Node>& port, const ov::SoPtr<ov::ITensor>& tensor) {
|
||||
OV_ITT_SCOPED_TASK(InferenceEngine::itt::domains::Plugin, "set_tensor");
|
||||
OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, "set_tensor");
|
||||
auto found_port = find_port(port);
|
||||
OPENVINO_ASSERT(found_port.found(), "Cannot find tensor for port ", port);
|
||||
try {
|
||||
@@ -207,7 +207,7 @@ void ov::ISyncInferRequest::set_tensor(const ov::Output<const ov::Node>& port, c
|
||||
}
|
||||
|
||||
std::vector<ov::SoPtr<ov::ITensor>> ov::ISyncInferRequest::get_tensors(const ov::Output<const ov::Node>& port) const {
|
||||
OV_ITT_SCOPED_TASK(InferenceEngine::itt::domains::Plugin, "get_tensors");
|
||||
OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, "get_tensors");
|
||||
auto found_port = find_port(port);
|
||||
OPENVINO_ASSERT(found_port.found(), "Cannot find input tensors for port ", port);
|
||||
if (found_port.is_input() && m_batched_tensors.count(get_inputs().at(found_port.idx).get_tensor_ptr()))
|
||||
@@ -217,7 +217,7 @@ std::vector<ov::SoPtr<ov::ITensor>> ov::ISyncInferRequest::get_tensors(const ov:
|
||||
|
||||
void ov::ISyncInferRequest::set_tensors(const ov::Output<const ov::Node>& port,
|
||||
const std::vector<ov::SoPtr<ov::ITensor>>& tensors) {
|
||||
OV_ITT_SCOPED_TASK(InferenceEngine::itt::domains::Plugin, "set_tensors");
|
||||
OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, "set_tensors");
|
||||
auto found_port = find_port(port);
|
||||
OPENVINO_ASSERT(found_port.found() && found_port.is_input(), "Cannot find input tensors for port ", port);
|
||||
if (tensors.size() == 1) {
|
||||
|
@@ -3,7 +3,7 @@
|
||||
//
|
||||
|
||||
/**
|
||||
* @brief This is a header file for the Inference Engine plugin C++ API
|
||||
* @brief This is a header file for the OpenVINO plugin C++ API
|
||||
*
|
||||
* @file plugin.hpp
|
||||
*/
|
||||
|
@@ -6,10 +6,10 @@
|
||||
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <openvino/core/except.hpp>
|
||||
#include <string>
|
||||
|
||||
#include "ie_common.h"
|
||||
#include "openvino/core/except.hpp"
|
||||
#include "openvino/core/node.hpp"
|
||||
#include "openvino/runtime/compiled_model.hpp"
|
||||
#include "openvino/runtime/exception.hpp"
|
||||
|
Loading…
Reference in New Issue
Block a user