Remove some legacy headers from inference component (#19325)

* Remove some legacy headers from inference component

* Fixed code style
Ilya Churaev 2023-08-23 15:13:30 +04:00 committed by GitHub
parent aa53394c07
commit dcfb6bb042
7 changed files with 29 additions and 16 deletions

openvino/runtime/plugin_itt.hpp (new file)

@@ -0,0 +1,21 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+/**
+ * @brief Defines openvino domains for tracing
+ * @file plugin_itt.hpp
+ */
+
+#pragma once
+
+#include "openvino/itt.hpp"
+
+namespace ov {
+namespace itt {
+namespace domains {
+OV_ITT_DOMAIN(Plugin)
+OV_ITT_DOMAIN(PluginLoadTime)
+}  // namespace domains
+}  // namespace itt
+}  // namespace ov
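For orientation, here is a minimal usage sketch (not part of the diff) of how code can trace a region against the new domains. The function name and task label are hypothetical placeholders; OV_ITT_SCOPED_TASK and OV_ITT_DOMAIN come from openvino/itt.hpp, as used elsewhere in this commit:

#include "openvino/runtime/plugin_itt.hpp"

void load_model_stub() {  // hypothetical function, for illustration only
    // Opens an ITT task in the PluginLoadTime domain; the task is closed
    // automatically when the enclosing scope exits.
    OV_ITT_SCOPED_TASK(ov::itt::domains::PluginLoadTime, "load_model_stub");
    // ... work to be measured under the PluginLoadTime domain ...
}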


@@ -5,7 +5,6 @@
 #include "openvino/runtime/core.hpp"
 
 #include "any_copy.hpp"
-#include "cnn_network_ngraph_impl.hpp"
 #include "dev/converter_utils.hpp"
 #include "dev/core_impl.hpp"
 #include "itt.hpp"


@@ -4,13 +4,6 @@
 
 #include "openvino/runtime/iremote_tensor.hpp"
 
-#include <memory>
-
-#include "ie_blob.h"
-#include "ie_ngraph_utils.hpp"
-#include "ie_remote_blob.hpp"
-#include "openvino/runtime/properties.hpp"
-
 namespace ov {
 
 IRemoteTensor::~IRemoteTensor() = default;


@@ -7,7 +7,6 @@
 #include <memory>
 #include <unordered_map>
 
-#include "cpp_interfaces/plugin_itt.hpp"
 #include "openvino/core/except.hpp"
 #include "openvino/core/layout.hpp"
 #include "openvino/core/parallel.hpp"
@@ -16,6 +15,7 @@
 #include "openvino/runtime/iinfer_request.hpp"
 #include "openvino/runtime/iremote_context.hpp"
 #include "openvino/runtime/make_tensor.hpp"
+#include "openvino/runtime/plugin_itt.hpp"
 #include "openvino/runtime/tensor.hpp"
 
 namespace {
@@ -185,12 +185,12 @@ ov::SoPtr<ov::ITensor>& ov::ISyncInferRequest::get_tensor_ptr(const ov::Output<const ov::Node>& port) const {
 }
 
 ov::SoPtr<ov::ITensor> ov::ISyncInferRequest::get_tensor(const ov::Output<const ov::Node>& port) const {
-    OV_ITT_SCOPED_TASK(InferenceEngine::itt::domains::Plugin, "get_tensor");
+    OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, "get_tensor");
     return get_tensor_ptr(port);
 }
 
 void ov::ISyncInferRequest::set_tensor(const ov::Output<const ov::Node>& port, const ov::SoPtr<ov::ITensor>& tensor) {
-    OV_ITT_SCOPED_TASK(InferenceEngine::itt::domains::Plugin, "set_tensor");
+    OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, "set_tensor");
     auto found_port = find_port(port);
     OPENVINO_ASSERT(found_port.found(), "Cannot find tensor for port ", port);
     try {
@@ -207,7 +207,7 @@ void ov::ISyncInferRequest::set_tensor(const ov::Output<const ov::Node>& port, const ov::SoPtr<ov::ITensor>& tensor) {
 }
 
 std::vector<ov::SoPtr<ov::ITensor>> ov::ISyncInferRequest::get_tensors(const ov::Output<const ov::Node>& port) const {
-    OV_ITT_SCOPED_TASK(InferenceEngine::itt::domains::Plugin, "get_tensors");
+    OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, "get_tensors");
     auto found_port = find_port(port);
     OPENVINO_ASSERT(found_port.found(), "Cannot find input tensors for port ", port);
     if (found_port.is_input() && m_batched_tensors.count(get_inputs().at(found_port.idx).get_tensor_ptr()))
@@ -217,7 +217,7 @@ std::vector<ov::SoPtr<ov::ITensor>> ov::ISyncInferRequest::get_tensors(const ov::Output<const ov::Node>& port) const {
 
 void ov::ISyncInferRequest::set_tensors(const ov::Output<const ov::Node>& port,
                                         const std::vector<ov::SoPtr<ov::ITensor>>& tensors) {
-    OV_ITT_SCOPED_TASK(InferenceEngine::itt::domains::Plugin, "set_tensors");
+    OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, "set_tensors");
     auto found_port = find_port(port);
     OPENVINO_ASSERT(found_port.found() && found_port.is_input(), "Cannot find input tensors for port ", port);
     if (tensors.size() == 1) {
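The hunks above all follow one migration pattern: drop the legacy cpp_interfaces/plugin_itt.hpp header and the InferenceEngine::itt::domains::Plugin domain in favor of the new OpenVINO ones. A sketch of the same change in hypothetical downstream plugin code (infer_step is a made-up name, not from this commit):

// Before (legacy header and domain, removed by this commit):
//   #include "cpp_interfaces/plugin_itt.hpp"
//   OV_ITT_SCOPED_TASK(InferenceEngine::itt::domains::Plugin, "infer_step");

// After (header and domain introduced by this commit):
#include "openvino/runtime/plugin_itt.hpp"

void infer_step() {  // hypothetical function, for illustration only
    OV_ITT_SCOPED_TASK(ov::itt::domains::Plugin, "infer_step");
    // ... inference work traced under the Plugin domain ...
}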

plugin.hpp

@@ -3,7 +3,7 @@
 //
 
 /**
- * @brief This is a header file for the Inference Engine plugin C++ API
+ * @brief This is a header file for the OpenVINO plugin C++ API
  *
  * @file plugin.hpp
  */


@@ -6,10 +6,10 @@
 
 #include <map>
 #include <memory>
-#include <openvino/core/except.hpp>
 #include <string>
 
 #include "ie_common.h"
+#include "openvino/core/except.hpp"
 #include "openvino/core/node.hpp"
 #include "openvino/runtime/compiled_model.hpp"
 #include "openvino/runtime/exception.hpp"