diff --git a/src/core/shape_inference/include/tensor_data_accessor.hpp b/src/core/shape_inference/include/tensor_data_accessor.hpp
index 8f017e05f1b..36d29d934c2 100644
--- a/src/core/shape_inference/include/tensor_data_accessor.hpp
+++ b/src/core/shape_inference/include/tensor_data_accessor.hpp
@@ -27,11 +27,10 @@ protected:
  * @brief Tensor data accessor functor.
  *
  * Creates the ov::Tensor found in tensors container.
- * This accessor not take ownership of tensors container.
+ * This accessor does not take ownership of tensors container.
  * Supports following containers:
  * - ov::TensorVector
- * - ngraph::HostTensorVector
- * - std::map
+ * - std::unordered_map
  *
  * @tparam TContainer Type of tensor container.
  */
@@ -61,15 +60,9 @@ private:
 template <>
 Tensor TensorAccessor<TensorVector>::operator()(size_t port) const;
 
-template <>
-Tensor TensorAccessor<HostTensorVector>::operator()(size_t port) const;
-
 template <>
 Tensor TensorAccessor<std::unordered_map<size_t, Tensor>>::operator()(size_t port) const;
 
-template <>
-Tensor TensorAccessor<std::map<size_t, HostTensorPtr>>::operator()(size_t port) const;
-
 template <>
 Tensor TensorAccessor<void>::operator()(size_t port) const;
 
diff --git a/src/core/shape_inference/include/utils.hpp b/src/core/shape_inference/include/utils.hpp
index 308a7f84594..4b302b6618b 100644
--- a/src/core/shape_inference/include/utils.hpp
+++ b/src/core/shape_inference/include/utils.hpp
@@ -62,31 +62,6 @@ TResult get_raw_data_as(const element::Type_t et, const void* const ptr, const s
     return out;
 }
 
-OPENVINO_SUPPRESS_DEPRECATED_START
-/**
- * \brief Get data from Host tensor as object TResult.
- *
- * \tparam T              TResult data type.
- * \tparam TResult        Type of return object, must support creation of std::inserter. Default std::vector<T>.
- * \tparam UnaryOperation Unary function object applied on data with signature (T f(const U u)).
- *
- * \param tv    Input host tensor.
- * \param func  Unary operation function object.
- *
- * \return Object of TResult with data from host tensor.
- */
-template <class T, class TResult = std::vector<T>, class UnaryOperation>
-TResult get_tensor_data_as(ngraph::HostTensor& tv, UnaryOperation&& func) {
-    auto t = Tensor(tv.get_element_type(), tv.get_shape(), tv.get_data_ptr());
-    return get_tensor_data_as<T, TResult>(t, std::forward<UnaryOperation>(func));
-}
-
-template <class T, class TResult = std::vector<T>, class UnaryOperation>
-TResult get_tensor_data_as(ngraph::HostTensor* tv, UnaryOperation&& func) {
-    return get_tensor_data_as<T, TResult>(*tv, std::forward<UnaryOperation>(func));
-}
-OPENVINO_SUPPRESS_DEPRECATED_END
-
 /**
  * \brief Get data from ov:tensor as object TResult.
  *
diff --git a/src/core/shape_inference/src/tensor_data_accessor.cpp b/src/core/shape_inference/src/tensor_data_accessor.cpp
index 1eeb4e6c949..9b8af21dba6 100644
--- a/src/core/shape_inference/src/tensor_data_accessor.cpp
+++ b/src/core/shape_inference/src/tensor_data_accessor.cpp
@@ -3,55 +3,21 @@
 //
 #include "tensor_data_accessor.hpp"
-
-#include "ngraph/runtime/host_tensor.hpp"
-
-OPENVINO_SUPPRESS_DEPRECATED_START
 namespace ov {
 
 template <>
-Tensor TensorAccessor<TensorVector>::operator()(size_t port) const {
-    if (port < m_tensors->size()) {
-        return (*m_tensors)[port];
-    } else {
-        return make_tensor_accessor()(port);
-    }
+Tensor TensorAccessor<TensorVector>::operator()(const size_t port) const {
+    return (port < m_tensors->size()) ? (*m_tensors)[port] : Tensor{};
 }
 
 template <>
-Tensor TensorAccessor<HostTensorVector>::operator()(size_t port) const {
-    if (port < m_tensors->size()) {
-        auto ptr = (*m_tensors)[port];
-        return {ptr->get_element_type(), ptr->get_shape(), ptr->get_data_ptr()};
-    } else {
-        return make_tensor_accessor()(port);
-    }
-}
-
-template <>
-Tensor TensorAccessor<std::unordered_map<size_t, Tensor>>::operator()(size_t port) const {
+Tensor TensorAccessor<std::unordered_map<size_t, Tensor>>::operator()(const size_t port) const {
     const auto t_iter = m_tensors->find(port);
-    if (t_iter != m_tensors->cend()) {
-        return t_iter->second;
-    } else {
-        return make_tensor_accessor()(port);
-    }
+    return (t_iter != m_tensors->cend()) ? t_iter->second : Tensor{};
 }
 
 template <>
-Tensor TensorAccessor<std::map<size_t, HostTensorPtr>>::operator()(size_t port) const {
-    const auto t_iter = m_tensors->find(port);
-    if (t_iter != m_tensors->cend()) {
-        auto ptr = t_iter->second.get();
-        return {ptr->get_element_type(), ptr->get_shape(), ptr->get_data_ptr()};
-    } else {
-        return make_tensor_accessor()(port);
-    }
-}
-
-template <>
-Tensor TensorAccessor<void>::operator()(size_t) const {
-    static const auto empty = Tensor();
-    return empty;
+Tensor TensorAccessor<void>::operator()(const size_t) const {
+    return {};
 }
 
 auto make_tensor_accessor() -> const TensorAccessor<void>& {
diff --git a/src/core/tests/type_prop/eye.cpp b/src/core/tests/type_prop/eye.cpp
index b3325d5f127..b82dddda653 100644
--- a/src/core/tests/type_prop/eye.cpp
+++ b/src/core/tests/type_prop/eye.cpp
@@ -356,17 +356,16 @@ TEST_F(TypePropEyeV9Test, default_ctor) {
     EXPECT_THAT(get_shape_labels(op->get_output_partial_shape(0)), Each(no_label));
 }
 
-OPENVINO_SUPPRESS_DEPRECATED_START
 TEST_F(TypePropEyeV9Test, default_ctor_no_arguments) {
     auto op = make_op();
     op->set_out_type(element::i32);
 
     int64_t rows = 8, cols = 5;
     auto batch = std::array<int32_t, 3>{2, 4, 1};
 
-    const auto constant_map = std::map<size_t, HostTensorPtr>{
-        {0, std::make_shared<HostTensor>(element::i64, Shape{}, &rows)},
-        {1, std::make_shared<HostTensor>(element::i64, Shape{}, &cols)},
-        {3, std::make_shared<HostTensor>(element::i32, Shape{batch.size()}, batch.data())}};
+    const auto constant_map =
+        std::unordered_map<size_t, ov::Tensor>{{0, {element::i64, Shape{}, &rows}},
+                                               {1, {element::i64, Shape{}, &cols}},
+                                               {3, {element::i32, Shape{batch.size()}, batch.data()}}};
 
     const auto output_shapes =
         op::v9::shape_infer(op.get(), PartialShapes{{}, {}, {}, {3}}, make_tensor_accessor(constant_map));
diff --git a/src/tests/functional/shared_test_classes/src/subgraph/mul_conv_fusion.cpp b/src/tests/functional/shared_test_classes/src/subgraph/mul_conv_fusion.cpp
index 411cff4a46a..4fcc4c562d4 100644
--- a/src/tests/functional/shared_test_classes/src/subgraph/mul_conv_fusion.cpp
+++ b/src/tests/functional/shared_test_classes/src/subgraph/mul_conv_fusion.cpp
@@ -5,6 +5,7 @@
 #include "shared_test_classes/subgraph/mul_conv_fusion.hpp"
 
 #include "common_test_utils/graph_comparator.hpp"
+#include "openvino/core/validation_util.hpp"
 #include "openvino/pass/manager.hpp"
 #include "ov_models/builders.hpp"
 #include "transformations/common_optimizations/mul_conv_fusion.hpp"
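
For reviewers, a minimal usage sketch of the accessor after this change, mirroring the eye.cpp test above. The standalone `main` wrapper and the bare `"tensor_data_accessor.hpp"` include path are illustrative assumptions, not part of this patch:

```cpp
#include <unordered_map>

#include "tensor_data_accessor.hpp"

int main() {
    int64_t rows = 8;
    // Non-owning view over caller-owned data, keyed by input port (the accessor
    // does not take ownership of the container, per the class documentation).
    const auto constant_map =
        std::unordered_map<size_t, ov::Tensor>{{0, {ov::element::i64, ov::Shape{}, &rows}}};

    const auto accessor = ov::make_tensor_accessor(constant_map);
    const auto t0 = accessor(0);  // port found: Tensor viewing `rows`
    const auto t9 = accessor(9);  // port missing: default-constructed (empty) Tensor
    return (t0 && !t9) ? 0 : 1;   // ov::Tensor's explicit operator bool reports whether data is held
}
```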