Support set_tensor for remote tensors in the proxy plugin
This commit is contained in:
@@ -51,7 +51,13 @@ ov::Tensor ov::proxy::InferRequest::get_tensor(const ov::Output<const ov::Node>&
|
||||
}
|
||||
|
||||
void ov::proxy::InferRequest::set_tensor(const ov::Output<const ov::Node>& port, const ov::Tensor& tensor) {
    // Forward the tensor to the wrapped device request. If the caller passed a
    // proxy RemoteTensor, it must first be unwrapped/re-wrapped through the
    // proxy's RemoteContext so the underlying hardware plugin receives a
    // remote tensor it actually understands.
    auto new_tensor = tensor;
    if (tensor.is<ov::RemoteTensor>()) {
        auto remote_context = std::dynamic_pointer_cast<ov::proxy::RemoteContext>(m_compiled_model->get_context());
        // The context of a proxy compiled model is expected to be a proxy
        // RemoteContext; a failed cast means an internal invariant is broken.
        OPENVINO_ASSERT(remote_context);
        new_tensor = remote_context->wrap_tensor(tensor.as<ov::RemoteTensor>());
    }
    // NOTE: set the tensor exactly once, after any wrapping. The stale
    // pre-wrap call `m_infer_request->set_tensor(port, tensor);` (a leftover
    // removed line from the diff) handed the raw proxy tensor to the device
    // request and has been dropped.
    m_infer_request->set_tensor(port, new_tensor);
}
|
||||
|
||||
std::vector<ov::Tensor> ov::proxy::InferRequest::get_tensors(const ov::Output<const ov::Node>& port) const {
|
||||
@@ -68,7 +74,15 @@ std::vector<ov::Tensor> ov::proxy::InferRequest::get_tensors(const ov::Output<co
|
||||
|
||||
void ov::proxy::InferRequest::set_tensors(const ov::Output<const ov::Node>& port,
                                          const std::vector<ov::Tensor>& tensors) {
    // Same contract as set_tensor(), but for a batch of tensors: any proxy
    // RemoteTensor in the list is re-wrapped through the proxy RemoteContext
    // before the whole batch is handed to the wrapped device request.
    //
    // NOTE: the stale early `return m_infer_request->set_tensors(port, tensors);`
    // (a leftover removed line from the diff) made everything below unreachable
    // and bypassed the wrapping entirely; it has been dropped.
    auto new_tensors = tensors;
    for (auto&& tensor : new_tensors) {
        if (tensor.is<ov::RemoteTensor>()) {
            auto remote_context =
                std::dynamic_pointer_cast<ov::proxy::RemoteContext>(m_compiled_model->get_context());
            // Proxy compiled models must expose a proxy RemoteContext.
            OPENVINO_ASSERT(remote_context);
            tensor = remote_context->wrap_tensor(tensor.as<ov::RemoteTensor>());
        }
    }
    m_infer_request->set_tensors(port, new_tensors);
}
|
||||
|
||||
std::vector<std::shared_ptr<ov::IVariableState>> ov::proxy::InferRequest::query_state() const {
|
||||
|
||||
Reference in New Issue
Block a user