Fix remote blob creation to use original shape
commit 35c674aa97
parent 3644c26402
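The fix changes the cldnn::layout construction in all four remote-blob paths (reuse_surface, reuse_memory, create_buffer, create_usm): instead of passing the data type, the format, and the dims squeezed through tensor_from_dims, the layout is now built from an ov::PartialShape created directly from desc.getDims(), plus the same data type and format, so the blob keeps its original shape. The stand-alone sketch below is not OpenVINO code; it only illustrates that idea with hypothetical helpers (tensor_from_dims_stub, shape_from_dims_stub), under the assumption that the legacy fixed-rank tensor path pads lower-rank dims while the partial-shape path preserves them as given.

// Hedged illustration only: the two stubs below are hypothetical stand-ins,
// not the real tensor_from_dims / ov::PartialShape implementations.
#include <cstddef>
#include <iostream>
#include <vector>

// Assumed legacy behaviour: expand any dims to a fixed rank of 4, padding with 1s.
std::vector<size_t> tensor_from_dims_stub(const std::vector<size_t>& dims) {
    std::vector<size_t> padded(4, 1);
    for (size_t i = 0; i < dims.size() && i < 4; ++i)
        padded[i] = dims[i];
    return padded;
}

// Behaviour being illustrated by the fix: keep the dims exactly as reported.
std::vector<size_t> shape_from_dims_stub(const std::vector<size_t>& dims) {
    return dims;
}

static void print_shape(const char* tag, const std::vector<size_t>& v) {
    std::cout << tag << " rank=" << v.size() << " [";
    for (size_t i = 0; i < v.size(); ++i)
        std::cout << v[i] << (i + 1 < v.size() ? ", " : "");
    std::cout << "]\n";
}

int main() {
    const std::vector<size_t> dims = {1, 3, 224};  // a 3D blob description
    print_shape("legacy padded tensor:", tensor_from_dims_stub(dims));  // rank=4 [1, 3, 224, 1]
    print_shape("original shape kept: ", shape_from_dims_stub(dims));   // rank=3 [1, 3, 224]
    return 0;
}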
@@ -142,9 +142,9 @@ InferenceEngine::RemoteBlob::Ptr RemoteContextImpl::reuse_surface(InferenceEngin
     cldnn::shared_surface surf = extract_object<cldnn::shared_surface>(params, GPU_PARAM_KEY(DEV_OBJECT_HANDLE));
 #endif
 
-    cldnn::layout layout(DataTypeFromPrecision(desc.getPrecision()),
-                         ImageFormatFromLayout(desc.getLayout()),
-                         tensor_from_dims(desc.getDims()));
+    cldnn::layout layout(ov::PartialShape(desc.getDims()),
+                         DataTypeFromPrecision(desc.getPrecision()),
+                         FormatFromLayout(desc.getLayout()));
 
 #ifdef _WIN32
     auto blob = std::make_shared<RemoteD3DSurface>(public_context, stream,
@@ -164,10 +164,9 @@ InferenceEngine::RemoteBlob::Ptr RemoteContextImpl::reuse_memory(InferenceEngine
                                                                  cldnn::shared_handle mem,
                                                                  BlobType blob_type) {
     auto& stream = m_engine->get_service_stream();
-
-    cldnn::layout layout(DataTypeFromPrecision(desc.getPrecision()),
-                         FormatFromLayout(desc.getLayout()),
-                         tensor_from_dims(desc.getDims()));
+    cldnn::layout layout(ov::PartialShape(desc.getDims()),
+                         DataTypeFromPrecision(desc.getPrecision()),
+                         FormatFromLayout(desc.getLayout()));
 
     switch (blob_type) {
     case BlobType::BT_BUF_SHARED: {
@@ -194,9 +193,10 @@ InferenceEngine::RemoteBlob::Ptr RemoteContextImpl::reuse_memory(InferenceEngine
 
 InferenceEngine::RemoteBlob::Ptr RemoteContextImpl::create_buffer(InferenceEngine::gpu::ClContext::Ptr public_context,
                                                                   const InferenceEngine::TensorDesc& desc) {
-    cldnn::layout layout(DataTypeFromPrecision(desc.getPrecision()),
-                         FormatFromLayout(desc.getLayout()),
-                         tensor_from_dims(desc.getDims()));
+    cldnn::layout layout(ov::PartialShape(desc.getDims()),
+                         DataTypeFromPrecision(desc.getPrecision()),
+                         FormatFromLayout(desc.getLayout()));
+
     auto& stream = m_engine->get_service_stream();
     return std::make_shared<RemoteCLbuffer>(public_context,
                                             stream,
@@ -209,9 +209,10 @@ InferenceEngine::RemoteBlob::Ptr RemoteContextImpl::create_buffer(InferenceEngin
 InferenceEngine::RemoteBlob::Ptr RemoteContextImpl::create_usm(InferenceEngine::gpu::ClContext::Ptr public_context,
                                                                const InferenceEngine::TensorDesc& desc,
                                                                BlobType alloc_type) {
-    cldnn::layout layout(DataTypeFromPrecision(desc.getPrecision()),
-                         FormatFromLayout(desc.getLayout()),
-                         tensor_from_dims(desc.getDims()));
+    cldnn::layout layout(ov::PartialShape(desc.getDims()),
+                         DataTypeFromPrecision(desc.getPrecision()),
+                         FormatFromLayout(desc.getLayout()));
+
     auto& stream = m_engine->get_service_stream();
 
     return std::make_shared<RemoteUSMbuffer>(public_context,