diff --git a/src/inference/src/core.cpp b/src/inference/src/core.cpp
index 72f2a84cea8..80bf4f199fe 100644
--- a/src/inference/src/core.cpp
+++ b/src/inference/src/core.cpp
@@ -179,24 +179,9 @@ CompiledModel Core::import_model(std::istream& modelStream, const std::string& d
 CompiledModel Core::import_model(std::istream& modelStream, const RemoteContext& context, const AnyMap& config) {
     OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
 
-    using ExportMagic = std::array<char, 4>;
-    constexpr static const ExportMagic exportMagic = {{0x1, 0xE, 0xE, 0x1}};
-
-    std::string deviceName;
-    ExportMagic magic = {};
-    auto currentPos = modelStream.tellg();
-    modelStream.read(magic.data(), magic.size());
-    if (exportMagic == magic) {
-        std::getline(modelStream, deviceName);
-    } else {
-        OPENVINO_ASSERT(false,
-                        "Passed compiled stream does not contain device name. "
-                        "Please, provide device name manually");
-    }
-    modelStream.seekg(currentPos, modelStream.beg);
-
+    auto parsed = parseDeviceNameIntoConfig(context.get_device_name(), config);
     OV_CORE_CALL_STATEMENT({
-        auto exec = _impl->get_plugin(deviceName).import_model(modelStream, {});
+        auto exec = _impl->get_plugin(parsed._deviceName).import_model(modelStream, context, parsed._config);
         return {exec._ptr, exec._so};
     });
 }
diff --git a/src/inference/src/dev/converter_utils.cpp b/src/inference/src/dev/converter_utils.cpp
index dce19714d08..2d057fecc68 100644
--- a/src/inference/src/dev/converter_utils.cpp
+++ b/src/inference/src/dev/converter_utils.cpp
@@ -56,21 +56,11 @@ void fill_output_info(ov::Output<ov::Node>& input, InferenceEngine::DataPtr& out
     }
 }
 
-InferenceEngine::SizeVector get_dims(const ov::Output<const ov::Node>& port,
-                                     const std::function<bool(InferenceEngine::SizeVector& dims)>& callback = {}) {
+InferenceEngine::SizeVector get_dims(const ov::Output<const ov::Node>& port) {
     InferenceEngine::SizeVector dims = {};
     const auto& p_shape = port.get_partial_shape();
     if (p_shape.is_static())
         dims = p_shape.get_shape();
-    else {
-        if (!callback || !callback(dims)) {
-            if (p_shape.rank().is_static()) {
-                for (size_t i = 0; i < static_cast<size_t>(p_shape.rank().get_length()); i++) {
-                    dims.emplace_back(0);
-                }
-            }
-        }
-    }
     return dims;
 }
 
@@ -81,14 +71,7 @@ void ov::legacy_convert::fill_input_info(const ov::Output<const ov::Node>& input
     if (!input_info) {
         // Create input info
         auto param_name = input.get_node()->get_friendly_name();
-        auto dims = get_dims(input, [&](InferenceEngine::SizeVector& dims) -> bool {
-            auto param = std::dynamic_pointer_cast<const ov::op::v0::Parameter>(input.get_node_shared_ptr());
-            if (param && param->get_partial_shape().is_static()) {
-                dims = param->get_partial_shape().get_shape();
-                return true;
-            }
-            return false;
-        });
+        auto dims = get_dims(input);
         InferenceEngine::TensorDesc desc(InferenceEngine::details::convertPrecision(input.get_element_type()),
                                          dims,
                                          InferenceEngine::TensorDesc::getLayoutByDims(dims));
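
For reviewers, a minimal usage sketch (not part of the patch) of the overload this change reroutes. It assumes a device plugin that implements remote contexts (GPU is used here as an example) and a placeholder blob path. After this change, the stream passed to the context-based `import_model` no longer has to begin with the `{0x1, 0xE, 0xE, 0x1}` export magic followed by an embedded device-name line; the device and properties are resolved from the context itself.

```cpp
#include <fstream>

#include <openvino/runtime/core.hpp>

int main() {
    ov::Core core;

    // The device name is now taken from the context (context.get_device_name()
    // inside Core::import_model), not parsed out of a magic header in the stream.
    // Assumes a GPU plugin supporting remote contexts is available.
    ov::RemoteContext context = core.get_default_context("GPU");

    // A blob previously produced by ov::CompiledModel::export_model();
    // "model.blob" is a placeholder path.
    std::ifstream blob("model.blob", std::ios::binary);

    // The context and the parsed config are now forwarded to the plugin's
    // import_model, matching the added lines in core.cpp above.
    ov::CompiledModel compiled = core.import_model(blob, context, {});
    return 0;
}
```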