Fixed some leftovers (#15251)

* Fixed some leftovers

* Remove callback

* Changed logic
Author: Ilya Churaev, 2023-01-23 16:02:58 +04:00 (committed by GitHub)
parent 5a986a5b16
commit ba908e8e2f
2 changed files with 4 additions and 36 deletions


@@ -179,24 +179,9 @@ CompiledModel Core::import_model(std::istream& modelStream, const std::string& d
 CompiledModel Core::import_model(std::istream& modelStream, const RemoteContext& context, const AnyMap& config) {
     OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
-
-    using ExportMagic = std::array<char, 4>;
-    constexpr static const ExportMagic exportMagic = {{0x1, 0xE, 0xE, 0x1}};
-
-    std::string deviceName;
-    ExportMagic magic = {};
-    auto currentPos = modelStream.tellg();
-    modelStream.read(magic.data(), magic.size());
-    if (exportMagic == magic) {
-        std::getline(modelStream, deviceName);
-    } else {
-        OPENVINO_ASSERT(false,
-                        "Passed compiled stream does not contain device name. "
-                        "Please, provide device name manually");
-    }
-    modelStream.seekg(currentPos, modelStream.beg);
+    auto parsed = parseDeviceNameIntoConfig(context.get_device_name(), config);
 
     OV_CORE_CALL_STATEMENT({
-        auto exec = _impl->get_plugin(deviceName).import_model(modelStream, {});
+        auto exec = _impl->get_plugin(parsed._deviceName).import_model(modelStream, context, parsed._config);
         return {exec._ptr, exec._so};
     });
 }
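
Note: with this change the device name is no longer recovered from ExportMagic bytes at the head of the stream; it is resolved from the RemoteContext via parseDeviceNameIntoConfig, which also removes the hard failure for streams exported without the magic header. A minimal caller-side sketch, assuming a device such as "GPU" that exposes a default remote context (the blob path is hypothetical):

#include <fstream>
#include <openvino/runtime/core.hpp>

int main() {
    ov::Core core;
    // The target device now comes from the context itself, not from
    // magic bytes written at the head of the exported stream.
    ov::RemoteContext context = core.get_default_context("GPU");  // assumes the GPU plugin is available
    std::ifstream blob("model.blob", std::ios::binary);           // hypothetical blob path
    ov::CompiledModel compiled = core.import_model(blob, context, {});
    return 0;
}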


@@ -56,21 +56,11 @@ void fill_output_info(ov::Output<ov::Node>& input, InferenceEngine::DataPtr& out
     }
 }
 
-InferenceEngine::SizeVector get_dims(const ov::Output<const ov::Node>& port,
-                                     const std::function<bool(InferenceEngine::SizeVector& dims)>& callback = {}) {
+InferenceEngine::SizeVector get_dims(const ov::Output<const ov::Node>& port) {
     InferenceEngine::SizeVector dims = {};
     const auto& p_shape = port.get_partial_shape();
     if (p_shape.is_static())
         dims = p_shape.get_shape();
-    else {
-        if (!callback || !callback(dims)) {
-            if (p_shape.rank().is_static()) {
-                for (size_t i = 0; i < static_cast<size_t>(p_shape.rank().get_length()); i++) {
-                    dims.emplace_back(0);
-                }
-            }
-        }
-    }
     return dims;
 }
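
Note: with the callback parameter gone, get_dims has a single rule: a static shape yields its concrete dims, and anything dynamic now yields an empty SizeVector instead of a rank-length vector of zeros. A standalone restatement of the simplified logic (a sketch, not the library symbol, which is internal to this file):

#include <openvino/core/partial_shape.hpp>
#include <vector>

// InferenceEngine::SizeVector is std::vector<size_t>, so a plain vector stands in here.
std::vector<size_t> get_dims_sketch(const ov::PartialShape& p_shape) {
    std::vector<size_t> dims;
    if (p_shape.is_static())
        dims = p_shape.get_shape();  // concrete dims for a static shape
    return dims;                     // empty for any dynamic shape
}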
@@ -81,14 +71,7 @@ void ov::legacy_convert::fill_input_info(const ov::Output<const ov::Node>& input
     if (!input_info) {
         // Create input info
         auto param_name = input.get_node()->get_friendly_name();
-        auto dims = get_dims(input, [&](InferenceEngine::SizeVector& dims) -> bool {
-            auto param = std::dynamic_pointer_cast<const ov::op::v0::Parameter>(input.get_node_shared_ptr());
-            if (param && param->get_partial_shape().is_static()) {
-                dims = param->get_partial_shape().get_shape();
-                return true;
-            }
-            return false;
-        });
+        auto dims = get_dims(input);
         InferenceEngine::TensorDesc desc(InferenceEngine::details::convertPrecision(input.get_element_type()),
                                          dims,
                                          InferenceEngine::TensorDesc::getLayoutByDims(dims));
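
Note: the removed lambda special-cased ov::op::v0::Parameter, but a Parameter's output port reports the node's own partial shape, so the static branch inside get_dims already covers that case. A small illustrative check of the equivalence (the shape is an arbitrary example):

#include <cassert>
#include <memory>
#include <openvino/op/parameter.hpp>

int main() {
    auto param = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1, 3, 224, 224});
    // The port and the node agree on the partial shape, so the callback
    // never produced a result that get_dims(input) would miss.
    assert(param->output(0).get_partial_shape() == param->get_partial_shape());
    return 0;
}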