From 6c69535d6c556785f6f0336fac324f24c251a3d5 Mon Sep 17 00:00:00 2001 From: Fedor Zharinov Date: Thu, 13 Jan 2022 23:34:38 +0300 Subject: [PATCH] Benchmark_app batch calculation fix (#9554) * BenchmarkApp - batch size calculation fix * stylefix * -ip/op fix * stylefix --- samples/cpp/benchmark_app/inputs_filling.cpp | 7 ++- samples/cpp/benchmark_app/main.cpp | 36 ++++++++++-- samples/cpp/benchmark_app/utils.cpp | 16 +---- samples/cpp/benchmark_app/utils.hpp | 1 - .../utils/include/samples/args_helper.hpp | 3 +- samples/cpp/common/utils/src/args_helper.cpp | 58 +------------------ 6 files changed, 42 insertions(+), 79 deletions(-) diff --git a/samples/cpp/benchmark_app/inputs_filling.cpp b/samples/cpp/benchmark_app/inputs_filling.cpp index 1d78f375578..033280eae4b 100644 --- a/samples/cpp/benchmark_app/inputs_filling.cpp +++ b/samples/cpp/benchmark_app/inputs_filling.cpp @@ -455,11 +455,16 @@ std::map getTensors(std::map batchSizes; + for (const auto& info : app_inputs_info) { + batchSizes.push_back(getBatchSize(info)); + } + for (const auto& files : inputFiles) { std::string input_name = files.first.empty() ? 
app_inputs_info[0].begin()->first : files.first; size_t n_shape = 0, m_file = 0; while (n_shape < app_inputs_info.size() || m_file < filesNum) { - size_t batchSize = getBatchSize(app_inputs_info[n_shape % app_inputs_info.size()]); + size_t batchSize = batchSizes[n_shape % app_inputs_info.size()]; size_t inputId = m_file % files.second.size(); auto input_info = app_inputs_info[n_shape % app_inputs_info.size()].at(input_name); diff --git a/samples/cpp/benchmark_app/main.cpp b/samples/cpp/benchmark_app/main.cpp index b1e5f3d6c75..43449d10b06 100644 --- a/samples/cpp/benchmark_app/main.cpp +++ b/samples/cpp/benchmark_app/main.cpp @@ -467,13 +467,25 @@ int main(int argc, char* argv[]) { next_step(); auto preproc = ov::preprocess::PrePostProcessor(model); - processPrecision(*model, FLAGS_ip, FLAGS_op, FLAGS_iop); + ov::runtime::ConfigMap user_precisions_map; + if (!FLAGS_iop.empty()) { + user_precisions_map = parseArgMap(FLAGS_iop); + } + + const auto input_precision = FLAGS_ip.empty() ? ov::element::undefined : getPrecision2(FLAGS_ip); + const auto output_precision = FLAGS_op.empty() ? 
ov::element::undefined : getPrecision2(FLAGS_op); + for (auto& item : model->inputs()) { // if precision for input set by user, then set it to app_inputs const auto& name = item.get_any_name(); - if (!FLAGS_ip.empty() || FLAGS_iop.find(name) != std::string::npos) { + if (user_precisions_map.count(name)) { + const auto precision = getPrecision2(user_precisions_map.at(name)); for (auto& info : app_inputs_info) { - info.at(name).type = item.get_element_type(); + info.at(name).type = precision; + } + } else if (input_precision != ov::element::undefined) { + for (auto& info : app_inputs_info) { + info.at(name).type = input_precision; } } else if (app_inputs_info[0].at(name).isImage()) { // image input, set U8 @@ -488,6 +500,17 @@ int main(int argc, char* argv[]) { in.model().set_layout(app_inputs_info[0].at(name).layout); } + for (auto& item : model->outputs()) { + // if precision for output set by user, then set it via the preprocessor + const auto& name = item.get_any_name(); + if (user_precisions_map.count(name)) { + const auto precision = getPrecision2(user_precisions_map.at(name)); + preproc.output(name).tensor().set_element_type(precision); + } else if (output_precision != ov::element::undefined) { + preproc.output(name).tensor().set_element_type(output_precision); + } + } + model = preproc.build(); // Check if network has dynamic shapes @@ -499,9 +522,10 @@ int main(int argc, char* argv[]) { }); topology_name = model->get_friendly_name(); - // use batch size according to provided layout and shapes (static case) - if (!isDynamicNetwork) { - batchSize = getModelInputBatchSize(*model); + + // Calculate batch size according to provided layout and shapes (static case) + if (!isDynamicNetwork && app_inputs_info.size()) { + batchSize = getBatchSize(app_inputs_info.front()); slog::info << "Network batch size: " << batchSize << slog::endl; } else if (batchSize == 0) { diff --git a/samples/cpp/benchmark_app/utils.cpp b/samples/cpp/benchmark_app/utils.cpp index 
483aefaa37c..5b5349675ca 100644 --- a/samples/cpp/benchmark_app/utils.cpp +++ b/samples/cpp/benchmark_app/utils.cpp @@ -158,24 +158,14 @@ size_t getBatchSize(const benchmark_app::InputsInfo& inputs_info) { } } if (batch_size == 0) { + slog::warn << "No batch dimension was found at any input, assuming batch to be 1. Beware: this might affect " "FPS calculation." << slog::endl; batch_size = 1; } return batch_size; } -size_t getModelInputBatchSize(const ov::Model& model) { - try { - auto& param = model.get_parameters()[0]; - auto layout = param->get_layout(); - return param->get_shape().at(ov::layout::batch_idx(layout)); - } catch (...) { - slog::warn - << "No batch dimension was found, asssuming batch to be 1. Beware: this might affect FPS calculation." - << slog::endl; - return 1; // Default batch value - } -} - std::string getShapeString(const ov::Shape& shape) { std::stringstream ss; ss << shape; diff --git a/samples/cpp/benchmark_app/utils.hpp b/samples/cpp/benchmark_app/utils.hpp index 8851987d9d9..d53ad5cd0a2 100644 --- a/samples/cpp/benchmark_app/utils.hpp +++ b/samples/cpp/benchmark_app/utils.hpp @@ -58,7 +58,6 @@ std::vector parseDevices(const std::string& device_string); uint32_t deviceDefaultDeviceDurationInSeconds(const std::string& device); std::map parseNStreamsValuePerDevice(const std::vector& devices, const std::string& values_string); -size_t getModelInputBatchSize(const ov::Model& model); std::string getShapeString(const ov::Shape& shape); std::string getShapesString(const benchmark_app::PartialShapes& shapes); size_t getBatchSize(const benchmark_app::InputsInfo& inputs_info); diff --git a/samples/cpp/common/utils/include/samples/args_helper.hpp b/samples/cpp/common/utils/include/samples/args_helper.hpp index 8b87e79e344..88fcbe4296e 100644 --- a/samples/cpp/common/utils/include/samples/args_helper.hpp +++ b/samples/cpp/common/utils/include/samples/args_helper.hpp @@ -29,6 +29,7 @@ void readInputFilesArguments(std::vector& files, const 
std::string& * @return files updated vector of verified input files */ void parseInputFilesArguments(std::vector& files); +std::map parseArgMap(std::string argMap); void printInputAndOutputsInfo(const ov::Model& network); @@ -45,4 +46,4 @@ void configurePrePostProcessing(std::shared_ptr& function, void printInputAndOutputsInfo(const ov::Model& network); void printInputAndOutputsInfoShort(const ov::Model& network); -void processPrecision(const ov::Model& network, const std::string& ip, const std::string& op, const std::string& iop); +ov::element::Type getPrecision2(const std::string& value); \ No newline at end of file diff --git a/samples/cpp/common/utils/src/args_helper.cpp b/samples/cpp/common/utils/src/args_helper.cpp index 207ee1bae7e..02a2dc979e2 100644 --- a/samples/cpp/common/utils/src/args_helper.cpp +++ b/samples/cpp/common/utils/src/args_helper.cpp @@ -102,7 +102,6 @@ void parseInputFilesArguments(std::vector& files) { } } -namespace { std::vector splitStringList(const std::string& str, char delim) { if (str.empty()) return {}; @@ -200,8 +199,6 @@ ov::element::Type getType(const std::string& value) { return getType(value, supported_types); } -} // namespace - namespace { using supported_layouts_t = std::unordered_map; using matchLayoutToDims_t = std::unordered_map; @@ -458,57 +455,4 @@ ov::element::Type getPrecision2(const std::string& value) { }; return getPrecision(value, supported_precisions); -} - -void setPrecisions(const ov::Model& network, const std::string& iop) { - const auto user_precisions_map = parseArgMap(iop); - - for (auto&& item : user_precisions_map) { - const auto& layer_name = item.first; - const auto& user_precision = item.second; - - auto& params = network.get_parameters(); - auto& results = network.get_results(); - - const auto input = - std::find_if(params.begin(), params.end(), [&item](const std::shared_ptr& a) { - return a->get_friendly_name() == item.first; - }); - const auto output = - std::find_if(results.begin(), 
results.end(), [&layer_name](const std::shared_ptr& a) { - return a->get_friendly_name() == layer_name; - }); - - if (input != params.end()) { - (*input)->set_element_type(getPrecision2(user_precision)); - } else if (output != results.end()) { - for (int i = 0; i < (*output)->get_output_size(); i++) { - (*output)->set_output_type(i, getPrecision2(user_precision), (*output)->get_output_shape(i)); - } - } else { - throw std::logic_error(layer_name + " is not an input neither output"); - } - } -} - -void processPrecision(const ov::Model& network, const std::string& ip, const std::string& op, const std::string& iop) { - if (!ip.empty()) { - const auto user_precision = getPrecision2(ip); - for (auto&& layer : network.get_parameters()) { - layer->set_element_type(user_precision); - } - } - - if (!op.empty()) { - auto user_precision = getPrecision2(op); - for (auto&& layer : network.get_results()) { - for (int i = 0; i < layer->get_output_size(); i++) { - layer->set_output_type(i, user_precision, layer->get_output_shape(i)); - } - } - } - - if (!iop.empty()) { - setPrecisions(network, iop); - } -} +} \ No newline at end of file