[IE Samples] json configuration reader and dumper for benchmark_app (#9648)

* added load_config and dump_config functions implemented with json library

* add warning, upd readme

* Update samples/cpp/benchmark_app/README.md

Co-authored-by: Fedor Zharinov <fedor.zharinov@intel.com>

Co-authored-by: Fedor Zharinov <fedor.zharinov@intel.com>
Ivan Vikhrev 2022-01-18 11:22:47 +03:00 committed by GitHub
parent eb9e2c986a
commit a2cf98bebb
5 changed files with 46 additions and 9 deletions

@@ -10,7 +10,7 @@ file (GLOB HDR ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
ie_add_sample(NAME ${TARGET_NAME}
              SOURCES ${SRC}
              HEADERS ${HDR}
-             DEPENDENCIES format_reader ie_samples_utils)
+             DEPENDENCIES nlohmann_json format_reader ie_samples_utils)
# Optional OpenCL dependnency

@@ -148,8 +148,8 @@ Options:
    -report_folder            Optional. Path to a folder where statistics report is stored.
    -exec_graph_path          Optional. Path to a file where to store executable graph information serialized.
    -pc                       Optional. Report performance counters.
-    -dump_config              Optional. Path to XML/YAML/JSON file to dump IE parameters, which were set by application.
-    -load_config              Optional. Path to XML/YAML/JSON file to load custom IE parameters. Please note, command line parameters have higher priority then parameters from configuration file.
+    -dump_config              Optional. Path to JSON file to dump IE parameters, which were set by application.
+    -load_config              Optional. Path to JSON file to load custom IE parameters. Please note, command line parameters have higher priority than parameters from configuration file.
```
Running the application with the empty list of options yields the usage message given above and an error message.
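For reference (not part of the diff), the file consumed by `-load_config` and produced by `-dump_config` is a two-level JSON object: device names on the first level and plugin configuration keys with string values on the second, mirroring the `std::map<std::string, std::map<std::string, std::string>>` the application uses internally. A minimal sketch, with illustrative device and key names:

```
{
    "CPU": {
        "CPU_THROUGHPUT_STREAMS": "4",
        "CPU_BIND_THREAD": "YES"
    }
}
```

Note that values are written as JSON strings even when they are numeric, since `load_config` reads each option with `get<std::string>()`.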

@@ -159,11 +159,11 @@ int main(int argc, char* argv[]) {
        // Load device config file if specified
        std::map<std::string, std::map<std::string, std::string>> config;
-#ifdef USE_OPENCV
        if (!FLAGS_load_config.empty()) {
            load_config(FLAGS_load_config, config);
        }
-#endif
        /** This vector stores paths to the processed images with input names**/
        auto inputFiles = parseInputArguments(gflags::GetArgvs());
@@ -1028,12 +1028,10 @@ int main(int argc, char* argv[]) {
        // -------------------------------------------------------------
        next_step();
-#ifdef USE_OPENCV
        if (!FLAGS_dump_config.empty()) {
            dump_config(FLAGS_dump_config, config);
            slog::info << "Inference Engine configuration settings were dumped to " << FLAGS_dump_config << slog::endl;
        }
-#endif
        if (!FLAGS_exec_graph_path.empty()) {
            try {

@@ -6,6 +6,7 @@
#include <algorithm>
#include <map>
+#include <nlohmann/json.hpp>
#include <regex>
#include <string>
#include <utility>
@@ -648,6 +649,7 @@ std::vector<benchmark_app::InputsInfo> getInputsInfo(const std::string& shape_st
#ifdef USE_OPENCV
void dump_config(const std::string& filename, const std::map<std::string, std::map<std::string, std::string>>& config) {
+    slog::warn << "YAML and XML formats for config file won't be supported soon." << slog::endl;
    auto plugin_to_opencv_format = [](const std::string& str) -> std::string {
        if (str.find("_") != std::string::npos) {
            slog::warn
@@ -675,6 +677,7 @@ void dump_config(const std::string& filename, const std::map<std::string, std::m
}
void load_config(const std::string& filename, std::map<std::string, std::map<std::string, std::string>>& config) {
+    slog::warn << "YAML and XML formats for config file won't be supported soon." << slog::endl;
    auto opencv_to_plugin_format = [](const std::string& str) -> std::string {
        std::string new_str(str);
        auto pos = new_str.find("_");
@@ -698,6 +701,44 @@ void load_config(const std::string& filename, std::map<std::string, std::map<std
        }
    }
}
+#else
+void dump_config(const std::string& filename, const std::map<std::string, std::map<std::string, std::string>>& config) {
+    nlohmann::json jsonConfig;
+    for (const auto& item : config) {
+        std::string deviceName = item.first;
+        for (const auto& option : item.second) {
+            jsonConfig[deviceName][option.first] = option.second;
+        }
+    }
+    std::ofstream ofs(filename);
+    if (!ofs.is_open()) {
+        throw std::runtime_error("Can't open config file \"" + filename + "\" for writing.");
+    }
+    ofs << jsonConfig;
+}
+
+void load_config(const std::string& filename, std::map<std::string, std::map<std::string, std::string>>& config) {
+    std::ifstream ifs(filename);
+    if (!ifs.is_open()) {
+        throw std::runtime_error("Can't load config file \"" + filename + "\".");
+    }
+    nlohmann::json jsonConfig;
+    try {
+        ifs >> jsonConfig;
+    } catch (const nlohmann::json::parse_error& e) {
+        throw std::runtime_error("Can't parse config file \"" + filename + "\".\n" + e.what());
+    }
+    for (const auto& item : jsonConfig.items()) {
+        std::string deviceName = item.key();
+        for (const auto& option : item.value().items()) {
+            config[deviceName][option.key()] = option.value().get<std::string>();
+        }
+    }
+}
#endif
#ifdef USE_OPENCV
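To make the new round trip concrete, here is a small standalone sketch (not part of the patch) of what the JSON-based `dump_config`/`load_config` pair does with nlohmann::json: the nested device-to-options map is serialized to a two-level JSON object on disk and rebuilt on load. The file name and the `CPU_THROUGHPUT_STREAMS` key are placeholders for illustration.

```cpp
// Standalone sketch of the JSON round trip performed by the new
// dump_config/load_config pair (illustration only; not the sample's code).
#include <fstream>
#include <iostream>
#include <map>
#include <string>

#include <nlohmann/json.hpp>

using Config = std::map<std::string, std::map<std::string, std::string>>;

int main() {
    Config config = {{"CPU", {{"CPU_THROUGHPUT_STREAMS", "4"}}}};  // placeholder option

    // "dump": nested map -> two-level JSON object written to disk
    nlohmann::json j;
    for (const auto& device : config)
        for (const auto& option : device.second)
            j[device.first][option.first] = option.second;
    std::ofstream ofs("config.json");
    ofs << j;  // writes {"CPU":{"CPU_THROUGHPUT_STREAMS":"4"}}
    ofs.close();

    // "load": parse the file back and rebuild the same nested map
    nlohmann::json parsed;
    std::ifstream ifs("config.json");
    ifs >> parsed;
    Config loaded;
    for (const auto& device : parsed.items())
        for (const auto& option : device.value().items())
            loaded[device.key()][option.key()] = option.value().get<std::string>();

    std::cout << loaded["CPU"]["CPU_THROUGHPUT_STREAMS"] << std::endl;  // prints 4
    return 0;
}
```

Because this path no longer depends on OpenCV's FileStorage, the functions become available in non-OpenCV builds as well, which is why the USE_OPENCV guards around the call sites and declarations are removed elsewhere in this commit.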

@@ -122,10 +122,8 @@ std::vector<benchmark_app::InputsInfo> getInputsInfo(const std::string& shape_st
                                                     const std::string& mean_string,
                                                     const std::vector<ov::Output<const ov::Node>>& input_info);
-#ifdef USE_OPENCV
void dump_config(const std::string& filename, const std::map<std::string, std::map<std::string, std::string>>& config);
void load_config(const std::string& filename, std::map<std::string, std::map<std::string, std::string>>& config);
-#endif
extern const std::vector<std::string> supported_image_extensions;
extern const std::vector<std::string> supported_binary_extensions;