First time inference validation for dynamic shapes (#9671)
* add performance hint to time-infer tool
* add reshape methods for time-infer tool
* add function declarations to common_utils.h
* add check whether node is static (reshape pipeline)
* update descriptions of reshape functions
* fix logical operator
* add reshape_utils.h; move parsing logic into main function
* add tests_shared_lib to CMakeLists.txt
* fill blobs via setStaticShapesBlobs
* change delimiter
This commit is contained in:
@@ -12,8 +12,8 @@ using namespace InferenceEngine;
|
||||
* @brief Fill InferRequest blobs with random values or image information
|
||||
*/
|
||||
void fillBlobs(InferenceEngine::InferRequest inferRequest,
|
||||
const InferenceEngine::ConstInputsDataMap& inputsInfo,
|
||||
const size_t& batchSize) {
|
||||
const InferenceEngine::ConstInputsDataMap& inputsInfo,
|
||||
const size_t& batchSize) {
|
||||
std::vector<std::pair<size_t, size_t>> input_image_sizes;
|
||||
for (const ConstInputsDataMap::value_type& item : inputsInfo) {
|
||||
if (isImage(item.second))
|
||||
|
||||
@@ -61,7 +61,6 @@ inline std::pair<size_t, size_t> getTensorHeightWidth(const InferenceEngine::Ten
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @brief Fill InferenceEngine blob with random values
|
||||
*/
|
||||
@@ -110,5 +109,5 @@ void fillBlobImInfo(Blob::Ptr& inputBlob,
|
||||
* @brief Fill InferRequest blobs with random values or image information
|
||||
*/
|
||||
void fillBlobs(InferenceEngine::InferRequest inferRequest,
|
||||
const InferenceEngine::ConstInputsDataMap& inputsInfo,
|
||||
const size_t& batchSize);
|
||||
const InferenceEngine::ConstInputsDataMap& inputsInfo,
|
||||
const size_t& batchSize);
|
||||
104
tests/lib/src/reshape_utils.cpp
Normal file
104
tests/lib/src/reshape_utils.cpp
Normal file
@@ -0,0 +1,104 @@
|
||||
// Copyright (C) 2018-2021 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "common_utils.h"
|
||||
#include "reshape_utils.h"
|
||||
#include <inference_engine.hpp>
|
||||
|
||||
using namespace InferenceEngine;
|
||||
|
||||
|
||||
/**
|
||||
* @brief Parse data shapes for model
|
||||
*/
|
||||
std::map<std::string, std::vector<size_t>> parseDataShapes(const std::string& shapeString) {
|
||||
std::map<std::string, std::vector<size_t>> data_shapes;
|
||||
// Parse input parameter string
|
||||
std::vector<std::string> inputsShapes = split(shapeString, '&');
|
||||
|
||||
for (int i = 0; i < inputsShapes.size(); i++) {
|
||||
std::vector<std::string> curLayout = split(inputsShapes[i], ':');
|
||||
|
||||
std::string curLayoutName = curLayout.at(0);
|
||||
std::vector<size_t> shape;
|
||||
|
||||
for (auto& dim : split(curLayout.at(1), ','))
|
||||
shape.emplace_back(std::stoi(dim));
|
||||
|
||||
data_shapes[curLayoutName] = shape;
|
||||
}
|
||||
return data_shapes;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @brief Parse input shapes for model reshape
|
||||
*/
|
||||
std::map<std::string, ov::PartialShape> parseReshapeShapes(const std::string& shapeString) {
|
||||
std::map<std::string, ov::PartialShape> reshape_info;
|
||||
// Parse input parameter string
|
||||
std::vector<std::string> inputsShapes = split(shapeString, '&');
|
||||
|
||||
for (int i = 0; i < inputsShapes.size(); i++) {
|
||||
std::vector<std::string> curLayout = split(inputsShapes[i], ':');
|
||||
|
||||
std::string curLayoutName = curLayout.at(0);
|
||||
std::vector<ov::Dimension> shape;
|
||||
|
||||
for (auto& dim : split(curLayout.at(1), ',')) {
|
||||
if (dim == "?" || dim == "-1") {
|
||||
shape.emplace_back(ov::Dimension::dynamic());
|
||||
}
|
||||
else {
|
||||
const std::string range_divider = "..";
|
||||
size_t range_index = dim.find(range_divider);
|
||||
if (range_index != std::string::npos) {
|
||||
std::string min = dim.substr(0, range_index);
|
||||
std::string max = dim.substr(range_index + range_divider.length());
|
||||
shape.emplace_back(ov::Dimension(std::stoi(min), std::stoi(max)));
|
||||
} else {
|
||||
shape.emplace_back(ov::Dimension(std::stoi(dim)));
|
||||
}
|
||||
}
|
||||
}
|
||||
reshape_info[curLayoutName] = ov::PartialShape(shape);
|
||||
}
|
||||
return reshape_info;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @brief Split input string using specified delimiter.
|
||||
Return vector with input tensor information
|
||||
*/
|
||||
/**
 * @brief Split input string using specified delimiter.
 *        Returns the resulting tokens in order; an empty input yields an
 *        empty vector, and a trailing delimiter does not add an empty token.
 */
std::vector<std::string> split(const std::string& s, char delim) {
    std::vector<std::string> tokens;
    std::istringstream stream(s);
    for (std::string token; std::getline(stream, token, delim);) {
        tokens.push_back(token);
    }
    return tokens;
}
|
||||
|
||||
|
||||
/**
|
||||
* @brief Reshape blobs with dynamic shapes with static information from data shape
|
||||
*/
|
||||
/**
 * @brief Reshape blobs with dynamic shapes with static information from data shape
 * @param inferRequest request whose input blobs are resized in place
 * @param inputsInfo input metadata of the executable network
 * @param dataShape map from input name to the static shape to apply;
 *        inputs absent from the map are left untouched
 */
void setStaticShapesBlobs(InferenceEngine::InferRequest inferRequest,
                          const InferenceEngine::ConstInputsDataMap& inputsInfo,
                          std::map<std::string, std::vector<size_t>> dataShape) {
    for (const ConstInputsDataMap::value_type& item : inputsInfo) {
        // Single lookup instead of count() followed by repeated operator[]
        const auto shapeIt = dataShape.find(item.first);
        if (shapeIt == dataShape.end())
            continue;  // no static shape provided for this input

        // Fetch the blob only when it is actually reshaped
        Blob::Ptr inputBlob = inferRequest.GetBlob(item.first);
        // SizeVector is a std::vector<size_t>, so copy the shape directly
        const SizeVector newInputShape(shapeIt->second.begin(), shapeIt->second.end());
        inputBlob->setShape(newInputShape);
    }
}
|
||||
36
tests/lib/src/reshape_utils.h
Normal file
36
tests/lib/src/reshape_utils.h
Normal file
@@ -0,0 +1,36 @@
|
||||
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <inference_engine.hpp>

// NOTE(review): a using-directive in a header injects InferenceEngine names
// into every including translation unit — consider qualifying names instead.
using namespace InferenceEngine;


/**
 * @brief Split input string using specified delimiter.
          Return vector with input tensor information
 */
std::vector<std::string> split(const std::string& s, char delim);


/**
 * @brief Parse input shapes for model reshape
 *        Format: "name:dim,dim,...&name:..." where a dim may be "?", "-1"
 *        or a "min..max" range for dynamic dimensions.
 */
std::map<std::string, ov::PartialShape> parseReshapeShapes(const std::string& shapeString);


/**
 * @brief Parse data shapes for model
 *        Format: "name:dim,dim,...&name:..." with static numeric dims only.
 */
std::map<std::string, std::vector<size_t>> parseDataShapes(const std::string& shapeString);


/**
 * @brief Reshape blobs with dynamic shapes with static information from data shape
 *        Inputs not present in dataShape are left untouched.
 */
void setStaticShapesBlobs(InferenceEngine::InferRequest inferRequest,
                          const InferenceEngine::ConstInputsDataMap& inputsInfo,
                          std::map<std::string, std::vector<size_t>> dataShape);
|
||||
@@ -62,7 +62,7 @@ def prepare_executable_cmd(args: dict):
|
||||
str(args["executable"].resolve(strict=True)),
|
||||
"-m", str(args["model"].resolve(strict=True)),
|
||||
"-d", args["device"],
|
||||
"-c" if args["model_cache"] else "",
|
||||
"-c" if args["model_cache"] else ""
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -6,8 +6,10 @@
|
||||
#include <iostream>
|
||||
|
||||
#include "common_utils.h"
|
||||
#include "reshape_utils.h"
|
||||
#include "timetests_helper/timer.h"
|
||||
#include "timetests_helper/utils.h"
|
||||
|
||||
using namespace InferenceEngine;
|
||||
|
||||
|
||||
@@ -16,14 +18,23 @@ using namespace InferenceEngine;
|
||||
* main(). The function should not throw any exceptions and responsible for
|
||||
* handling it by itself.
|
||||
*/
|
||||
int runPipeline(const std::string &model, const std::string &device, const bool isCacheEnabled) {
|
||||
auto pipeline = [](const std::string &model, const std::string &device, const bool isCacheEnabled) {
|
||||
int runPipeline(const std::string &model, const std::string &device, const bool isCacheEnabled,
|
||||
std::map<std::string, ov::PartialShape> reshapeShapes,
|
||||
std::map<std::string, std::vector<size_t>> dataShapes) {
|
||||
auto pipeline = [](const std::string &model, const std::string &device, const bool isCacheEnabled,
|
||||
std::map<std::string, ov::PartialShape> reshapeShapes,
|
||||
std::map<std::string, std::vector<size_t>> dataShapes) {
|
||||
Core ie;
|
||||
CNNNetwork cnnNetwork;
|
||||
ExecutableNetwork exeNetwork;
|
||||
InferRequest inferRequest;
|
||||
size_t batchSize = 0;
|
||||
|
||||
bool reshape = false;
|
||||
if (!reshapeShapes.empty()) {
|
||||
reshape = true;
|
||||
}
|
||||
|
||||
// first_inference_latency = time_to_inference + first_inference
|
||||
{
|
||||
SCOPED_TIMER(time_to_inference);
|
||||
@@ -48,6 +59,12 @@ int runPipeline(const std::string &model, const std::string &device, const bool
|
||||
cnnNetwork = ie.ReadNetwork(model);
|
||||
batchSize = cnnNetwork.getBatchSize();
|
||||
}
|
||||
if (reshape) {
|
||||
{
|
||||
SCOPED_TIMER(reshape);
|
||||
cnnNetwork.reshape(reshapeShapes);
|
||||
}
|
||||
}
|
||||
{
|
||||
SCOPED_TIMER(load_network);
|
||||
exeNetwork = ie.LoadNetwork(cnnNetwork, device);
|
||||
@@ -65,16 +82,23 @@ int runPipeline(const std::string &model, const std::string &device, const bool
|
||||
SCOPED_TIMER(first_inference);
|
||||
{
|
||||
SCOPED_TIMER(fill_inputs);
|
||||
|
||||
const InferenceEngine::ConstInputsDataMap inputsInfo(exeNetwork.GetInputsInfo());
|
||||
batchSize = batchSize != 0 ? batchSize : 1;
|
||||
fillBlobs(inferRequest, inputsInfo, batchSize);
|
||||
|
||||
if (reshape) {
|
||||
setStaticShapesBlobs(inferRequest, inputsInfo, dataShapes);
|
||||
fillBlobs(inferRequest, inputsInfo, batchSize);
|
||||
} else {
|
||||
fillBlobs(inferRequest, inputsInfo, batchSize);
|
||||
}
|
||||
}
|
||||
inferRequest.Infer();
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
pipeline(model, device, isCacheEnabled);
|
||||
pipeline(model, device, isCacheEnabled, reshapeShapes, dataShapes);
|
||||
} catch (const InferenceEngine::Exception &iex) {
|
||||
std::cerr
|
||||
<< "Inference Engine pipeline failed with Inference Engine exception:\n"
|
||||
@@ -89,4 +113,4 @@ int runPipeline(const std::string &model, const std::string &device, const bool
|
||||
return 3;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
@@ -12,4 +12,4 @@ add_subdirectory(${OpenVINO_SOURCE_DIR}/thirdparty/gflags
|
||||
${CMAKE_CURRENT_BINARY_DIR}/gflags_build
|
||||
EXCLUDE_FROM_ALL)
|
||||
|
||||
target_link_libraries(${TARGET_NAME} gflags)
|
||||
target_link_libraries(${TARGET_NAME} gflags tests_shared_lib)
|
||||
|
||||
@@ -32,6 +32,16 @@ static const char model_cache_message[] =
|
||||
"Not required. Use this key to run timetests with models caching. \n"
|
||||
"TimeInfer executable should be run twice - the second run will use cache prepared from first run.";
|
||||
|
||||
/// @brief message for reshape shapes argument
// Examples fixed to use ',' between dims — the parser (parseReshapeShapes/
// parseDataShapes) splits dims by ',', not by spaces.
static const char reshape_shapes_message[] =
    "Not required. Use this key to run timetests with reshape. \n"
    "Example: 'input:1..2,3,100,100'. Use '&' delimiter for several inputs. Example: 'input1:1..2,100&input2:1..2,100' ";

/// @brief message for data shapes argument
static const char data_shapes_message[] =
    "Not required. Use this key to run timetests with reshape. Used with 'reshape_shapes' arg. \n"
    "Only static shapes for data. Example: 'input:1,3,100,100'. Use '&' delimiter for several inputs. Example: 'input1:1,100&input2:1,100' ";
|
||||
|
||||
/// @brief message for statistics path argument
|
||||
static const char statistics_path_message[] =
|
||||
"Required. Path to a file to write statistics.";
|
||||
@@ -50,6 +60,14 @@ DEFINE_string(m, "", model_message);
|
||||
/// It is a required parameter
|
||||
DEFINE_string(d, "", target_device_message);
|
||||
|
||||
/// @brief Define parameter for set shapes to reshape function <br>
|
||||
/// It is a non-required parameter
|
||||
DEFINE_string(reshape_shapes, "", reshape_shapes_message);
|
||||
|
||||
/// @brief Define parameter for set shapes of the network data <br>
|
||||
/// It is a non-required parameter
|
||||
DEFINE_string(data_shapes, "", data_shapes_message);
|
||||
|
||||
/// @brief Define parameter for set CPU models caching <br>
|
||||
/// It is a non-required parameter
|
||||
DEFINE_bool(c, false, model_cache_message);
|
||||
@@ -66,9 +84,11 @@ static void showUsage() {
|
||||
std::cout << "TimeInfer [OPTION]" << std::endl;
|
||||
std::cout << "Options:" << std::endl;
|
||||
std::cout << std::endl;
|
||||
std::cout << " -h, --help " << help_message << std::endl;
|
||||
std::cout << " -m \"<path>\" " << model_message << std::endl;
|
||||
std::cout << " -d \"<device>\" " << target_device_message << std::endl;
|
||||
std::cout << " -s \"<path>\" " << statistics_path_message << std::endl;
|
||||
std::cout << " -c " << model_cache_message << std::endl;
|
||||
}
|
||||
std::cout << " -h, --help " << help_message << std::endl;
|
||||
std::cout << " -m \"<path>\" " << model_message << std::endl;
|
||||
std::cout << " -d \"<device>\" " << target_device_message << std::endl;
|
||||
std::cout << " -s \"<path>\" " << statistics_path_message << std::endl;
|
||||
std::cout << " -c " << model_cache_message << std::endl;
|
||||
std::cout << " -reshape_shapes " << reshape_shapes_message << std::endl;
|
||||
std::cout << " -data_shapes " << data_shapes_message << std::endl;
|
||||
}
|
||||
@@ -4,11 +4,15 @@
|
||||
|
||||
#include "cli.h"
|
||||
#include "statistics_writer.h"
|
||||
#include "reshape_utils.h"
|
||||
#include "timetests_helper/timer.h"
|
||||
|
||||
#include <iostream>
|
||||
|
||||
int runPipeline(const std::string &model, const std::string &device, const bool isCacheEnabled);
|
||||
|
||||
int runPipeline(const std::string &model, const std::string &device, const bool isCacheEnabled,
|
||||
std::map<std::string, ov::PartialShape> reshapeShapes,
|
||||
std::map<std::string, std::vector<size_t>> dataShapes);
|
||||
|
||||
/**
|
||||
* @brief Parses command line and check required arguments
|
||||
@@ -32,15 +36,20 @@ bool parseAndCheckCommandLine(int argc, char **argv) {
|
||||
throw std::logic_error(
|
||||
"Statistics file path is required but not set. Please set -s option.");
|
||||
|
||||
if (!FLAGS_reshape_shapes.empty() && FLAGS_data_shapes.empty())
|
||||
throw std::logic_error(
|
||||
"Data shapes is required for reshape shapes argument. Please set -data_shapes option.");
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Function calls `runPipeline` with mandatory time tracking of full run
|
||||
*/
|
||||
int _runPipeline() {
|
||||
int _runPipeline(std::map<std::string, ov::PartialShape> dynamicShapes,
|
||||
std::map<std::string, std::vector<size_t>> staticShapes) {
|
||||
SCOPED_TIMER(full_run);
|
||||
return runPipeline(FLAGS_m, FLAGS_d, FLAGS_c);
|
||||
return runPipeline(FLAGS_m, FLAGS_d, FLAGS_c, dynamicShapes, staticShapes);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -50,8 +59,11 @@ int main(int argc, char **argv) {
|
||||
if (!parseAndCheckCommandLine(argc, argv))
|
||||
return -1;
|
||||
|
||||
auto status = _runPipeline();
|
||||
auto dynamicShapes = parseReshapeShapes(FLAGS_reshape_shapes);
|
||||
auto staticShapes = parseDataShapes(FLAGS_data_shapes);
|
||||
|
||||
auto status = _runPipeline(dynamicShapes, staticShapes);
|
||||
StatisticsWriter::Instance().setFile(FLAGS_s);
|
||||
StatisticsWriter::Instance().write();
|
||||
return status;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user