Adds first inference time measurements in benchmark_app (#1487)

Nadezhda Ageeva
2020-07-27 16:45:07 +03:00
committed by GitHub
parent 5ff59eb711
commit 40d597c313
3 changed files with 23 additions and 6 deletions


@@ -513,13 +513,19 @@ int main(int argc, char *argv[]) {
         if (!inferRequest) {
             THROW_IE_EXCEPTION << "No idle Infer Requests!";
         }
         if (FLAGS_api == "sync") {
             inferRequest->infer();
         } else {
             inferRequest->startAsync();
         }
         inferRequestsQueue.waitAll();
+        auto duration_ms = double_to_string(inferRequestsQueue.getLatencies()[0]);
+        slog::info << "First inference took " << duration_ms << " ms" << slog::endl;
+        if (statistics)
+            statistics->addParameters(StatisticsReport::Category::EXECUTION_RESULTS,
+                                      {
+                                              {"first inference time (ms)", duration_ms}
+                                      });
         inferRequestsQueue.resetTimes();
         auto startTime = Time::now();
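
The hunk above times the warm-up request separately from the measured loop: once inferRequestsQueue.waitAll() returns, the latency recorded for that first request is printed and, when a statistics report is requested (the statistics object is set), stored under the EXECUTION_RESULTS category before resetTimes() clears the queue so the warm-up does not skew the main measurements. A minimal Python sketch of the same pattern follows; measure_first_inference, run_inference, and the report dict are hypothetical stand-ins for illustration, not benchmark_app code.

    import time

    def measure_first_inference(run_inference, report=None):
        # Illustrative only: time one warm-up inference and record it separately
        # from whatever measured loop would follow.
        start = time.perf_counter()
        run_inference()                                   # warm-up request, out of measured scope
        first_ms = (time.perf_counter() - start) * 1000.0
        print("First inference took {:.2f} ms".format(first_ms))
        if report is not None:
            report["first inference time (ms)"] = "{:.2f}".format(first_ms)
        return first_ms

    # Dummy workload standing in for an inference request:
    report = {}
    measure_first_inference(lambda: time.sleep(0.01), report)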


@@ -85,18 +85,22 @@ class Benchmark:
         self.nireq = len(exe_network.requests)
         return exe_network
-    def infer(self, exe_network, batch_size, progress_bar=None):
-        progress_count = 0
-        infer_requests = exe_network.requests
+    def first_infer(self, exe_network):
+        infer_request = exe_network.requests[0]
         # warming up - out of scope
         if self.api_type == 'sync':
-            infer_requests[0].infer()
+            infer_request.infer()
         else:
-            infer_requests[0].async_infer()
+            infer_request.async_infer()
             status = exe_network.wait()
             if status != StatusCode.OK:
                 raise Exception("Wait for all requests is failed with status code {}!".format(status))
+        return infer_request.latency
+    def infer(self, exe_network, batch_size, progress_bar=None):
+        progress_count = 0
+        infer_requests = exe_network.requests
         start_time = datetime.utcnow()
         exec_time = 0
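
On the Python side the warm-up moves out of infer() into a new Benchmark.first_infer() method: it runs request 0 once (plain infer() for the sync API, async_infer() plus exe_network.wait() otherwise) and returns that request's latency, which the IE Python API reports in milliseconds. The self-contained sketch below replays the same control flow against fake objects; StatusCode, FakeRequest, FakeExeNetwork, and the standalone first_infer function are mock stand-ins for illustration, not OpenVINO classes.

    import time

    class StatusCode:
        OK = 0          # stand-in for openvino.inference_engine.StatusCode.OK

    class FakeRequest:
        # Mimics just what first_infer touches: infer(), async_infer(), latency.
        def __init__(self):
            self.latency = None
        def infer(self):
            start = time.perf_counter()
            time.sleep(0.002)                                    # pretend to execute a model
            self.latency = (time.perf_counter() - start) * 1e3   # milliseconds, like InferRequest.latency
        def async_infer(self):
            self.infer()                                         # this fake completes inline

    class FakeExeNetwork:
        def __init__(self):
            self.requests = [FakeRequest()]
        def wait(self):
            return StatusCode.OK

    def first_infer(exe_network, api_type='sync'):
        # Same control flow as Benchmark.first_infer, lifted out of the class.
        infer_request = exe_network.requests[0]
        if api_type == 'sync':
            infer_request.infer()
        else:
            infer_request.async_infer()
            status = exe_network.wait()
            if status != StatusCode.OK:
                raise Exception("Wait for all requests is failed with status code {}!".format(status))
        return infer_request.latency

    print("first inference: {:.2f} ms".format(first_infer(FakeExeNetwork())))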


@@ -299,6 +299,13 @@ def run(args):
     progress_bar = ProgressBar(progress_bar_total_count, args.stream_output, args.progress) if args.progress else None
+    duration_ms = "{:.2f}".format(benchmark.first_infer(exe_network))
+    logger.info("First inference took {} ms".format(duration_ms))
+    if statistics:
+        statistics.add_parameters(StatisticsReport.Category.EXECUTION_RESULTS,
+                                  [
+                                      ('first inference time (ms)', duration_ms)
+                                  ])
     fps, latency_ms, total_duration_sec, iteration = benchmark.infer(exe_network, batch_size, progress_bar)
     # ------------------------------------ 11. Dumping statistics report -------------------------------------------
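
Finally, run() in the Python tool mirrors the C++ sample: the value returned by first_infer() is formatted to two decimals, logged, and appended to the statistics report as a ('first inference time (ms)', value) pair under the EXECUTION_RESULTS category, so it is dumped together with the other execution results in step 11. The toy collector below only illustrates that add_parameters(category, [(name, value), ...]) call shape; MiniStats and its CSV layout are assumptions, not the actual StatisticsReport class.

    import csv
    from collections import defaultdict

    class MiniStats:
        # Toy stand-in accepting the same (category, list-of-tuples) call shape.
        EXECUTION_RESULTS = 'execution results'

        def __init__(self):
            self._parameters = defaultdict(list)

        def add_parameters(self, category, parameters):
            self._parameters[category].extend(parameters)

        def dump(self, path):
            with open(path, 'w', newline='') as f:
                writer = csv.writer(f, delimiter=';')
                for category, parameters in self._parameters.items():
                    writer.writerow([category])
                    writer.writerows(parameters)

    stats = MiniStats()
    stats.add_parameters(MiniStats.EXECUTION_RESULTS,
                         [('first inference time (ms)', '12.34')])
    stats.dump('benchmark_report.csv')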