[tools] correct options for dynamic models (#9010)
* process dynamic model with one data_shape as static * a static model reshaped to dynamic with one data shape can't be measured as static
This commit is contained in:
parent
86383d07e3
commit
ebba28a3e3
@ -5,6 +5,8 @@ import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
from openvino.runtime import Dimension
|
||||
|
||||
from openvino.tools.benchmark.benchmark import Benchmark
|
||||
from openvino.tools.benchmark.parameters import parse_args
|
||||
from openvino.tools.benchmark.utils.constants import MULTI_DEVICE_NAME, HETERO_DEVICE_NAME, CPU_DEVICE_NAME, \
|
||||
@ -15,8 +17,8 @@ from openvino.tools.benchmark.utils.progress_bar import ProgressBar
|
||||
from openvino.tools.benchmark.utils.utils import next_step, get_number_iterations, pre_post_processing, \
|
||||
process_help_inference_string, print_perf_counters, dump_exec_graph, get_duration_in_milliseconds, \
|
||||
get_command_line_arguments, parse_nstreams_value_per_device, parse_devices, get_inputs_info, \
|
||||
print_inputs_and_outputs_info, get_batch_size, load_config, dump_config, get_latency_groups, \
|
||||
check_for_static
|
||||
print_inputs_and_outputs_info, get_network_batch_size, load_config, dump_config, get_latency_groups, \
|
||||
check_for_static, can_measure_as_static
|
||||
from openvino.tools.benchmark.utils.statistics_report import StatisticsReport, averageCntReport, detailedCntReport
|
||||
|
||||
|
||||
@ -225,9 +227,7 @@ def run(args):
|
||||
('load network time (ms)', duration_ms)
|
||||
])
|
||||
app_inputs_info, _ = get_inputs_info(args.shape, args.data_shape, args.layout, args.batch_size, args.input_scale, args.input_mean, exe_network.get_runtime_function().get_parameters())
|
||||
batch_size = get_batch_size(app_inputs_info)
|
||||
if batch_size.is_dynamic and benchmark.api_type == 'sync':
|
||||
raise Exception("Dynamic batch size is supported only in async mode")
|
||||
batch_size = get_network_batch_size(app_inputs_info)
|
||||
elif not is_network_compiled:
|
||||
# --------------------- 4. Read the Intermediate Representation of the network -----------------------------
|
||||
next_step()
|
||||
@ -262,10 +262,7 @@ def run(args):
|
||||
])
|
||||
|
||||
# use batch size according to provided layout and shapes
|
||||
batch_size = get_batch_size(app_inputs_info)
|
||||
if batch_size.is_dynamic and benchmark.api_type == 'sync':
|
||||
raise Exception("Dynamic batch size is supported only in async mode")
|
||||
|
||||
batch_size = get_network_batch_size(app_inputs_info)
|
||||
logger.info(f'Network batch size: {batch_size}')
|
||||
|
||||
# --------------------- 6. Configuring inputs and outputs of the model --------------------------------------------------
|
||||
@ -307,10 +304,7 @@ def run(args):
|
||||
('import network time (ms)', duration_ms)
|
||||
])
|
||||
app_inputs_info, _ = get_inputs_info(args.shape, args.data_shape, args.layout, args.batch_size, args.input_scale, args.input_mean, exe_network.get_runtime_function().get_parameters())
|
||||
batch_size = get_batch_size(app_inputs_info)
|
||||
if batch_size.is_dynamic and benchmark.api_type == 'sync':
|
||||
raise Exception("Dynamic batch size is supported only in async mode")
|
||||
|
||||
batch_size = get_network_batch_size(app_inputs_info)
|
||||
|
||||
# --------------------- 8. Querying optimal runtime parameters --------------------------------------------------
|
||||
next_step()
|
||||
@ -353,7 +347,8 @@ def run(args):
|
||||
data_queue = get_input_data(paths_to_input, app_inputs_info)
|
||||
|
||||
static_mode = check_for_static(app_inputs_info)
|
||||
if not static_mode and benchmark.api_type == 'sync':
|
||||
allow_inference_only_or_sync = can_measure_as_static(app_inputs_info)
|
||||
if not allow_inference_only_or_sync and benchmark.api_type == 'sync':
|
||||
raise Exception("Benchmarking of the model with dynamic shapes is available for async API only."
|
||||
"Please use -api async -nstreams 1 -nireq 1 to emulate sync behavior.")
|
||||
|
||||
@ -362,9 +357,13 @@ def run(args):
|
||||
benchmark.inference_only = True
|
||||
else:
|
||||
benchmark.inference_only = False
|
||||
elif benchmark.inference_only and not static_mode:
|
||||
elif benchmark.inference_only and not allow_inference_only_or_sync:
|
||||
raise Exception("Benchmarking dynamic model available with input filling in measurement loop only!")
|
||||
|
||||
# update batch size in case dynamic network with one data_shape
|
||||
if benchmark.inference_only and batch_size.is_dynamic:
|
||||
batch_size = Dimension(data_queue.batch_sizes[data_queue.current_group_id])
|
||||
|
||||
benchmark.latency_groups = get_latency_groups(app_inputs_info)
|
||||
|
||||
if len(benchmark.latency_groups) > 1:
|
||||
|
@ -236,11 +236,17 @@ def get_duration_in_secs(target_device):
|
||||
|
||||
|
||||
def check_for_static(app_input_info):
|
||||
is_static = True
|
||||
for info in app_input_info:
|
||||
if info.is_dynamic:
|
||||
return False
|
||||
return is_static
|
||||
return True
|
||||
|
||||
|
||||
def can_measure_as_static(app_input_info):
    """Return True if every input allows inference-only (static-like) measurement.

    An input disqualifies the model when it is dynamic and either has more
    than one data shape to cycle through, or its original network shape was
    static (i.e. it was reshaped to dynamic on purpose); in both cases the
    input data must be refilled inside the measurement loop.
    """
    return all(
        not (info.is_dynamic and (len(info.shapes) > 1 or info.original_shape.is_static))
        for info in app_input_info
    )
|
||||
|
||||
|
||||
def parse_devices(device_string):
|
||||
@ -428,6 +434,7 @@ class AppInputInfo:
|
||||
def __init__(self):
|
||||
self.element_type = None
|
||||
self.layout = Layout()
|
||||
self.original_shape = None
|
||||
self.partial_shape = None
|
||||
self.data_shapes = []
|
||||
self.scale = []
|
||||
@ -550,6 +557,7 @@ def get_inputs_info(shape_string, data_shape_string, layout_string, batch_size,
|
||||
# Input name
|
||||
info.name = input_names[i]
|
||||
# Shape
|
||||
info.original_shape = parameters[i].get_partial_shape()
|
||||
if info.name in shape_map.keys():
|
||||
info.partial_shape = parse_partial_shape(shape_map[info.name])
|
||||
reshape = True
|
||||
@ -625,7 +633,7 @@ def get_inputs_info(shape_string, data_shape_string, layout_string, batch_size,
|
||||
return input_info, reshape
|
||||
|
||||
|
||||
def get_batch_size(inputs_info):
|
||||
def get_network_batch_size(inputs_info):
|
||||
null_dimension = Dimension(0)
|
||||
batch_size = null_dimension
|
||||
for info in inputs_info:
|
||||
|
Loading…
Reference in New Issue
Block a user