diff --git a/samples/cpp/speech_sample/README.md b/samples/cpp/speech_sample/README.md
index 0b5f51e8a20..152d12d1cdb 100644
--- a/samples/cpp/speech_sample/README.md
+++ b/samples/cpp/speech_sample/README.md
@@ -59,6 +59,7 @@ Several execution modes are supported via the `-d` flag:
 - `CPU` - All calculation are performed on CPU device using CPU Plugin.
 - `GPU` - All calculation are performed on GPU device using GPU Plugin.
 - `MYRIAD` - All calculation are performed on Intel® Neural Compute Stick 2 device using VPU MYRIAD Plugin.
+- `VPUX` - All calculation are performed on VPUX device using VPUX Plugin.
 - `GNA_AUTO` - GNA hardware is used if available and the driver is installed. Otherwise, the GNA device is emulated in fast-but-not-bit-exact mode.
 - `GNA_HW` - GNA hardware is used if available and the driver is installed. Otherwise, an error will occur.
 - `GNA_SW` - Deprecated. The GNA device is emulated in fast-but-not-bit-exact mode.
@@ -102,7 +103,7 @@ Options:
     -i ""                   Required. Paths to input file or Layers names with corresponding paths to the input files. Example of usage for single file: or . Example of usage for named layers: =,=.
     -m ""                   Required. Path to an .xml file with a trained model (required if -rg is missing).
     -o ""                   Optional. Output file name to save scores or Layer names with corresponding files names to save scores. Example of usage for single file: or . Example of usage for named layers: Example of usage for named layers: =,=.
-    -d ""                   Optional. Specify a target device to infer on. CPU, GPU, MYRIAD, GNA_AUTO, GNA_HW, GNA_HW_WITH_SW_FBACK, GNA_SW_FP32, GNA_SW_EXACT and HETERO with combination of GNA as the primary device and CPU as a secondary (e.g. HETERO:GNA,CPU) are supported. The sample will look for a suitable plugin for device specified.
+    -d ""                   Optional. Specify a target device to infer on. CPU, GPU, MYRIAD, VPUX, GNA_AUTO, GNA_HW, GNA_HW_WITH_SW_FBACK, GNA_SW_FP32, GNA_SW_EXACT and HETERO with combination of GNA as the primary device and CPU as a secondary (e.g. HETERO:GNA,CPU) are supported. The sample will look for a suitable plugin for device specified.
     -pc                     Optional. Enables per-layer performance report.
     -q ""                   Optional. Input quantization mode: static (default), dynamic, or user (use with -sf).
     -qb ""                  Optional. Weight bits for quantization: 8 or 16 (default)
@@ -119,7 +120,7 @@ Options:
     -exec_target ""         Optional. Specify GNA execution target generation. May be one of GNA_TARGET_2_0, GNA_TARGET_3_0. By default, generation corresponds to the GNA HW available in the system or the latest fully supported generation by the software. See the GNA Plugin's GNA_EXEC_TARGET config option description.
     -compile_target ""      Optional. Specify GNA compile target generation. May be one of GNA_TARGET_2_0, GNA_TARGET_3_0. By default, generation corresponds to the GNA HW available in the system or the latest fully supported generation by the software. See the GNA Plugin's GNA_COMPILE_TARGET config option description.
 
-Available target devices: CPU GNA GPU
+Available target devices: CPU GNA GPU VPUX
 ```
 
 ### Model Preparation
diff --git a/samples/cpp/speech_sample/speech_sample.hpp b/samples/cpp/speech_sample/speech_sample.hpp
index 8ca4fc806ff..3d6d6493e71 100644
--- a/samples/cpp/speech_sample/speech_sample.hpp
+++ b/samples/cpp/speech_sample/speech_sample.hpp
@@ -23,7 +23,7 @@ static const char model_message[] = "Required. Path to an .xml file with a train
 
 /// @brief message for assigning calculation to device
 static const char target_device_message[] =
-    "Optional. Specify a target device to infer on. CPU, GPU, MYRIAD, GNA_AUTO, GNA_HW, "
+    "Optional. Specify a target device to infer on. CPU, GPU, MYRIAD, VPUX, GNA_AUTO, GNA_HW, "
     "GNA_HW_WITH_SW_FBACK, GNA_SW_FP32, "
     "GNA_SW_EXACT and HETERO with combination of GNA as the primary device and CPU"
     " as a secondary (e.g. HETERO:GNA,CPU) are supported. "
@@ -272,7 +272,8 @@ bool parse_and_check_command_line(int argc, char* argv[]) {
                                                      "HETERO:GNA_HW,CPU",
                                                      "HETERO:GNA_SW_EXACT,CPU",
                                                      "HETERO:GNA_SW_FP32,CPU",
-                                                     "MYRIAD"};
+                                                     "MYRIAD",
+                                                     "VPUX"};
 
     if (std::find(supportedDevices.begin(), supportedDevices.end(), FLAGS_d) == supportedDevices.end()) {
         throw std::logic_error("Specified device is not supported.");
diff --git a/samples/python/speech_sample/README.md b/samples/python/speech_sample/README.md
index ce617b1d1ed..ddc8d1b00bb 100644
--- a/samples/python/speech_sample/README.md
+++ b/samples/python/speech_sample/README.md
@@ -58,6 +58,7 @@ Several execution modes are supported via the `-d` flag:
 - `CPU` - All calculation are performed on CPU device using CPU Plugin.
 - `GPU` - All calculation are performed on GPU device using GPU Plugin.
 - `MYRIAD` - All calculation are performed on Intel® Neural Compute Stick 2 device using VPU MYRIAD Plugin.
+- `VPUX` - All calculation are performed on VPUX device using VPUX Plugin.
 - `GNA_AUTO` - GNA hardware is used if available and the driver is installed. Otherwise, the GNA device is emulated in fast-but-not-bit-exact mode.
 - `GNA_HW` - GNA hardware is used if available and the driver is installed. Otherwise, an error will occur.
 - `GNA_SW` - Deprecated. The GNA device is emulated in fast-but-not-bit-exact mode.
@@ -108,7 +109,7 @@ Options:
   -r REFERENCE, --reference REFERENCE
                         Optional. Read reference score file and compare scores.
   -d DEVICE, --device DEVICE
-                        Optional. Specify a target device to infer on. CPU, GPU, MYRIAD, GNA_AUTO, GNA_HW, GNA_SW_FP32,
+                        Optional. Specify a target device to infer on. CPU, GPU, MYRIAD, VPUX, GNA_AUTO, GNA_HW, GNA_SW_FP32,
                         GNA_SW_EXACT and HETERO with combination of GNA as the primary device and CPU as a secondary (e.g.
                         HETERO:GNA,CPU) are supported. The sample will look for a suitable plugin for device specified.
                         Default value is CPU.
diff --git a/samples/python/speech_sample/arg_parser.py b/samples/python/speech_sample/arg_parser.py
index f7c9e167a45..6d4b29ceeca 100644
--- a/samples/python/speech_sample/arg_parser.py
+++ b/samples/python/speech_sample/arg_parser.py
@@ -25,7 +25,7 @@ def build_arg_parser() -> argparse.ArgumentParser:
                       help='Optional. Read reference score file and compare scores.')
     args.add_argument('-d', '--device', default='CPU', type=str,
                       help='Optional. Specify a target device to infer on. '
-                           'CPU, GPU, MYRIAD, GNA_AUTO, GNA_HW, GNA_SW_FP32, GNA_SW_EXACT and HETERO with combination of GNA'
+                           'CPU, GPU, MYRIAD, VPUX, GNA_AUTO, GNA_HW, GNA_SW_FP32, GNA_SW_EXACT and HETERO with combination of GNA'
                            ' as the primary device and CPU as a secondary (e.g. HETERO:GNA,CPU) are supported. '
                            'The sample will look for a suitable plugin for device specified. Default value is CPU.')
     args.add_argument('-bs', '--batch_size', type=int, choices=range(1, 9), metavar='[1-8]',
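
Note (not part of the patch): below is a minimal, self-contained sketch of the -d validation that speech_sample.hpp performs once this change is applied. The list entries ahead of "MYRIAD" are assumptions taken from the README's device list, since the hunk above only shows the tail of the supportedDevices vector; check_device() and main() are hypothetical wrappers added for illustration.

// Hypothetical standalone mirror of the device check in parse_and_check_command_line().
#include <algorithm>
#include <stdexcept>
#include <string>
#include <vector>

static void check_device(const std::string& device) {
    // Entries before "MYRIAD" are assumed from the README; "VPUX" is the value this patch adds.
    const std::vector<std::string> supportedDevices = {"CPU",
                                                       "GPU",
                                                       "GNA_AUTO",
                                                       "GNA_HW",
                                                       "GNA_HW_WITH_SW_FBACK",
                                                       "GNA_SW_FP32",
                                                       "GNA_SW_EXACT",
                                                       "HETERO:GNA,CPU",
                                                       "HETERO:GNA_HW,CPU",
                                                       "HETERO:GNA_SW_EXACT,CPU",
                                                       "HETERO:GNA_SW_FP32,CPU",
                                                       "MYRIAD",
                                                       "VPUX"};
    // Same rejection behavior as the sample: unknown -d values raise an error.
    if (std::find(supportedDevices.begin(), supportedDevices.end(), device) == supportedDevices.end()) {
        throw std::logic_error("Specified device is not supported.");
    }
}

int main() {
    check_device("VPUX");  // accepted with this patch applied; would throw without it
    return 0;
}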