Publishing 2019 R2 content (#223)

This commit is contained in:
Alexey Suhov
2019-08-09 19:02:42 +03:00
committed by openvino-pushbot
parent c585b530c1
commit ba6e22b1b5
3578 changed files with 289053 additions and 165277 deletions

View File

@@ -0,0 +1,6 @@
# Copyright (C) 2019 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
# Register the hello_query_device sample via the shared ie_add_sample() helper
# (declared in the samples' common CMake utilities); it builds main.cpp into an
# executable named hello_query_device and applies the standard sample settings.
ie_add_sample(NAME hello_query_device
SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/main.cpp")

View File

@@ -0,0 +1,56 @@
# Hello Query Device C++ Sample
This topic demonstrates how to run the Hello Query Device sample application, which queries Inference Engine devices and prints their metrics and default configuration values. The sample shows how to use [Query Device API feature](./docs/IE_DG/QueryDeviceAPI.md).
> **NOTE:** This topic describes usage of C++ implementation of the Query Device Sample.
> For the Python* implementation, refer to [Hello Query Device Python* Sample](./inference-engine/ie_bridges/python/sample/hello_query_device/README.md)
## Running
To see queried information, run the following:
```sh
./hello_query_device
```
## Sample Output
The application prints all available devices with their supported metrics and default values for configuration parameters:
```
Available devices:
Device: CPU
Metrics:
AVAILABLE_DEVICES : [ 0 ]
SUPPORTED_METRICS : [ AVAILABLE_DEVICES SUPPORTED_METRICS FULL_DEVICE_NAME OPTIMIZATION_CAPABILITIES SUPPORTED_CONFIG_KEYS RANGE_FOR_ASYNC_INFER_REQUESTS RANGE_FOR_STREAMS ]
FULL_DEVICE_NAME : Intel(R) Core(TM) i7-8700 CPU @ 3.20GHz
OPTIMIZATION_CAPABILITIES : [ WINOGRAD FP32 INT8 BIN ]
SUPPORTED_CONFIG_KEYS : [ CPU_BIND_THREAD CPU_THREADS_NUM CPU_THROUGHPUT_STREAMS DUMP_EXEC_GRAPH_AS_DOT DYN_BATCH_ENABLED DYN_BATCH_LIMIT EXCLUSIVE_ASYNC_REQUESTS PERF_COUNT ]
...
Default values for device configuration keys:
CPU_BIND_THREAD : YES
CPU_THREADS_NUM : 0
CPU_THROUGHPUT_STREAMS : 1
DUMP_EXEC_GRAPH_AS_DOT : ""
DYN_BATCH_ENABLED : NO
DYN_BATCH_LIMIT : 0
EXCLUSIVE_ASYNC_REQUESTS : NO
PERF_COUNT : NO
Device: FPGA
Metrics:
AVAILABLE_DEVICES : [ 0 ]
SUPPORTED_METRICS : [ AVAILABLE_DEVICES SUPPORTED_METRICS SUPPORTED_CONFIG_KEYS FULL_DEVICE_NAME OPTIMIZATION_CAPABILITIES RANGE_FOR_ASYNC_INFER_REQUESTS ]
SUPPORTED_CONFIG_KEYS : [ DEVICE_ID PERF_COUNT EXCLUSIVE_ASYNC_REQUESTS DLIA_IO_TRANSFORMATIONS_NATIVE DLIA_ARCH_ROOT_DIR DLIA_PERF_ESTIMATION ]
FULL_DEVICE_NAME : a10gx_2ddr : Intel Vision Accelerator Design with Intel Arria 10 FPGA (acla10_1150_sg10)
OPTIMIZATION_CAPABILITIES : [ FP16 ]
RANGE_FOR_ASYNC_INFER_REQUESTS : { 2, 5, 1 }
Default values for device configuration keys:
DEVICE_ID : [ 0 ]
PERF_COUNT : true
EXCLUSIVE_ASYNC_REQUESTS : false
DLIA_IO_TRANSFORMATIONS_NATIVE : false
DLIA_PERF_ESTIMATION : true
```
## See Also
* [Using Inference Engine Samples](./docs/IE_DG/Samples_Overview.md)
* [Model Downloader](https://github.com/opencv/open_model_zoo/tree/2018/model_downloader)
* [Model Optimizer](./docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md)

View File

@@ -0,0 +1,113 @@
// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <iomanip>
#include <vector>
#include <memory>
#include <string>
#include <tuple>
#include <cstdlib>
#include <samples/common.hpp>
#include <inference_engine.hpp>
using namespace InferenceEngine;
namespace {
/// Stream-insertion helper that renders a vector as "[ e1 e2 ... ]",
/// matching the bracketed list format used in the sample's output.
template <typename T>
std::ostream & operator << (std::ostream & stream, const std::vector<T> & v) {
    stream << "[ ";
    for (std::size_t i = 0; i < v.size(); ++i) {
        stream << v[i] << " ";
    }
    stream << "]";
    return stream;
}
void printParameterValue(const Parameter & value) {
if (value.is<bool>()) {
std::cout << std::boolalpha << value.as<bool>() << std::noboolalpha << std::endl;
} else if (value.is<int>()) {
std::cout << value.as<int>() << std::endl;
} else if (value.is<unsigned int>()) {
std::cout << value.as<unsigned int>() << std::endl;
} else if (value.is<float>()) {
std::cout << value.as<float>() << std::endl;
} else if (value.is<std::string>()) {
std::string stringValue = value.as<std::string>();
std::cout << (stringValue.empty() ? "\"\"" : stringValue) << std::endl;
} else if (value.is<std::vector<std::string> >()) {
std::cout << value.as<std::vector<std::string> >() << std::endl;
} else if (value.is<std::vector<int> >()) {
std::cout << value.as<std::vector<int> >() << std::endl;
} else if (value.is<std::vector<float> >()) {
std::cout << value.as<std::vector<float> >() << std::endl;
} else if (value.is<std::vector<unsigned int> >()) {
std::cout << value.as<std::vector<unsigned int> >() << std::endl;
} else if (value.is<std::tuple<unsigned int, unsigned int, unsigned int> >()) {
auto values = value.as<std::tuple<unsigned int, unsigned int, unsigned int> >();
std::cout << "{ ";
std::cout << std::get<0>(values) << ", ";
std::cout << std::get<1>(values) << ", ";
std::cout << std::get<2>(values);
std::cout << " }";
std::cout << std::endl;
} else if (value.is<std::tuple<unsigned int, unsigned int> >()) {
auto values = value.as<std::tuple<unsigned int, unsigned int> >();
std::cout << "{ ";
std::cout << std::get<0>(values) << ", ";
std::cout << std::get<1>(values);
std::cout << " }";
std::cout << std::endl;
} else {
std::cout << "UNSUPPORTED TYPE" << std::endl;
}
}
} // namespace
/// Entry point: enumerates all available Inference Engine devices and, for
/// each one, prints its supported metrics and the default values of its
/// configuration keys. Takes no command-line arguments.
int main(int argc, char *argv[]) {
    try {
        // The sample accepts no arguments; reject anything extra up front.
        if (argc != 1) {
            std::cout << "Usage : ./hello_query_device" << std::endl;
            return EXIT_FAILURE;
        }

        // Create the Inference Engine core object used for all queries.
        Core ie;

        // Ask the core which devices are present on this machine.
        const std::vector<std::string> devices = ie.GetAvailableDevices();

        // For every device, dump its metrics and default config-key values.
        std::cout << "Available devices: " << std::endl;
        for (const auto & deviceName : devices) {
            std::cout << "\tDevice: " << deviceName << std::endl;

            std::cout << "\tMetrics: " << std::endl;
            const std::vector<std::string> metricNames =
                ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS));
            for (const auto & metricName : metricNames) {
                // std::flush so the label appears even if printing the value throws.
                std::cout << "\t\t" << metricName << " : " << std::flush;
                printParameterValue(ie.GetMetric(deviceName, metricName));
            }

            std::cout << "\tDefault values for device configuration keys: " << std::endl;
            const std::vector<std::string> configKeys =
                ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS));
            for (const auto & configKey : configKeys) {
                std::cout << "\t\t" << configKey << " : " << std::flush;
                printParameterValue(ie.GetConfig(deviceName, configKey));
            }

            std::cout << std::endl;
        }
    } catch (const std::exception & ex) {
        std::cerr << ex.what() << std::endl;
        return EXIT_FAILURE;
    }
    return EXIT_SUCCESS;
}