Compare commits
62 Commits
2023.1.0.d
...
2022.3.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9752fafe8e | ||
|
|
3111e2394c | ||
|
|
b2feb56b22 | ||
|
|
7da99de9ae | ||
|
|
756b0de9bf | ||
|
|
1a72966d4e | ||
|
|
4c7c9418a5 | ||
|
|
00d462b1d6 | ||
|
|
21ae66ad5f | ||
|
|
a5c616bc3c | ||
|
|
d33d6b7a6d | ||
|
|
dffe45ebd1 | ||
|
|
bba7e77242 | ||
|
|
5f8d05e342 | ||
|
|
b84161848e | ||
|
|
7904b34679 | ||
|
|
bda7227502 | ||
|
|
f3964bcea0 | ||
|
|
8f13a77a15 | ||
|
|
97ae79fe80 | ||
|
|
ed5e237403 | ||
|
|
7338f4b049 | ||
|
|
288b5326cb | ||
|
|
6f5edfa942 | ||
|
|
ad0ef56665 | ||
|
|
7e1b9353d5 | ||
|
|
ece291a246 | ||
|
|
669d35b3e6 | ||
|
|
12b019e027 | ||
|
|
b07fea664f | ||
|
|
4f9a8757c8 | ||
|
|
12afd97e14 | ||
|
|
9183d96394 | ||
|
|
2d0f8b4f27 | ||
|
|
05d8648854 | ||
|
|
3914fd8aa8 | ||
|
|
4911f80f37 | ||
|
|
53265f0097 | ||
|
|
3dd6b50bff | ||
|
|
9b8ab44500 | ||
|
|
7e87923f0c | ||
|
|
083a17e89e | ||
|
|
67bbc7361c | ||
|
|
0121921068 | ||
|
|
5cfc3cbf5d | ||
|
|
85506f263c | ||
|
|
36c6d58fb2 | ||
|
|
690a10c386 | ||
|
|
bceb7dbfe1 | ||
|
|
57d8e96a30 | ||
|
|
2d072e8129 | ||
|
|
36ab5c2192 | ||
|
|
afa9bdde93 | ||
|
|
970d53b420 | ||
|
|
633fe397a3 | ||
|
|
5c33eff533 | ||
|
|
8680791ffe | ||
|
|
edb3f3e820 | ||
|
|
0eebb124c9 | ||
|
|
cc886509c6 | ||
|
|
a9ad74b690 | ||
|
|
6947916541 |
@@ -32,7 +32,7 @@ resources:
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/openvino_contrib
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
jobs:
|
||||
- job: android_arm64
|
||||
|
||||
@@ -30,13 +30,13 @@ resources:
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/openvino_contrib
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
- repository: testdata
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/testdata
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
jobs:
|
||||
- job: Lin
|
||||
|
||||
@@ -32,7 +32,7 @@ resources:
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/openvino_contrib
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
jobs:
|
||||
- job: linux_arm64
|
||||
|
||||
@@ -150,7 +150,7 @@ jobs:
|
||||
- script: ls -alR $(REPO_DIR)/bin/
|
||||
displayName: 'List bin files ON'
|
||||
# TODO: ebable after the fix on CPU side
|
||||
# - script: |
|
||||
# $(REPO_DIR)/bin/intel64/Release/benchmark_app -niter 1 -nireq 1 -m $(MODELS_PATH)/models/test_model/test_model_fp32.xml -d CPU
|
||||
# workingDirectory: $(REPO_DIR)
|
||||
# displayName: 'Use OpenVINO after CC'
|
||||
- script: |
|
||||
$(REPO_DIR)/bin/intel64/Release/benchmark_app -niter 1 -nireq 1 -m $(MODELS_PATH)/models/test_model/test_model_fp32.xml -d CPU
|
||||
workingDirectory: $(REPO_DIR)
|
||||
displayName: 'Use OpenVINO after CC'
|
||||
|
||||
@@ -4,7 +4,7 @@ resources:
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/openvino_contrib
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
jobs:
|
||||
- job: Lin
|
||||
|
||||
@@ -30,13 +30,13 @@ resources:
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/openvino_contrib
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
- repository: testdata
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/testdata
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
jobs:
|
||||
- job: Lin_Debian
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
# type: github
|
||||
# endpoint: openvinotoolkit
|
||||
# name: openvinotoolkit/testdata
|
||||
# ref: master
|
||||
# ref: releases/2022/3
|
||||
|
||||
jobs:
|
||||
- job: Lin_lohika
|
||||
|
||||
@@ -30,13 +30,13 @@ resources:
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/openvino_contrib
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
- repository: testdata
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/testdata
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
jobs:
|
||||
- job: Mac
|
||||
|
||||
@@ -30,13 +30,13 @@ resources:
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/openvino_contrib
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
- repository: testdata
|
||||
type: github
|
||||
endpoint: openvinotoolkit
|
||||
name: openvinotoolkit/testdata
|
||||
ref: master
|
||||
ref: releases/2022/3
|
||||
|
||||
jobs:
|
||||
- job: Win
|
||||
|
||||
@@ -318,7 +318,7 @@ macro(ov_cpack_settings)
|
||||
# Samples
|
||||
#
|
||||
|
||||
set(samples_build_deps "cmake, g++, gcc, libc6-dev, make")
|
||||
set(samples_build_deps "cmake, g++, gcc, libc6-dev, make, pkg-config")
|
||||
set(samples_build_deps_suggest "libopencv-core-dev, libopencv-imgproc-dev, libopencv-imgcodecs-dev")
|
||||
if(OV_GLIBC_VERSION VERSION_LESS_EQUAL 2.27)
|
||||
# Ubuntu 18.04, Debian 9 cases
|
||||
|
||||
@@ -277,7 +277,7 @@ macro(ov_cpack_settings)
|
||||
# Samples
|
||||
#
|
||||
|
||||
set(samples_build_deps "cmake3, gcc-c++, gcc, glibc-devel, make")
|
||||
set(samples_build_deps "cmake3, gcc-c++, gcc, glibc-devel, make, pkgconf-pkg-config")
|
||||
set(samples_build_deps_suggest "opencv-devel >= 3.0")
|
||||
|
||||
# c_samples / cpp_samples
|
||||
|
||||
@@ -17,6 +17,6 @@ Description: OpenVINO™ Toolkit
|
||||
URL: https://docs.openvino.ai/latest/index.html
|
||||
Version: @OpenVINO_VERSION@
|
||||
Conflicts: openvino < @OpenVINO_VERSION@
|
||||
Cflags: -I${includedir_old} -I${includedir_new}
|
||||
Cflags: -I${includedir_old} -I${includedir_new} @PKGCONFIG_OpenVINO_DEFINITIONS@
|
||||
Libs: -L${libdir} @PKGCONFIG_OpenVINO_FRONTENDS@ -lopenvino_c -lopenvino @PKGCONFIG_OpenVINO_PRIVATE_DEPS@
|
||||
Libs.private: -ldl -lm -lpthread -lrt
|
||||
|
||||
@@ -1,11 +1,36 @@
|
||||
# Introduction to OpenVINO™ Deployment {#openvino_docs_deployment_guide_introduction}
|
||||
# OpenVINO™ Deployment {#openvino_docs_deployment_guide_introduction}
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
Deploy via OpenVINO Runtime <openvino_deployment_guide>
|
||||
Deploy via Model Serving <ovms_what_is_openvino_model_server>
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
Once you have a model that meets both OpenVINO™ and your requirements, you can choose among several ways of deploying it with your application:
|
||||
Once you have a model that meets both OpenVINO™ and your requirements, you can choose how to deploy it with your application.
|
||||
|
||||
* [Deploy your application locally](../OV_Runtime_UG/deployment/deployment_intro.md).
|
||||
* [Deploy your model with OpenVINO Model Server](@ref ovms_what_is_openvino_model_server).
|
||||
* [Deploy your application for the TensorFlow framework with OpenVINO Integration](./openvino_ecosystem_ovtf.md).
|
||||
@sphinxdirective
|
||||
.. panels::
|
||||
|
||||
`Deploy Locally <openvino_deployment_guide>`_
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
Local deployment uses OpenVINO Runtime installed on the device. It utilizes resources available to the system and provides the quickest way of launching inference.
|
||||
|
||||
---
|
||||
|
||||
`Deploy by Model Serving <ovms_what_is_openvino_model_server>`_
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
Deployment via OpenVINO Model Server allows the device to connect to the server set up remotely. This way inference uses external resources instead of the ones provided by the device itself.
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
> **NOTE**: Note that [running inference in OpenVINO Runtime](../OV_Runtime_UG/openvino_intro.md) is the most basic form of deployment. Before moving forward, make sure you know how to create a proper Inference configuration.
|
||||
|
||||
Apart from the default deployment options, you may also [deploy your application for the TensorFlow framework with OpenVINO Integration](./openvino_ecosystem_ovtf.md).
|
||||
15
docs/Documentation/media_processing_cv_libraries.md
Normal file
@@ -0,0 +1,15 @@
|
||||
# Media Processing and CV Libraries {#media_processing_cv_libraries}
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
Intel® Deep Learning Streamer <openvino_docs_dlstreamer>
|
||||
openvino_docs_gapi_gapi_intro
|
||||
OpenCV Developer Guide <https://docs.opencv.org/master/>
|
||||
OpenCL™ Developer Guide <https://software.intel.com/en-us/openclsdk-devguide>
|
||||
OneVPL Developer Guide <https://www.intel.com/content/www/us/en/developer/articles/release-notes/oneapi-video-processing-library-release-notes.html>
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
@@ -1,4 +1,16 @@
|
||||
# Introduction to Model Processing {#openvino_docs_model_processing_introduction}
|
||||
# Model Preparation {#openvino_docs_model_processing_introduction}
|
||||
|
||||
@sphinxdirective
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
Supported_Model_Formats
|
||||
openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide
|
||||
omz_tools_downloader
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
Every deep learning workflow begins with obtaining a model. You can choose to prepare a custom one, use a ready-made solution and adjust it to your needs, or even download and run a pre-trained network from an online database, such as OpenVINO's [Open Model Zoo](../model_zoo.md).
|
||||
|
||||
|
||||
@@ -1,5 +1,21 @@
|
||||
# OpenVINO™ Ecosystem Overview {#openvino_ecosystem}
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
ovtf_integration
|
||||
ote_documentation
|
||||
ovsa_get_started
|
||||
openvino_inference_engine_tools_compile_tool_README
|
||||
openvino_docs_tuning_utilities
|
||||
workbench_docs_Workbench_DG_Introduction
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
|
||||
OpenVINO™ is not just one tool. It is an expansive ecosystem of utilities, providing a comprehensive workflow for deep learning solution development. Learn more about each of them to reach the full potential of OpenVINO™ Toolkit.
|
||||
|
||||
|
||||
53
docs/Documentation/openvino_workflow.md
Normal file
@@ -0,0 +1,53 @@
|
||||
# OPENVINO Workflow {#openvino_workflow}
|
||||
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
Model Preparation <openvino_docs_model_processing_introduction>
|
||||
Model Optimization and Compression <openvino_docs_model_optimization_guide>
|
||||
Deployment <openvino_docs_deployment_guide_introduction>
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
|
||||
THIS IS A PAGE ABOUT THE WORKFLOW
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. raw:: html
|
||||
|
||||
<div class="section" id="welcome-to-openvino-toolkit-s-documentation">
|
||||
|
||||
<link rel="stylesheet" type="text/css" href="_static/css/homepage_style.css">
|
||||
<div style="clear:both;"> </div>
|
||||
<div id="HP_flow-container">
|
||||
<div class="HP_flow-btn">
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_model_processing_introduction.html">
|
||||
<img src="_static/images/OV_flow_model_hvr.svg" alt="link to model processing introduction" />
|
||||
</a>
|
||||
</div>
|
||||
<div class="HP_flow-arrow" >
|
||||
<img src="_static/images/OV_flow_arrow.svg" alt="" />
|
||||
</div>
|
||||
<div class="HP_flow-btn">
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_deployment_optimization_guide_dldt_optimization_guide.html">
|
||||
<img src="_static/images/OV_flow_optimization_hvr.svg" alt="link to an optimization guide" />
|
||||
</a>
|
||||
</div>
|
||||
<div class="HP_flow-arrow" >
|
||||
<img src="_static/images/OV_flow_arrow.svg" alt="" />
|
||||
</div>
|
||||
<div class="HP_flow-btn">
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_deployment_guide_introduction.html">
|
||||
<img src="_static/images/OV_flow_deployment_hvr.svg" alt="link to deployment introduction" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
@endsphinxdirective
|
||||
@@ -1,6 +1,6 @@
|
||||
# How to Implement Custom GPU Operations {#openvino_docs_Extensibility_UG_GPU}
|
||||
|
||||
To enable operations not supported by OpenVINO out of the box, you may need an extension for an OpenVINO operation set, and a custom kernel for the device you will target. This page describes custom kernel support for the GPU device.
|
||||
To enable operations not supported by OpenVINO™ out of the box, you may need an extension for OpenVINO operation set, and a custom kernel for the device you will target. This article describes custom kernel support for the GPU device.
|
||||
|
||||
The GPU codepath abstracts many details about OpenCL. You need to provide the kernel code in OpenCL C and an XML configuration file that connects the kernel and its parameters to the parameters of the operation.
|
||||
|
||||
@@ -30,7 +30,7 @@ $ ./classification_sample -m <path_to_model>/bvlc_alexnet_fp16.xml -i ./validati
|
||||
## Configuration File Format <a name="config-file-format"></a>
|
||||
|
||||
The configuration file is expected to follow the `.xml` file structure
|
||||
with a node of the `CustomLayer` type for every custom operation you provide.
|
||||
with a node of the type `CustomLayer` for every custom operation you provide.
|
||||
|
||||
The definitions described in the sections below use the following notations:
|
||||
|
||||
@@ -43,44 +43,44 @@ Notation | Description
|
||||
|
||||
### CustomLayer Node and Sub-Node Structure
|
||||
|
||||
`CustomLayer` node contains the entire configuration for a single custom operation.
|
||||
The `CustomLayer` node contains the entire configuration for a single custom operation.
|
||||
|
||||
| Attribute Name |\# | Description |
|
||||
|-----|-----|-----|
|
||||
| `name` | (1) | The name of the operation type to be used. This name should be identical to the type used in the IR.|
|
||||
| `type` | (1) | Must be `SimpleGPU`. |
|
||||
| `version` | (1) | Must be `1`. |
|
||||
| `name` | (1) | The name of the operation type to be used. This name should be identical to the type used in the OpenVINO IR.|
|
||||
| `type` | (1) | Must be `SimpleGPU`. |
|
||||
| `version` | (1) | Must be `1`. |
|
||||
|
||||
**Sub-nodes**: `Kernel` (1), `Buffers` (1), `CompilerOptions` (0+),
|
||||
`WorkSizes` (0/1)
|
||||
|
||||
### Kernel Node and Sub-Node Structure
|
||||
|
||||
`Kernel` node contains all kernel source code configuration.
|
||||
The `Kernel` node contains all kernel source code configuration.
|
||||
|
||||
**Sub-nodes**: `Source` (1+), `Define` (0+)
|
||||
|
||||
### Source Node and Sub-Node Structure
|
||||
|
||||
`Source` node points to a single OpenCL source file.
|
||||
The `Source` node points to a single OpenCL source file.
|
||||
|
||||
| Attribute Name | \# |Description|
|
||||
|-----|-----|-----|
|
||||
| `filename` | (1) | Name of the file containing OpenCL source code. Note that the path is relative to your executable. Multiple source nodes will have their sources concatenated in order. |
|
||||
| `filename` | (1) | Name of the file containing OpenCL source code. The path is relative to your executable. Multiple source nodes will have their sources concatenated in order. |
|
||||
|
||||
**Sub-nodes**: None
|
||||
|
||||
### Define Node and Sub-Node Structure
|
||||
|
||||
`Define` node configures a single `#‍define` instruction to be added to
|
||||
The `Define` node configures a single `#‍define` instruction to be added to
|
||||
the sources during compilation (JIT).
|
||||
|
||||
| Attribute Name | \# | Description |
|
||||
|------|-------|------|
|
||||
| `name` | (1) | The name of the defined JIT. For static constants, this can include the value as well, which is taken as a string. |
|
||||
| `param` | (0/1) | This parameter value is used as the value of this JIT definition. |
|
||||
| `type` | (0/1) | The parameter type. Accepted values: `int`, `float`, and `int[]`, `float[]` for arrays. |
|
||||
| `default` | (0/1) | The default value to be used if the specified parameters are missing from the operation in the IR. |
|
||||
| `name` | (1) | The name of the defined JIT. For static constants, this can include the value as well, which is taken as a string. |
|
||||
| `param` | (0/1) | This parameter value is used as the value of this JIT definition. |
|
||||
| `type` | (0/1) | The parameter type. Accepted values: `int`, `float`, and `int[]`, `float[]` for arrays. |
|
||||
| `default` | (0/1) | The default value to be used if the specified parameters are missing from the operation in the OpenVINO IR. |
|
||||
|
||||
**Sub-nodes:** None
|
||||
|
||||
@@ -89,37 +89,37 @@ The resulting JIT has the following form:
|
||||
|
||||
### Buffers Node and Sub-Node Structure
|
||||
|
||||
`Buffers` node configures all input/output buffers for the OpenCL entry
|
||||
The `Buffers` node configures all input/output buffers for the OpenCL entry
|
||||
function. No buffers node structure exists.
|
||||
|
||||
**Sub-nodes:** `Data` (0+), `Tensor` (1+)
|
||||
|
||||
### Data Node and Sub-Node Structure
|
||||
|
||||
`Data` node configures a single input with static data, for example,
|
||||
The `Data` node configures a single input with static data, for example,
|
||||
weights or biases.
|
||||
|
||||
| Attribute Name | \# | Description |
|
||||
|----|-----|------|
|
||||
| `name` | (1) | Name of a blob attached to an operation in the IR |
|
||||
| `arg-index` | (1) | 0-based index in the entry function arguments to be bound to |
|
||||
| `name` | (1) | Name of a blob attached to an operation in the OpenVINO IR. |
|
||||
| `arg-index` | (1) | 0-based index in the entry function arguments to be bound to. |
|
||||
|
||||
**Sub-nodes**: None
|
||||
|
||||
### Tensor Node and Sub-Node Structure
|
||||
|
||||
`Tensor` node configures a single input or output tensor.
|
||||
The `Tensor` node configures a single input or output tensor.
|
||||
|
||||
| Attribute Name | \# | Description |
|
||||
|------|-------|-------|
|
||||
| `arg-index` | (1) | 0-based index in the entry function arguments to be bound to. |
|
||||
| `type` | (1) | `input` or `output` |
|
||||
| `port-index` | (1) | 0-based index in the operation input/output ports in the IR |
|
||||
| `format` | (0/1) | Data layout declaration for the tensor. Accepted values: `BFYX`, `BYXF`, `YXFB`, `FYXB`, and same values in all lowercase. Default value: `BFYX` |
|
||||
| `port-index` | (1) | 0-based index in the operation input/output ports in the OpenVINO IR |
|
||||
| `format` | (0/1) | Data layout declaration for the tensor. Accepted values: `BFYX`, `BYXF`, `YXFB`, `FYXB`(also in lowercase). The default value: `BFYX` |
|
||||
|
||||
### CompilerOptions Node and Sub-Node Structure
|
||||
|
||||
`CompilerOptions` node configures the compilation flags for the OpenCL
|
||||
The `CompilerOptions` node configures the compilation flags for the OpenCL
|
||||
sources.
|
||||
|
||||
| Attribute Name | \# | Description |
|
||||
@@ -130,20 +130,20 @@ sources.
|
||||
|
||||
### WorkSizes Node and Sub-Node Structure
|
||||
|
||||
`WorkSizes` node configures the global/local work sizes to be used when
|
||||
The `WorkSizes` node configures the global/local work sizes to be used when
|
||||
queuing an OpenCL program for execution.
|
||||
|
||||
| Attribute Name | \# | Description |
|
||||
|-----|------|-----|
|
||||
| `global`<br>`local` | (0/1)<br>(0/1) | An array of up to three integers or formulas for defining OpenCL work-sizes to be used during execution.<br> The formulas can use the values of the B,F,Y,X dimensions and contain the operators: +,-,/,\*,%. All operators are evaluated in integer arithmetic. <br>Default value: `global=”B*F*Y*X” local=””` |
|
||||
| `dim` | (0/1) | A tensor to take the work-size from. Accepted values: `input N`, `output`, where `N` is an index of input tensor starting with 0. Default value: `output` |
|
||||
| `dim` | (0/1) | A tensor to take the work-size from. Accepted values: `input N`, `output`, where `N` is an index of input tensor starting with 0. The default value: `output` |
|
||||
|
||||
**Sub-nodes**: None
|
||||
|
||||
## Example Configuration File
|
||||
|
||||
The following code sample provides an example configuration file in XML
|
||||
format. For information on the configuration file structure, see
|
||||
format. For information on the configuration file structure, see the
|
||||
[Configuration File Format](#config-file-format).
|
||||
```xml
|
||||
<CustomLayer name="ReLU" type="SimpleGPU" version="1">
|
||||
@@ -169,22 +169,22 @@ For an example, see [Example Kernel](#example-kernel).
|
||||
|
||||
| Name | Value |
|
||||
|---|---|
|
||||
| `NUM_INPUTS` | Number of the input tensors bound to this kernel |
|
||||
| `GLOBAL_WORKSIZE` | An array of global work sizes used to execute this kernel |
|
||||
| `GLOBAL_WORKSIZE_SIZE` | The size of the `GLOBAL_WORKSIZE` array |
|
||||
| `LOCAL_WORKSIZE` | An array of local work sizes used to execute this kernel |
|
||||
| `LOCAL_WORKSIZE_SIZE` | The size of the `LOCAL_WORKSIZE` array |
|
||||
| `<TENSOR>_DIMS`| An array of the tensor dimension sizes. Always ordered as `BFYX` |
|
||||
| `NUM_INPUTS` | Number of the input tensors bound to this kernel. |
|
||||
| `GLOBAL_WORKSIZE` | An array of global work sizes used to execute this kernel. |
|
||||
| `GLOBAL_WORKSIZE_SIZE` | The size of the `GLOBAL_WORKSIZE` array. |
|
||||
| `LOCAL_WORKSIZE` | An array of local work sizes used to execute this kernel. |
|
||||
| `LOCAL_WORKSIZE_SIZE` | The size of the `LOCAL_WORKSIZE` array. |
|
||||
| `<TENSOR>_DIMS`| An array of the tensor dimension sizes. Always ordered as `BFYX`. |
|
||||
| `<TENSOR>_DIMS_SIZE`| The size of the `<TENSOR>_DIMS` array.|
|
||||
| `<TENSOR>_TYPE`| The datatype of the tensor: `float`, `half`, or `char`|
|
||||
| `<TENSOR>_TYPE`| The datatype of the tensor: `float`, `half`, or `char`. |
|
||||
| `<TENSOR>_FORMAT_<TENSOR_FORMAT>` | The format of the tensor, BFYX, BYXF, YXFB , FYXB, or ANY. The format is concatenated to the defined name. You can use the tensor format to define codepaths in your code with `#‍ifdef/#‍endif`. |
|
||||
| `<TENSOR>_LOWER_PADDING` | An array of padding elements used for the tensor dimensions before they start. Always ordered as BFYX.|
|
||||
| `<TENSOR>_LOWER_PADDING_SIZE` | The size of the `<TENSOR>_LOWER_PADDING` array |
|
||||
| `<TENSOR>_LOWER_PADDING_SIZE` | The size of the `<TENSOR>_LOWER_PADDING` array. |
|
||||
| `<TENSOR>_UPPER_PADDING` | An array of padding elements used for the tensor dimensions after they end. Always ordered as BFYX. |
|
||||
| `<TENSOR>_UPPER_PADDING_SIZE` | The size of the `<TENSOR>_UPPER_PADDING` array |
|
||||
| `<TENSOR>_PITCHES` | The offset (in elements) between adjacent elements in each dimension. Always ordered as BFYX.|
|
||||
| `<TENSOR>_PITCHES_SIZE`| The size of the `<TENSOR>_PITCHES` array |
|
||||
| `<TENSOR>_OFFSET`| The number of elements from the start of the tensor to the first valid element, bypassing the lower padding. |
|
||||
| `<TENSOR>_UPPER_PADDING_SIZE` | The size of the `<TENSOR>_UPPER_PADDING` array. |
|
||||
| `<TENSOR>_PITCHES` | The offset (in elements) between adjacent elements in each dimension. Always ordered as BFYX. |
|
||||
| `<TENSOR>_PITCHES_SIZE`| The size of the `<TENSOR>_PITCHES` array. |
|
||||
| `<TENSOR>_OFFSET`| The number of elements from the start of the tensor to the first valid element, bypassing the lower padding. |
|
||||
|
||||
All `<TENSOR>` values are automatically defined for every tensor
|
||||
bound to this operation, such as `INPUT0`, `INPUT1`, and `OUTPUT0`, as shown
|
||||
@@ -219,7 +219,7 @@ __kernel void example_relu_kernel(
|
||||
```
|
||||
|
||||
|
||||
> **NOTE**: As described in the previous section, all items like
|
||||
> **NOTE**: As described in the previous section, all items such as the
|
||||
> `INPUT0_TYPE` are actually defined as OpenCL (pre-)compiler inputs by
|
||||
> OpenVINO for efficiency reasons. See the [Debugging
|
||||
> Tips](#debugging-tips) below for information on debugging the results.
|
||||
@@ -234,5 +234,4 @@ your output can be truncated to fit the buffer. Also, because of
|
||||
buffering, you actually get an entire buffer of output when the
|
||||
execution ends.<br>
|
||||
|
||||
For more information, refer to the [printf
|
||||
Function](https://www.khronos.org/registry/OpenCL/sdk/1.2/docs/man/xhtml/printfFunction.html).
|
||||
For more information, refer to the [printf Function](https://www.khronos.org/registry/OpenCL/sdk/1.2/docs/man/xhtml/printfFunction.html).
|
||||
|
||||
@@ -11,6 +11,13 @@
|
||||
openvino_docs_Extensibility_UG_GPU
|
||||
openvino_docs_Extensibility_UG_VPU_Kernel
|
||||
openvino_docs_MO_DG_prepare_model_customize_model_optimizer_Customize_Model_Optimizer
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
openvino_docs_transformations
|
||||
OpenVINO Plugin Developer Guide <openvino_docs_ie_plugin_dg_overview>
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
@@ -19,50 +26,49 @@ TensorFlow, PyTorch, ONNX, PaddlePaddle, Apache MXNet, Caffe, and Kaldi. The lis
|
||||
each of the supported frameworks. To see the operations supported by your framework, refer to
|
||||
[Supported Framework Operations](../MO_DG/prepare_model/Supported_Frameworks_Layers.md).
|
||||
|
||||
Custom operations, that is those not included in the list, are not recognized by OpenVINO™ out-of-the-box. The need for a custom operation may appear in two main cases:
|
||||
Custom operations, which are not included in the list, are not recognized by OpenVINO out-of-the-box. The need for custom operation may appear in two cases:
|
||||
|
||||
1. A regular framework operation that is new or rarely used, which is why it hasn’t been implemented in OpenVINO yet.
|
||||
1. A new or rarely used regular framework operation is not supported in OpenVINO yet.
|
||||
|
||||
2. A new user operation that was created for some specific model topology by a model author using framework extension capabilities.
|
||||
2. A new user operation that was created for some specific model topology by the author of the model using framework extension capabilities.
|
||||
|
||||
Importing models with such operations requires additional steps. This guide illustrates the workflow for running inference on models featuring custom operations, allowing you to plug in your own implementation for them. OpenVINO™ Extensibility API lets you add support for those custom operations and use one implementation for Model Optimizer and OpenVINO™ Runtime.
|
||||
Importing models with such operations requires additional steps. This guide illustrates the workflow for running inference on models featuring custom operations. This allows plugging in your own implementation for them. OpenVINO Extensibility API enables adding support for those custom operations and using one implementation for Model Optimizer and OpenVINO Runtime.
|
||||
|
||||
Defining a new custom operation basically consist of two parts:
|
||||
Defining a new custom operation basically consists of two parts:
|
||||
|
||||
1. Definition of operation semantics in OpenVINO, the code that describes how this operation should be inferred consuming input tensor(s) and producing output tensor(s). How to implement execution kernels for [GPU](./GPU_Extensibility.md) and [VPU](./VPU_Extensibility.md) is described in separate guides.
|
||||
1. Definition of operation semantics in OpenVINO, the code that describes how this operation should be inferred consuming input tensor(s) and producing output tensor(s). The implementation of execution kernels for [GPU](./GPU_Extensibility.md) and [VPU](./VPU_Extensibility.md) is described in separate guides.
|
||||
|
||||
2. Mapping rule that facilitates conversion of framework operation representation to OpenVINO defined operation semantics.
|
||||
|
||||
The first part is required for inference, the second part is required for successful import of a model containing such operations from the original framework model format. There are several options to implement each part, the next sections will describe them in detail.
|
||||
The first part is required for inference. The second part is required for successful import of a model containing such operations from the original framework model format. There are several options to implement each part. The following sections will describe them in detail.
|
||||
|
||||
## Definition of Operation Semantics
|
||||
|
||||
If the custom operation can be mathematically represented as a combination of exiting OpenVINO operations and such decomposition gives desired performance, then low-level operation implementation is not required. Refer to the latest OpenVINO operation set, when deciding feasibility of such decomposition. You can use any valid combination of exiting operations. The next section of this document describes the way to map a custom operation.
|
||||
|
||||
If the custom operation can be mathematically represented as a combination of exiting OpenVINO operations and such decomposition gives desired performance, then low-level operation implementation is not required. When deciding feasibility of such decomposition refer to the latest OpenVINO operation set. You can use any valid combination of exiting operations. How to map a custom operation is described in the next section of this document.
|
||||
If such decomposition is not possible or appears too bulky with a large number of constituent operations that do not perform well, then a new class for the custom operation should be implemented, as described in the [Custom Operation Guide](add_openvino_ops.md).
|
||||
|
||||
If such decomposition is not possible or appears too bulky with lots of consisting operations that are not performing well, then a new class for the custom operation should be implemented as described in the [Custom Operation Guide](add_openvino_ops.md).
|
||||
|
||||
Prefer implementing a custom operation class if you already have a generic C++ implementation of operation kernel. Otherwise try to decompose the operation first as described above and then after verifying correctness of inference and resulting performance, optionally invest to implementing bare metal C++ implementation.
|
||||
You might prefer implementing a custom operation class if you already have a generic C++ implementation of operation kernel. Otherwise, try to decompose the operation first, as described above. Then, after verifying correctness of inference and resulting performance, you may move on to optional implementation of Bare Metal C++.
|
||||
|
||||
## Mapping from Framework Operation
|
||||
|
||||
Depending on model format used for import, mapping of custom operation is implemented differently, choose one of:
|
||||
Mapping of custom operation is implemented differently, depending on model format used for import. You may choose one of the following:
|
||||
|
||||
1. If model is represented in ONNX (including models exported from Pytorch in ONNX) or PaddlePaddle formats, then one of the classes from [Frontend Extension API](frontend_extensions.md) should be used. It consists of several classes available in C++ which can be used with Model Optimizer `--extensions` option or when model is imported directly to OpenVINO run-time using read_model method. Python API is also available for run-time model importing.
|
||||
1. If a model is represented in the ONNX (including models exported from Pytorch in ONNX) or PaddlePaddle formats, then one of the classes from [Frontend Extension API](frontend_extensions.md) should be used. It consists of several classes available in C++ which can be used with the `--extensions` option in Model Optimizer or when a model is imported directly to OpenVINO runtime using the `read_model` method. Python API is also available for runtime model import.
|
||||
|
||||
2. If model is represented in TensorFlow, Caffe, Kaldi or MXNet formats, then [Model Optimizer Extensions](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) should be used. This approach is available for model conversion in Model Optimizer only.
|
||||
2. If a model is represented in the TensorFlow, Caffe, Kaldi or MXNet formats, then [Model Optimizer Extensions](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) should be used. This approach is available for model conversion in Model Optimizer only.
|
||||
|
||||
Existing of two approaches simultaneously is explained by two different types of frontends used for model conversion in OpenVINO: new frontends (ONNX, PaddlePaddle) and legacy frontends (TensorFlow, Caffe, Kaldi and Apache MXNet). Model Optimizer can use both front-ends in contrast to the direct import of model with `read_model` method which can use new frontends only. Follow one of the appropriate guides referenced above to implement mappings depending on framework frontend.
|
||||
|
||||
If you are implementing extensions for ONNX or PaddlePaddle new frontends and plan to use Model Optimizer `--extension` option for model conversion, then the extensions should be
|
||||
If you are implementing extensions for new ONNX or PaddlePaddle frontends and plan to use the `--extensions` option in Model Optimizer for model conversion, then the extensions should be:
|
||||
|
||||
1. Implemented in C++ only
|
||||
1. Implemented in C++ only.
|
||||
|
||||
2. Compiled as a separate shared library (see details how to do that later in this guide).
|
||||
2. Compiled as a separate shared library (see details on how to do this further in this guide).
|
||||
|
||||
You cannot write new frontend extensions using Python API if you plan to use them with Model Optimizer.
|
||||
Model Optimizer does not support new frontend extensions written in Python API.
|
||||
|
||||
Remaining part of this guide uses Frontend Extension API applicable for new frontends.
|
||||
Remaining part of this guide describes application of Frontend Extension API for new frontends.
|
||||
|
||||
## Registering Extensions
|
||||
|
||||
@@ -70,11 +76,11 @@ A custom operation class and a new mapping frontend extension class object shoul
|
||||
|
||||
> **NOTE**: This documentation is written based on the [Template extension](https://github.com/openvinotoolkit/openvino/tree/master/src/core/template_extension/new), which demonstrates extension development details based on minimalistic `Identity` operation that is a placeholder for your real custom operation. You can review the complete code, which is fully compilable, to see how it works.
|
||||
|
||||
To load the extensions to the `ov::Core` object, use the `ov::Core::add_extension` method, this method allows to load library with extensions or extensions from the code.
|
||||
Use the `ov::Core::add_extension` method to load the extensions to the `ov::Core` object. This method allows loading library with extensions or extensions from the code.
|
||||
|
||||
### Load extensions to core
|
||||
### Load Extensions to Core
|
||||
|
||||
Extensions can be loaded from code with `ov::Core::add_extension` method:
|
||||
Extensions can be loaded from a code with the `ov::Core::add_extension` method:
|
||||
|
||||
@sphinxtabset
|
||||
|
||||
@@ -92,7 +98,7 @@ Extensions can be loaded from code with `ov::Core::add_extension` method:
|
||||
|
||||
@endsphinxtabset
|
||||
|
||||
`Identity` is custom operation class defined in [Custom Operation Guide](add_openvino_ops.md). This is enough to enable reading IR which uses `Identity` extension operation emitted by Model Optimizer. To be able to load original model directly to the runtime, you need to add also a mapping extension:
|
||||
The `Identity` is a custom operation class defined in [Custom Operation Guide](add_openvino_ops.md). This is sufficient to enable reading OpenVINO IR which uses the `Identity` extension operation emitted by Model Optimizer. In order to load original model directly to the runtime, add a mapping extension:
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
@@ -109,33 +115,35 @@ Extensions can be loaded from code with `ov::Core::add_extension` method:
|
||||
:fragment: add_frontend_extension
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
When Python API is used there is no way to implement a custom OpenVINO operation. Also, even if custom OpenVINO operation is implemented in C++ and loaded to the runtime through a shared library, there is still no way to add a frontend mapping extension that refers to this custom operation. Use C++ shared library approach to implement both operations semantics and framework mapping in this case.
|
||||
|
||||
You still can use Python for operation mapping and decomposition in case if operations from the standard OpenVINO operation set is used only.
|
||||
When Python API is used, there is no way to implement a custom OpenVINO operation. Even if custom OpenVINO operation is implemented in C++ and loaded into the runtime by a shared library, there is still no way to add a frontend mapping extension that refers to this custom operation. In this case, use C++ shared library approach to implement both operations semantics and framework mapping.
|
||||
|
||||
### Create library with extensions
|
||||
Python can still be used to map and decompose operations when only operations from the standard OpenVINO operation set are used.
|
||||
|
||||
You need to create extension library in the following cases:
|
||||
- Convert model with custom operations in Model Optimizer
|
||||
- Load model with custom operations in Python application. It is applicable for both framework model and IR.
|
||||
- Loading models with custom operations in tools that support loading extensions from a library, for example `benchmark_app`.
|
||||
### Create a Library with Extensions
|
||||
|
||||
If you want to create an extension library, for example in order to load these extensions to the Model Optimizer, you need to do next steps:
|
||||
Create an entry point for extension library. OpenVINO™ provides an `OPENVINO_CREATE_EXTENSIONS()` macro, which allows to define an entry point to a library with OpenVINO™ Extensions.
|
||||
This macro should have a vector of all OpenVINO™ Extensions as an argument.
|
||||
An extension library should be created in the following cases:
|
||||
|
||||
Based on that, the declaration of an extension class can look as follows:
|
||||
- Conversion of a model with custom operations in Model Optimizer.
|
||||
- Loading a model with custom operations in a Python application. This applies to both framework model and OpenVINO IR.
|
||||
- Loading models with custom operations in tools that support loading extensions from a library, for example the `benchmark_app`.
|
||||
|
||||
To create an extension library, for example, to load the extensions into Model Optimizer, perform the following:
|
||||
|
||||
1. Create an entry point for extension library. OpenVINO provides the `OPENVINO_CREATE_EXTENSIONS()` macro, which allows to define an entry point to a library with OpenVINO Extensions.
|
||||
This macro should have a vector of all OpenVINO Extensions as an argument.
|
||||
|
||||
Based on that, the declaration of an extension class might look like the following:
|
||||
|
||||
@snippet template_extension/new/ov_extension.cpp ov_extension:entry_point
|
||||
|
||||
To configure the build of your extension library, use the following CMake script:
|
||||
2. Configure the build of your extension library, using the following CMake script:
|
||||
|
||||
@snippet template_extension/new/CMakeLists.txt cmake:extension
|
||||
|
||||
This CMake script finds the OpenVINO™ using the `find_package` CMake command.
|
||||
This CMake script finds OpenVINO, using the `find_package` CMake command.
|
||||
|
||||
To build the extension library, run the commands below:
|
||||
3. Build the extension library, running the commands below:
|
||||
|
||||
```sh
|
||||
$ cd src/core/template_extension/new
|
||||
@@ -145,7 +153,7 @@ $ cmake -DOpenVINO_DIR=<OpenVINO_DIR> ../
|
||||
$ cmake --build .
|
||||
```
|
||||
|
||||
After the build you can use path to your extension library to load your extensions to OpenVINO™ Runtime:
|
||||
4. After the build, you may use the path to your extension library to load your extensions to OpenVINO Runtime:
|
||||
|
||||
@sphinxtabset
|
||||
|
||||
@@ -168,4 +176,3 @@ After the build you can use path to your extension library to load your extensio
|
||||
* [OpenVINO Transformations](./ov_transformations.md)
|
||||
* [Using OpenVINO Runtime Samples](../OV_Runtime_UG/Samples_Overview.md)
|
||||
* [Hello Shape Infer SSD sample](../../samples/cpp/hello_reshape_ssd/README.md)
|
||||
|
||||
|
||||
@@ -2,9 +2,10 @@
|
||||
|
||||
To enable operations not supported by OpenVINO™ out of the box, you need a custom extension for Model Optimizer, a custom nGraph operation set, and a custom kernel for the device you will target. This page describes custom kernel support for one of the VPUs, the Intel® Neural Compute Stick 2 device, which uses the MYRIAD device plugin.
|
||||
|
||||
> **NOTES:**
|
||||
> * OpenCL\* custom layer support is available in the preview mode.
|
||||
> **NOTE:**
|
||||
> * OpenCL custom layer support is available in the preview mode.
|
||||
> * This section assumes you are familiar with developing kernels using OpenCL.
|
||||
|
||||
To customize your topology with an OpenCL layer, carry out the tasks described on this page:
|
||||
|
||||
1. Write and compile your OpenCL code with the standalone offline OpenCL compiler (`clc`).
|
||||
@@ -13,9 +14,9 @@ To customize your topology with an OpenCL layer, carry out the tasks described o
|
||||
|
||||
## Compile OpenCL code for VPU (Intel® Neural Compute Stick 2)
|
||||
|
||||
> **NOTE**: OpenCL compiler, targeting Intel® Neural Compute Stick 2 for the SHAVE* processor only, is redistributed with OpenVINO.
|
||||
OpenCL support is provided by ComputeAorta* and is distributed under a license agreement between Intel® and Codeplay* Software Ltd.
|
||||
The OpenCL toolchain for the Intel® Neural Compute Stick 2 supports offline compilation only, so first compile OpenCL C code using the standalone `clc` compiler. You can find the compiler binary at `<INSTALL_DIR>/tools/cl_compiler`.
|
||||
> **NOTE**: OpenCL compiler, targeting Intel® Neural Compute Stick 2 for the SHAVE processor only, is redistributed with OpenVINO.
|
||||
OpenCL support is provided by ComputeAorta and is distributed under a license agreement between Intel® and Codeplay Software Ltd.
|
||||
The OpenCL toolchain for the Intel® Neural Compute Stick 2 supports offline compilation only. Start with compiling OpenCL C code, using the standalone `clc` compiler. You can find the compiler binary at `<INSTALL_DIR>/tools/cl_compiler`.
|
||||
|
||||
> **NOTE**: By design, custom OpenCL layers support any OpenCL kernels written assuming OpenCL version 1.2. It also supports half float extension and is optimized for this type, because it is a native type for Intel® Movidius™ VPUs.
|
||||
1. Prior to running a compilation, make sure that the following variables are set:
|
||||
@@ -63,7 +64,7 @@ Each custom layer is described with the `CustomLayer` node. It has the following
|
||||
- Node `Source` must contain the following attributes:
|
||||
- `filename` – The path to a compiled binary relative to the XML configuration file.
|
||||
- Sub-node `Parameters` – Describes parameters bindings. For more information, see the description below.
|
||||
- Sub-node `WorkSizes` – Describes local and global work group sizes and the source for dimension deduction as a pair `direction,port`. In the example above, the work group is described relatively to the dimension of the input tensor that comes through port 0 in the IR. `global` and `local` work group configurations support any simple math expressions with +,-,\*,/, and () from `B`(batch), `Y`(height), `X`(width) and `F`(channels).
|
||||
- Sub-node `WorkSizes` – Describes local and global work group sizes and the source for dimension deduction as a pair `direction,port`. In the example above, the work group is described relatively to the dimension of the input tensor that comes through port 0 in the OpenVINO IR. Work group configurations, namely `global` and `local` support any simple math expressions with +,-,\*,/, and () from `B`(batch), `Y`(height), `X`(width) and `F`(channels).
|
||||
- Sub-node `Where` – Allows to customize bindings with the `key="value"` attribute. For example, to substitute only 3x3 convolutions, write `<Where kernel="3,3"/>` in the binding xml.
|
||||
|
||||
Parameter description supports `Tensor` of one of tensor types such as `input`, `output`, `input_buffer`, `output_buffer` or `data`, `Scalar`, or `Data` nodes and has the following format:
|
||||
@@ -77,7 +78,7 @@ Each custom layer is described with the `CustomLayer` node. It has the following
|
||||
- `type` – Node type: `input_buffer` or `output_buffer`. Use the appropriate type to bind multiple kernels that correspond to different stages of the same layer.
|
||||
- `port-index` – The unique identifier to bind by.
|
||||
- `dim` – The dim source with the same `direction,port` format used for `WorkSizes` bindings.
|
||||
- `size` – Amount of bytes needed. Current expression syntax supports only expression over dimensions of over selected input/output tensor or constants and might be expended in the future.
|
||||
- `size` – Amount of bytes needed. The current expression syntax supports only expressions over dimensions of the selected input/output tensor or constants and might be extended in the future.
|
||||
|
||||
Here is an example of multi-stage MVN layer binding:
|
||||
```xml
|
||||
@@ -107,7 +108,7 @@ Each custom layer is described with the `CustomLayer` node. It has the following
|
||||
<WorkSizes dim="output,0" global="((Y+7)/8)*8,F,1" local="8,1,1"/>
|
||||
</CustomLayer>
|
||||
```
|
||||
- Each `Tensor` node that has the type `data` must contain the following attributes:
|
||||
- Each `Tensor` node that has the `data` type must contain the following attributes:
|
||||
- `source` – A name of the blob as it is in the IR. Typical example is `weights` for convolution.
|
||||
- `format` – Specifies the channel order in the tensor. Optional conversion layers are generated if the custom layer format is not compatible with the formats of the neighboring layers.
|
||||
```xml
|
||||
@@ -133,7 +134,7 @@ Each custom layer is described with the `CustomLayer` node. It has the following
|
||||
- Each `Data` node must contain the following attributes:
|
||||
- `arg-name` – The name of a kernel parameter in the kernel signature.
|
||||
- `type` – Node type. Currently, `local_data` is the only supported value, which defines buffer allocated in fast local on-chip memory. It is limited to 100KB for all `__local` and
|
||||
`__private` arrays defined inside the kernel as well as all `__local` parameters passed to the kernel. Note that a manual-DMA extension requires double buffering.
|
||||
`__private` arrays defined inside the kernel as well as all `__local` parameters passed to the kernel. A manual-DMA extension requires double buffering.
|
||||
If the custom layer is detected to run out of local memory, the inference fails.
|
||||
- `dim` – The dim source with the same `direction,port` format used for `WorkSizes` bindings.
|
||||
- `size` – Amount of bytes needed. The current expression syntax supports only expressions over dimensions of the selected input/output tensor or constants and may be extended in the future.
|
||||
@@ -158,14 +159,13 @@ Each custom layer is described with the `CustomLayer` node. It has the following
|
||||
## Pass Configuration File to OpenVINO™ Runtime
|
||||
|
||||
> **NOTE**: If both native and custom layer implementations are present, the custom kernel has a priority over the native one.
|
||||
Before loading the network that features the custom layers, provide a separate configuration file and load it using the ov::Core::set_property() method with the "CONFIG_KEY" key and the configuration file name as a value before loading the network that uses custom operations to the plugin:
|
||||
Before loading the network that features the custom layers, provide a separate configuration file and load it using the `ov::Core::set_property()` method. Use the "CONFIG_KEY" key and the configuration file name as a value before loading the network that uses custom operations to the plugin:
|
||||
|
||||
@snippet docs/snippets/vpu/custom_op.cpp part0
|
||||
|
||||
## Optimizing Kernels with OpenCL for VPU (Intel® Neural Compute Stick 2)
|
||||
|
||||
This section provides optimization guidelines on writing custom layers with OpenCL for VPU devices. Knowledge about general OpenCL
|
||||
programming model and OpenCL kernel language is assumed and not a subject of this section. The OpenCL model mapping to VPU is described in the table below.
|
||||
This section provides optimization guidelines on writing custom layers with OpenCL for VPU devices. Knowledge about general OpenCL programming model and OpenCL kernel language is assumed and not a subject of this section. The OpenCL model mapping to VPU is described in the table below.
|
||||
|
||||
| OpenCL Model | VPU Mapping|
|
||||
|-----|----|
|
||||
@@ -175,41 +175,33 @@ programming model and OpenCL kernel language is assumed and not a subject of thi
|
||||
| Global memory | Mapped to DDR, used to pass execution preserved parameters for inputs, outputs, and blobs |
|
||||
| Work group | Executed on a single SHAVE core iterating over multiple work items |
|
||||
|
||||
Note that by the OpenCL specification, the work group execution order is not specified. This means that it is your
|
||||
responsibility to ensure that race conditions among work groups are not introduced. Custom layer runtime spits evenly
|
||||
work grid among available compute resources and executes them in an arbitrary order. This static scheduling approach works best if the load is evenly spread out across work groups, which is a typical case for Deep Learning kernels. The following guidelines are recommended to use for work group partitioning:
|
||||
The work group execution order is not defined in the OpenCL specifications. This means it is your responsibility to ensure that race conditions among work groups are not introduced. Custom layer runtime distributes work grid evenly among available compute resources and executes them in an arbitrary order. This static scheduling approach works best if the load is evenly spread out across work groups, which is a typical case for Deep Learning kernels. The following guidelines are recommended to use for work group partitioning:
|
||||
|
||||
1. Split work evenly across work groups.
|
||||
1. Distribute work evenly across work groups.
|
||||
2. Adjust work group granularity to maintain equal workload for all compute cores.
|
||||
3. Set the maximum number of cores using the `max-shaves` attribute for the `CustomLayer` node. This keeps more resources for the rest of topology. It is also useful if the kernel scalability reached its limits, which may happen while optimizing memory bound kernels or kernels with poor parallelization.
|
||||
4. Try an alternate data layout (`BFXY`/`BYXF`) for the kernel if it improves work group partitioning or data access patterns.
|
||||
Consider not just specific layer boost, but full topology performance because data conversion layers would be automatically inserted
|
||||
as appropriate.
|
||||
4. Try an alternate data layout (`BFXY`/`BYXF`) for the kernel to see if it improves work group partitioning or data access patterns.
|
||||
Consider not just specific layer boost, but also full topology performance because data conversion layers will be automatically inserted as appropriate.
|
||||
|
||||
Offline OpenCL compiler (`clc`) features automatic vectorization over `get_global_id(0)` usage, if uniform access is detected.
|
||||
For example, the kernel below could be automatically vectorized:
|
||||
```cpp
|
||||
__kernel void cvtf32f16(__global float* restrict inImage, __global half* restrict outImage,
|
||||
float scale, float bais)
|
||||
float scale, float bias)
|
||||
{
|
||||
int idx = get_global_id(0) + get_global_id(1) * get_global_size(0) + get_global_id(2) * get_global_size(0) * get_global_size(1);
|
||||
outImage[idx] = convert_half(inImage[idx]*scale+bais);
|
||||
outImage[idx] = convert_half(inImage[idx]*scale+bias);
|
||||
}
|
||||
```
|
||||
However, this work-group based vectorizer (WGV) conflicts with the default LLVM vectorizer based on superword level parallelism
|
||||
(SLP) for the current compiler version. Manual vectorization is recommended to provide the best performance for non-uniform code
|
||||
patterns. WGV works if and only if vector types are not used in the code.
|
||||
However, this work-group based vectorizer (WGV) conflicts with the default LLVM vectorizer based on superword level parallelism (SLP) for the current compiler version. Manual vectorization is recommended to provide the best performance for non-uniform code patterns. WGV works if and only if vector types are not used in the code.
|
||||
|
||||
Here is a short list of optimization tips:
|
||||
|
||||
1. Help auto-vectorizer ensure non-aliasing pointers for kernel parameters by putting `restrict` where possible.
|
||||
- This can give a performance boost, especially for kernels with unrolling, like `ocl_grn` from the example below.
|
||||
- Place `restrict` markers for kernels with manually vectorized codes. In the `ocl_grn` kernel below, the unrolled version without `restrict` is up to 20% slower than the most optimal one, which combines unrolling and `restrict`.
|
||||
2. Put `#‍pragma unroll N` to your loop header. The compiler does not trigger unrolling by default, so it is your responsibility to
|
||||
annotate the code with pragmas as appropriate. The `ocl_grn` version with `#‍pragma unroll 4` is up to 50% faster, most of which comes from unrolling the first loop, because LLVM, in general, is better in scheduling 3-stage loops (load-compute-store), while the fist loop
|
||||
`variance += (float)(src_data[c*H*W + y*W + x] * src_data[c*H*W + y*W + x]);` is only 2-stage (load-compute). Pay
|
||||
attention to unrolling such cases first. Unrolling factor is loop-dependent. Choose the smallest number that
|
||||
still improves performance as an optimum between the kernel size and execution speed. For this specific kernel, changing the unroll factor from `4` to `6` results in the same performance, so unrolling factor equal to 4 is an optimum. For Intel® Neural Compute Stick 2, unrolling is conjugated with the automatic software pipelining for load, store, and compute stages:
|
||||
1. Help auto-vectorizer ensure non-aliasing pointers for kernel parameters by putting the `restrict` markers where possible.
|
||||
- This can give a performance boost, especially for kernels with unrolling, like the `ocl_grn` from the example below.
|
||||
- Place `restrict` markers for kernels with manually vectorized codes. In the `ocl_grn` kernel below, the unrolled version without the `restrict` is up to 20% slower than the most optimal one, which combines both unrolling and `restrict`.
|
||||
2. Put `#‍pragma unroll N` to your loop header. The compiler does not trigger unrolling by default, so it is your responsibility to annotate the code with pragmas as appropriate. The `ocl_grn` version with `#‍pragma unroll 4` is up to 50% faster, most of which comes from unrolling the first loop, because LLVM, in general, is better in scheduling 3-stage loops (load-compute-store), while the first loop
|
||||
The `variance += (float)(src_data[c*H*W + y*W + x] * src_data[c*H*W + y*W + x]);` is only 2-stage (load-compute). Pay attention to unrolling such cases first. Unrolling factor is loop-dependent. Choose the smallest number that still improves performance as an optimum between the kernel size and execution speed. For this specific kernel, changing the unroll factor from `4` to `6` results in the same performance, so unrolling factor equal to 4 is an optimum. For Intel Neural Compute Stick 2, unrolling is conjugated with the automatic software pipelining for load, store, and compute stages:
|
||||
```cpp
|
||||
__kernel void ocl_grn(__global const half* restrict src_data, __global half* restrict dst_data, int C, float bias)
|
||||
{
|
||||
@@ -227,7 +219,7 @@ __kernel void ocl_grn(__global const half* restrict src_data, __global half* res
|
||||
dst_data[c*H*W + y*W + x] = (half)((float)src_data[c*H*W + y*W + x] * variance);
|
||||
}
|
||||
```
|
||||
To check the efficiency of WGV, you can compare performance of the kernel above with the kernel below, which is manually vectorized over width:
|
||||
To check the efficiency of WGV, compare performance of the kernel above with the kernel below, which is manually vectorized over width:
|
||||
```cpp
|
||||
__kernel void ocl_grn_line(__global const half* restrict src_data, __global half* restrict dst_data, int C, int W, float bias)
|
||||
{
|
||||
@@ -267,19 +259,14 @@ __kernel void ocl_grn_line(__global const half* restrict src_data, __global hal
|
||||
```
|
||||
Both versions perform the same, but the second one has more complex code.
|
||||
|
||||
3. If it is easy to predict the work group size, you can also use the `reqd_work_group_size` kernel attribute to ask the compiler
|
||||
to unroll the code up to the local size of the work group. Note that if the kernel is actually executed with the
|
||||
different work group configuration, the result is undefined.
|
||||
3. If it is easy to predict the work group size, use the `reqd_work_group_size` kernel attribute to ask the compiler to unroll the code up to the local size of the work group. If the kernel is actually executed with the different work group configuration, the result is undefined.
|
||||
|
||||
4. Prefer to use the `half` compute if it keeps reasonable accuracy. 16-bit float is a native type for Intel® Neural Compute Stick 2, most of the functions `half_*` are mapped to a single hardware instruction.
|
||||
4. Prefer to use the `half` compute if it keeps reasonable accuracy. A 16-bit float is a native type for Intel Neural Compute Stick 2, most of the `half_*` functions are mapped to a single hardware instruction.
|
||||
Use the standard `native_*` function for the rest of types.
|
||||
|
||||
5. Prefer to use the `convert_half` function over `vstore_half` if conversion to 32-bit float is required. `convert_half` is mapped to a single hardware instruction. For the `cvtf32f16` kernel above, the line `outImage[idx] = convert_half(inImage[idx]*scale+bais);` is eight times slower than the code with `vstore_half`.
|
||||
5. Prefer to use the `convert_half` function over the `vstore_half` if conversion to 32-bit float is required. The `convert_half` function is mapped to a single hardware instruction. For the `cvtf32f16` kernel above, the `outImage[idx] = convert_half(inImage[idx]*scale+bias);` code is eight times slower than the code with `vstore_half`.
|
||||
|
||||
6. Mind early exits. Early exit can be extremely costly for the current version of the `clc` compiler due to conflicts with the
|
||||
auto-vectorizer. The generic advice would be to setup local size by `x` dimension equal to inputs or/and outputs width.
|
||||
If it is impossible to define the work grid that exactly matches inputs or/and outputs to eliminate checks, for example,
|
||||
`if (get_global_id(0) >= width) return`, use line-wise kernel variant with manual vectorization.
|
||||
6. Be aware of early exits, as they can be extremely costly for the current version of the `clc` compiler due to conflicts with the auto-vectorizer. It is recommended to setup local size by `x` dimension equal to inputs or/and outputs width. If it is impossible to define the work grid that exactly matches inputs or/and outputs to eliminate checks, for example, `if (get_global_id(0) >= width) return`, use line-wise kernel variant with manual vectorization.
|
||||
The kernel example below demonstrates the impact of early exits on kernel performance.
|
||||
```cpp
|
||||
// Initial version
|
||||
@@ -302,8 +289,8 @@ The kernel example below demonstrates the impact of early exits on kernel perfor
|
||||
}
|
||||
```
|
||||
This `reorg` kernel is auto-vectorizable, but an input for YOLO v2 topology is `NCHW=<1,64,26,26>` and it is not multiple of vector width, which is `8` for `half` data type. As a result, the Inference Engine does not select the auto-vectorized kernel.
|
||||
To compare performance of auto-vectorized and scalar version of the kernel, change the input size to`NCHW=<1,64,26,32>`. This enables the auto-vectorized version to be selected by the Inference Engine and can give you about 30% uplift.
|
||||
Since the auto-vectorized version is faster, it makes sense to enable it for the YOLO v2 topology input size by setting the local size multiple of vector, for example, 32, and adjust global sizes accordingly. As a result, the execution work grid exceeds actual input dimension, so out-of-bound checks should be inserted. See the updated kernel version below:
|
||||
To compare performance of auto-vectorized and scalar version of the kernel, change the input size to `NCHW=<1,64,26,32>`. This enables the auto-vectorized version to be selected by the Inference Engine and can give you about 30% uplift.
|
||||
Since the auto-vectorized version is faster, it is recommended to enable it for the YOLO v2 topology input size by setting the local size multiple of vector, for example, `32`, and adjust global sizes accordingly. As a result, the execution work grid exceeds actual input dimension, so out-of-bound checks should be inserted. See the updated kernel version below:
|
||||
```cpp
|
||||
// Version with out-of-bound checks added
|
||||
__kernel void reorg(const __global half* restrict src, __global half* restrict out, int W, int stride)
|
||||
@@ -324,7 +311,7 @@ Since the auto-vectorized version is faster, it makes sense to enable it for the
|
||||
out[W*H*c + W*h + w] = src[W2*H2*c2 + W2*h2 + w2];
|
||||
}
|
||||
```
|
||||
This code performs the same as the initial kernel above (scalar) due to branching overhead. If you replace min/max expression `w = min(w, W-1);` with `if (w >= W) return;`, runtime increases up to 2x against to code without branching (initial version).<br>
|
||||
This code performs the same as the initial kernel above (scalar) due to branching overhead. If the `w = min(w, W-1);` min/max expression is replaced with `if (w >= W) return;`, runtime increases up to 2x against the code without branching (initial version).<br>
|
||||
If branching is inevitable for your element-based kernel, it is recommended to change the scheme to line-based. See the kernel variant below:
|
||||
```cpp
|
||||
// Line-wise version
|
||||
@@ -347,8 +334,8 @@ __kernel void reorg(const __global half* restrict src, __global half* restrict o
|
||||
}
|
||||
```
|
||||
This decreases the execution time up to 40% against the best performing vectorized kernel without early exits (initial version).
|
||||
7. Reuse computations among work items by using line-based kernels or sharing values though `__local` memory.
|
||||
8. Improve data access locality. Most of custom kernels are memory bound while convolution and fully connected layers are hardware-implemented. The code below demonstrates a further optimized version of the `reorg` kernel unrolled by `stride`:
|
||||
7. Reuse computations among work items by using line-based kernels or sharing values through the `__local` memory.
|
||||
8. Improve data access locality. Most of custom kernels are memory bound while convolution and fully connected layers are hardware-implemented. The code below demonstrates a further optimized version of the `reorg` kernel unrolled by the `stride`:
|
||||
```cpp
|
||||
// Unrolled line-wise version
|
||||
__kernel void reorg_unrolled_by_stride(const __global half* restrict src, __global half* restrict dst,
|
||||
@@ -366,14 +353,11 @@ This decreases the execution time up to 40% against the best performing vectoriz
|
||||
dst[W*H*C2*(stride_y*stride+stride_x) + W*H*c2 + W*h + w] = src[W2*H2*c2 + W2*h*stride + W2*stride_y + w2 + stride_x];
|
||||
}
|
||||
```
|
||||
`scr` data in this case loaded only once. As the result, the cycle count drops up to 45% against the line-wise version.
|
||||
The `scr` data in this case is loaded only once. As the result, the cycle count drops up to 45% against the line-wise version.
|
||||
|
||||
9. Copy data from `__dlobal` to `__local` or `__private` memory if the data is accessed more than once. Access to
|
||||
`__dlobal` memory is orders of magnitude slower than access to `__local`/`__private` due to statically scheduled pipeline, which
|
||||
stalls completely on memory access without any prefetch. The same recommendation is applicable for scalar load/store
|
||||
from/to a `__blobal` pointer since work-group copying could be done in a vector fashion.
|
||||
9. Copy data from the `__global` to the `__local` or `__private` memory if the data is accessed more than once. Access to the `__global` memory is orders of magnitude slower than access to the `__local`/`__private` due to statically scheduled pipeline, which stalls completely on memory access without any prefetch. The same recommendation is applicable for scalar load/store from/to the `__global` pointer since work-group copying could be done in a vector fashion.
|
||||
|
||||
10. Use a manual DMA extension. Local (on-chip) memory throughput is up to 24x higher than DDR throughput. Starting from OpenVINO™ 2020.1, VPU OpenCL features manual-DMA kernel extension to copy sub-tensor used by work group into local memory and performing compute without DDR evolved. Here is the simple GRN kernel implementation that runs over DDR. Local size is in the form (width of the input tensor, 1, 1) to define a large enough work group to get code automatically vectorized and unrolled, while global size is (width of the input tensor, height of the input tensor, 1):
|
||||
10. Use a manual DMA extension. Local (on-chip) memory throughput is up to 24x higher than DDR throughput. Since the OpenVINO 2020.1 release, VPU OpenCL features manual-DMA kernel extension to copy a sub-tensor used by a work group into local memory and perform compute without DDR involved. Here is the simple GRN kernel implementation that runs over DDR. Local size is in the form (width of the input tensor, 1, 1) to define a large enough work group to get code automatically vectorized and unrolled, while global size is (width of the input tensor, height of the input tensor, 1):
|
||||
```cpp
|
||||
__kernel void grn_NCHW(
|
||||
__global const half* restrict src_data,
|
||||
@@ -398,7 +382,7 @@ from/to a `__blobal` pointer since work-group copying could be done in a vector
|
||||
}
|
||||
```
|
||||
|
||||
This kernel can be rewritten to introduce special data binding `__dma_preload` and `__dma_postwrite intrinsics`. This means that instead of one kernel, a group of three kernels should be implemented: `kernelName`, `__dma_preload_kernelName`, and `__dma_postwrite_kernelName`. `__dma_preload_kernelName` for a particular work group `n` is guaranteed to be executed before the `n`-th work group itself, while `__dma_postwrite_kernelName` is guaranteed to be executed after a corresponding work group. You can define one of those functions that are intended to be used to copy data from-to `__global` and `__local` memory. The syntactics requires exact functional signature match. The example below illustrates how to prepare your kernel for manual-DMA.
|
||||
This kernel can be rewritten to introduce the `__dma_preload` and `__dma_postwrite intrinsics` special data binding. This means that instead of one kernel, a group of three kernels should be implemented: `kernelName`, `__dma_preload_kernelName`, and `__dma_postwrite_kernelName`. The `__dma_preload_kernelName` kernel for a particular work group `n` is guaranteed to be executed before the `n`-th work group itself, while the `__dma_postwrite_kernelName` is guaranteed to be executed after a corresponding work group. One of those functions may be defined to copy data from-to `__global` and `__local` memory. The syntactics requires exact functional signature match. The example below illustrates how to prepare your kernel for manual-DMA.
|
||||
|
||||
```cpp
|
||||
__kernel void __dma_preload_grn_NCHW(
|
||||
@@ -557,9 +541,9 @@ __kernel void grn_NCHW(
|
||||
}
|
||||
```
|
||||
|
||||
Note the `get_local_size` and `get_local_id` usage inside the kernel. 21x speedup is expected for a kernel on enet-curbs setup because it was completely limited by memory usage.
|
||||
> **NOTE**: The `get_local_size` and `get_local_id` usage inside the kernel. 21x speedup is expected for a kernel on enet-curbs setup since it is completely limited by memory usage.
|
||||
|
||||
An alternative method to using DMA is to use work item copy extension. Those functions are executed inside a kernel and requires work groups equal to single work item.
|
||||
An alternative method to using DMA is to use work item copy extension. Those functions are executed inside a kernel and require work groups equal to single work item.
|
||||
|
||||
Here is the list of supported work item functions:
|
||||
```cpp
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# AvgPoolPrecisionPreserved attribute {#openvino_docs_OV_UG_lpt_AvgPoolPrecisionPreserved}
|
||||
# AvgPoolPrecisionPreserved Attribute {#openvino_docs_OV_UG_lpt_AvgPoolPrecisionPreserved}
|
||||
|
||||
ngraph::AvgPoolPrecisionPreservedAttribute class represents the `AvgPoolPrecisionPreserved` attribute.
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# IntervalsAlignment attribute {#openvino_docs_OV_UG_lpt_IntervalsAlignment}
|
||||
# IntervalsAlignment Attribute {#openvino_docs_OV_UG_lpt_IntervalsAlignment}
|
||||
|
||||
ngraph::IntervalsAlignmentAttribute class represents the `IntervalsAlignment` attribute.
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# PrecisionPreserved attribute {#openvino_docs_OV_UG_lpt_PrecisionPreserved}
|
||||
# PrecisionPreserved Attribute {#openvino_docs_OV_UG_lpt_PrecisionPreserved}
|
||||
|
||||
ngraph::PrecisionPreservedAttribute class represents the `PrecisionPreserved` attribute.
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Precisions attribute {#openvino_docs_OV_UG_lpt_Precisions}
|
||||
# Precisions Attribute {#openvino_docs_OV_UG_lpt_Precisions}
|
||||
|
||||
ngraph::PrecisionsAttribute class represents the `Precisions` attribute.
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# QuantizationAlignment attribute {#openvino_docs_OV_UG_lpt_QuantizationAlignment}
|
||||
# QuantizationAlignment Attribute {#openvino_docs_OV_UG_lpt_QuantizationAlignment}
|
||||
|
||||
ngraph::QuantizationAlignmentAttribute class represents the `QuantizationAlignment` attribute.
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# QuantizationGranularity attribute {#openvino_docs_OV_UG_lpt_QuantizationGranularity}
|
||||
# QuantizationGranularity Attribute {#openvino_docs_OV_UG_lpt_QuantizationGranularity}
|
||||
|
||||
ngraph::QuantizationAttribute class represents the `QuantizationGranularity` attribute.
|
||||
|
||||
|
||||
@@ -54,4 +54,4 @@ Attributes usage by transformations:
|
||||
| IntervalsAlignment | AlignQuantizationIntervals | FakeQuantizeDecompositionTransformation |
|
||||
| QuantizationAlignment | AlignQuantizationParameters | FakeQuantizeDecompositionTransformation |
|
||||
|
||||
> **NOTE**: The same type of attribute instances can be created in different transformations. This approach is the result of the transformation single-responsibility principle. For example, `Precision` attribute instances are created in `MarkupCanBeQuantized` and `MarkupPrecisions` transformations, but the reasons for their creation are different.
|
||||
> **NOTE**: The same type of attribute instances can be created in different transformations. This approach is the result of the transformation single-responsibility principle. For example, `Precision` attribute instances are created in `MarkupCanBeQuantized` and `MarkupPrecisions` transformations, but the reasons for their creation are different.
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
openvino_docs_MO_DG_prepare_model_convert_model_Cutting_Model
|
||||
openvino_docs_MO_DG_Additional_Optimization_Use_Cases
|
||||
openvino_docs_MO_DG_FP16_Compression
|
||||
openvino_docs_MO_DG_Python_API
|
||||
openvino_docs_MO_DG_prepare_model_Model_Optimizer_FAQ
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:11579795c778b28d57cbf080dedc10149500d78cc8b16a74fe2b113c76a94f6b
|
||||
size 26152
|
||||
613
docs/MO_DG/img/FaceNet.svg
Normal file
@@ -0,0 +1,613 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="704"
|
||||
height="1008"
|
||||
overflow="hidden"
|
||||
version="1.1"
|
||||
id="svg12615"
|
||||
sodipodi:docname="FaceNet.svg"
|
||||
inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview12617"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
showgrid="false"
|
||||
inkscape:zoom="0.88492063"
|
||||
inkscape:cx="352.00897"
|
||||
inkscape:cy="504"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1137"
|
||||
inkscape:window-x="-8"
|
||||
inkscape:window-y="-8"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg12615" />
|
||||
<defs
|
||||
id="defs12545">
|
||||
<clipPath
|
||||
id="clip0">
|
||||
<rect
|
||||
x="0"
|
||||
y="0"
|
||||
width="704"
|
||||
height="1008"
|
||||
id="rect12542" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
<g
|
||||
clip-path="url(#clip0)"
|
||||
id="g12613">
|
||||
<path
|
||||
id="rect12547"
|
||||
style="fill:#ffffff"
|
||||
d="M 0,0 H 704 V 1008 H 0 Z" />
|
||||
<path
|
||||
d="M388.101 376.873 377.26 319.736 379.225 319.363 390.066 376.5ZM374.561 321.605 377 313 382.421 320.114Z"
|
||||
fill="#AEAEAE"
|
||||
id="path12549" />
|
||||
<path
|
||||
d="M0.982542-0.186041 11.8255 57.0793 9.86045 57.4513-0.982542 0.186041ZM14.5251 55.211 12.0833 63.8155 6.66476 56.6994Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 309 376.816)"
|
||||
id="path12551" />
|
||||
<path
|
||||
d="M223.5 30.167C223.5 25.9326 226.933 22.5001 231.167 22.5001L474.833 22.5001C479.067 22.5001 482.5 25.9326 482.5 30.167L482.5 60.8331C482.5 65.0675 479.067 68.5 474.833 68.5L231.167 68.5C226.933 68.5 223.5 65.0675 223.5 60.8331Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#E9E9E9"
|
||||
fill-rule="evenodd"
|
||||
id="path12553" />
|
||||
<g
|
||||
aria-label="embeddings"
|
||||
transform="translate(261.058 58)"
|
||||
id="text12555"
|
||||
style="font-size:34px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 9.724,-1.836 q 1.632,0 2.992,-0.68 1.394,-0.714 2.55,-2.142 l 1.666,1.53 Q 15.538,-1.292 13.668,-0.374 11.798,0.51 9.486,0.51 5.78,0.51 3.4,-1.904 1.054,-4.352 1.054,-8.16 q 0,-3.706 2.414,-6.188 2.414,-2.482 5.95,-2.482 3.604,0 5.848,2.346 2.244,2.346 2.244,6.12 0,0.272 -0.034,0.646 -0.034,0.34 -0.068,0.51 H 3.91 q 0.272,2.414 1.87,3.91 1.632,1.462 3.944,1.462 z M 9.452,-14.484 q -2.142,0 -3.672,1.394 -1.53,1.394 -1.87,3.74 h 10.812 q -0.306,-2.38 -1.734,-3.74 -1.428,-1.394 -3.536,-1.394 z"
|
||||
id="path12983" />
|
||||
<path
|
||||
d="m 20.298021,-16.422 h 2.72 v 1.972 q 0.918,-1.19 2.074,-1.768 1.19,-0.612 2.652,-0.612 1.7,0 3.026,0.714 1.326,0.714 2.108,2.074 0.918,-1.36 2.312,-2.074 1.428,-0.714 3.196,-0.714 2.89,0 4.59,1.87 1.734,1.87 1.734,4.862 V 0 h -2.788 v -9.928 q 0,-2.074 -1.054,-3.23 -1.02,-1.19 -2.89,-1.19 -1.836,0 -2.992,1.292 -1.122,1.258 -1.122,3.298 V 0 h -2.754 v -9.928 q 0,-2.074 -1.054,-3.23 -1.02,-1.19 -2.89,-1.19 -1.836,0 -2.992,1.292 -1.122,1.258 -1.122,3.298 V 0 h -2.754 z"
|
||||
id="path12985" />
|
||||
<path
|
||||
d="m 50.830048,0 h -2.72 v -23.8 h 2.754 v 9.928 q 1.088,-1.394 2.652,-2.142 1.598,-0.782 3.366,-0.782 3.468,0 5.78,2.414 2.346,2.38 2.346,6.12 0,3.774 -2.38,6.222 -2.38,2.414 -5.882,2.414 -1.734,0 -3.298,-0.748 -1.53,-0.748 -2.618,-2.074 z m 5.78,-14.348 q -2.448,0 -4.148,1.802 -1.7,1.768 -1.7,4.42 0,2.584 1.666,4.352 1.666,1.734 4.08,1.734 2.516,0 4.08,-1.666 1.598,-1.7 1.598,-4.488 0,-2.754 -1.564,-4.454 -1.53,-1.7 -4.012,-1.7 z"
|
||||
id="path12987" />
|
||||
<path
|
||||
d="m 75.786027,-1.836 q 1.632,0 2.992,-0.68 1.394,-0.714 2.55,-2.142 l 1.666,1.53 q -1.394,1.836 -3.264,2.754 -1.87,0.884 -4.182,0.884 -3.706,0 -6.086,-2.414 -2.346,-2.448 -2.346,-6.256 0,-3.706 2.414,-6.188 2.414,-2.482 5.95,-2.482 3.604,0 5.848,2.346 2.244,2.346 2.244,6.12 0,0.272 -0.034,0.646 -0.034,0.34 -0.068,0.51 h -13.498 q 0.272,2.414 1.87,3.91 1.632,1.462 3.944,1.462 z m -0.272,-12.648 q -2.142,0 -3.672,1.394 -1.53,1.394 -1.87,3.74 h 10.812 q -0.306,-2.38 -1.734,-3.74 -1.428,-1.394 -3.536,-1.394 z"
|
||||
id="path12989" />
|
||||
<path
|
||||
d="m 99.756044,-23.8 h 2.787996 V 0 h -2.753996 v -2.652 q -1.122,1.428 -2.72,2.21 -1.564,0.782 -3.366,0.782 -3.434,0 -5.78,-2.38 -2.312,-2.414 -2.312,-6.12 0,-3.74 2.38,-6.188 2.414,-2.482 5.916,-2.482 1.7,0 3.196,0.714 1.53,0.68 2.652,1.972 z m -5.746,21.726 q 2.482,0 4.148,-1.768 1.7,-1.802 1.7,-4.42 0,-2.652 -1.632,-4.386 -1.632,-1.734 -4.08,-1.734 -2.516,0 -4.114,1.7 -1.598,1.7 -1.598,4.454 0,2.754 1.564,4.454 1.564,1.7 4.012,1.7 z"
|
||||
id="path12991" />
|
||||
<path
|
||||
d="m 119.51003,-23.8 h 2.788 V 0 h -2.754 v -2.652 q -1.122,1.428 -2.72,2.21 -1.564,0.782 -3.366,0.782 -3.434,0 -5.78,-2.38 -2.312,-2.414 -2.312,-6.12 0,-3.74 2.38,-6.188 2.414,-2.482 5.916,-2.482 1.7,0 3.196,0.714 1.53,0.68 2.652,1.972 z m -5.746,21.726 q 2.482,0 4.148,-1.768 1.7,-1.802 1.7,-4.42 0,-2.652 -1.632,-4.386 -1.632,-1.734 -4.08,-1.734 -2.516,0 -4.114,1.7 -1.598,1.7 -1.598,4.454 0,2.754 1.564,4.454 1.564,1.7 4.012,1.7 z"
|
||||
id="path12993" />
|
||||
<path
|
||||
d="m 126.03801,-21.93 h 3.06 v 3.026 h -3.06 z m 0.136,21.93 v -16.422 h 2.788 V 0 Z"
|
||||
id="path12995" />
|
||||
<path
|
||||
d="m 132.87198,-16.422 h 2.72 v 2.55 q 0.986,-1.428 2.482,-2.176 1.53,-0.782 3.4,-0.782 3.264,0 5.1,2.108 1.836,2.108 1.836,5.78 V 0 h -2.754 v -8.772 q 0,-2.584 -1.292,-4.08 -1.258,-1.496 -3.536,-1.496 -2.346,0 -3.774,1.598 -1.428,1.598 -1.428,4.148 V 0 h -2.754 z"
|
||||
id="path12997" />
|
||||
<path
|
||||
d="m 165.23997,-1.938 v -1.156 q -1.122,1.36 -2.72,2.108 -1.564,0.748 -3.366,0.748 -3.468,0 -5.78,-2.312 -2.312,-2.312 -2.312,-5.95 0,-3.604 2.346,-5.95 2.38,-2.38 5.848,-2.38 1.768,0 3.332,0.748 1.564,0.748 2.686,2.108 v -2.448 h 2.754 V -2.04 q 0,4.08 -2.176,6.12 -2.176,2.04 -6.562,2.04 h -4.148 V 3.74 h 4.284 q 2.89,0 4.352,-1.428 1.462,-1.428 1.462,-4.25 z m -5.678,-0.748 q 2.448,0 4.114,-1.666 1.7,-1.7 1.7,-4.148 0,-2.516 -1.7,-4.182 -1.666,-1.7 -4.114,-1.7 -2.55,0 -4.114,1.598 -1.564,1.598 -1.564,4.25 0,2.618 1.564,4.25 1.598,1.598 4.114,1.598 z"
|
||||
id="path12999" />
|
||||
<path
|
||||
d="m 184.82399,-4.794 q 0,2.312 -1.972,3.774 -1.972,1.462 -5.134,1.462 -2.074,0 -3.91,-0.646 -1.802,-0.68 -3.264,-2.006 l 1.428,-2.108 q 1.564,1.292 2.924,1.87 1.36,0.578 2.89,0.578 1.972,0 3.162,-0.748 1.224,-0.782 1.224,-2.006 0,-1.258 -1.02,-1.802 -1.02,-0.578 -3.366,-0.612 -3.434,-0.136 -5.032,-1.258 -1.564,-1.122 -1.564,-3.502 0,-2.176 1.802,-3.604 1.836,-1.462 4.692,-1.462 1.938,0 3.638,0.612 1.7,0.612 3.094,1.802 l -1.326,2.074 q -1.394,-1.122 -2.686,-1.632 -1.292,-0.51 -2.754,-0.51 -1.666,0 -2.754,0.748 -1.054,0.748 -1.054,1.836 0,1.19 0.952,1.734 0.952,0.544 3.026,0.646 3.706,0.102 5.338,1.224 1.666,1.122 1.666,3.536 z"
|
||||
id="path13001" />
|
||||
</g>
|
||||
<path
|
||||
d="M223.5 158.501C223.5 141.38 237.38 127.5 254.501 127.5L451.499 127.5C468.621 127.5 482.5 141.38 482.5 158.501L482.5 282.499C482.5 299.621 468.621 313.5 451.499 313.5L254.501 313.5C237.38 313.5 223.5 299.621 223.5 282.499Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#E9E9E9"
|
||||
fill-rule="evenodd"
|
||||
id="path12557" />
|
||||
<g
|
||||
aria-label="InceptionResent..."
|
||||
transform="translate(251.808 229)"
|
||||
id="text12559"
|
||||
style="font-size:26px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 1.872,0 V -18.2 H 4.108 V 0 Z"
|
||||
id="path13004" />
|
||||
<path
|
||||
d="m 7.3579881,-12.558 h 2.08 v 1.95 q 0.7539999,-1.092 1.8979999,-1.664 1.17,-0.598 2.6,-0.598 2.496,0 3.9,1.612 1.404,1.612 1.404,4.42 V 0 h -2.106 v -6.708 q 0,-1.976 -0.988,-3.12 -0.962,-1.144 -2.704,-1.144 -1.794,0 -2.886,1.222 -1.0919999,1.222 -1.0919999,3.172 V 0 h -2.106 z"
|
||||
id="path13006" />
|
||||
<path
|
||||
d="m 27.767967,0.39 q -2.886,0 -4.706,-1.82 -1.794,-1.846 -1.794,-4.81 0,-2.808 1.898,-4.732 1.898,-1.924 4.654,-1.924 2.002,0 3.406,0.936 1.43,0.91 2.158,2.652 l -1.794,0.858 q -0.598,-1.3 -1.56,-1.95 -0.936,-0.65 -2.236,-0.65 -1.846,0 -3.12,1.404 -1.248,1.378 -1.248,3.406 0,2.054 1.248,3.432 1.274,1.352 3.172,1.352 1.222,0 2.262,-0.65 1.066,-0.676 1.716,-1.872 l 1.638,0.884 q -0.754,1.612 -2.262,2.548 -1.508,0.936 -3.432,0.936 z"
|
||||
id="path13008" />
|
||||
<path
|
||||
d="m 41.625964,-1.404 q 1.248,0 2.288,-0.52 1.066,-0.546 1.95,-1.638 l 1.274,1.17 q -1.066,1.404 -2.496,2.106 -1.43,0.676 -3.198,0.676 -2.834,0 -4.654,-1.846 -1.794,-1.872 -1.794,-4.784 0,-2.834 1.846,-4.732 1.846,-1.898 4.55,-1.898 2.756,0 4.472,1.794 1.716,1.794 1.716,4.68 0,0.208 -0.026,0.494 -0.026,0.26 -0.052,0.39 h -10.322 q 0.208,1.846 1.43,2.99 1.248,1.118 3.016,1.118 z m -0.208,-9.672 q -1.638,0 -2.808,1.066 -1.17,1.066 -1.43,2.86 h 8.268 q -0.234,-1.82 -1.326,-2.86 -1.092,-1.066 -2.704,-1.066 z"
|
||||
id="path13010" />
|
||||
<path
|
||||
d="m 51.817981,4.68 h -2.106 v -17.238 h 2.08 v 2.028 q 0.832,-1.092 2.054,-1.69 1.222,-0.624 2.6,-0.624 2.652,0 4.42,1.846 1.768,1.82 1.768,4.68 0,2.886 -1.846,4.758 -1.82,1.846 -4.498,1.846 -1.274,0 -2.47,-0.52 -1.17,-0.546 -2.002,-1.534 z m 4.394,-15.652 q -1.898,0 -3.198,1.378 -1.274,1.352 -1.274,3.38 0,2.002 1.248,3.328 1.274,1.326 3.146,1.326 1.924,0 3.12,-1.274 1.222,-1.3 1.222,-3.432 0,-2.106 -1.196,-3.406 -1.17,-1.3 -3.068,-1.3 z"
|
||||
id="path13012" />
|
||||
<path
|
||||
d="m 65.389986,-3.354 v -7.332 h -1.716 v -1.872 h 1.716 v -3.848 h 2.106 v 3.848 h 2.912 v 1.872 h -2.912 v 7.15 q 0,0.832 0.364,1.248 0.39,0.39 1.17,0.39 h 1.378 V 0 h -1.69 q -1.742,0 -2.548,-0.806 -0.78,-0.806 -0.78,-2.548 z"
|
||||
id="path13014" />
|
||||
<path
|
||||
d="m 72.669987,-16.77 h 2.34 v 2.314 h -2.34 z m 0.104,16.77 v -12.558 h 2.132 V 0 Z"
|
||||
id="path13016" />
|
||||
<path
|
||||
d="m 90.401975,-6.266 q 0,2.834 -1.898,4.758 -1.898,1.898 -4.68,1.898 -2.782,0 -4.654,-1.898 -1.846,-1.898 -1.846,-4.732 0,-2.834 1.898,-4.732 1.898,-1.924 4.68,-1.924 2.782,0 4.628,1.898 1.872,1.898 1.872,4.732 z m -10.92,0.026 q 0,2.028 1.248,3.406 1.274,1.378 3.146,1.378 1.846,0 3.094,-1.378 1.274,-1.378 1.274,-3.432 0,-2.002 -1.274,-3.38 -1.274,-1.404 -3.12,-1.404 -1.846,0 -3.12,1.378 -1.248,1.378 -1.248,3.432 z"
|
||||
id="path13018" />
|
||||
<path
|
||||
d="m 92.585985,-12.558 h 2.08 v 1.95 q 0.754,-1.092 1.898,-1.664 1.17,-0.598 2.6,-0.598 2.495995,0 3.899995,1.612 1.404,1.612 1.404,4.42 V 0 h -2.106 v -6.708 q 0,-1.976 -0.988,-3.12 -0.962,-1.144 -2.703995,-1.144 -1.794,0 -2.886,1.222 -1.092,1.222 -1.092,3.172 V 0 h -2.106 z"
|
||||
id="path13020" />
|
||||
<path
|
||||
d="m 117.12996,-7.67 5.174,7.67 h -2.652 l -4.914,-7.41 h -5.122 V 0 h -2.236 v -18.2 h 8.008 q 3.068,0 4.784,1.43 1.716,1.404 1.716,3.874 0,2.028 -1.248,3.406 -1.248,1.378 -3.51,1.82 z m 2.496,-5.174 q 0,-1.586 -1.144,-2.47 -1.118,-0.91 -3.146,-0.91 h -5.72 v 6.864 h 5.538 q 2.054,0 3.25,-0.936 1.222,-0.962 1.222,-2.548 z"
|
||||
id="path13022" />
|
||||
<path
|
||||
d="m 130.10396,-1.404 q 1.248,0 2.288,-0.52 1.066,-0.546 1.95,-1.638 l 1.274,1.17 q -1.066,1.404 -2.496,2.106 -1.43,0.676 -3.198,0.676 -2.834,0 -4.654,-1.846 -1.794,-1.872 -1.794,-4.784 0,-2.834 1.846,-4.732 1.846,-1.898 4.55,-1.898 2.756,0 4.472,1.794 1.716,1.794 1.716,4.68 0,0.208 -0.026,0.494 -0.026,0.26 -0.052,0.39 h -10.322 q 0.208,1.846 1.43,2.99 1.248,1.118 3.016,1.118 z m -0.208,-9.672 q -1.638,0 -2.808,1.066 -1.17,1.066 -1.43,2.86 h 8.268 q -0.234,-1.82 -1.326,-2.86 -1.092,-1.066 -2.704,-1.066 z"
|
||||
id="path13024" />
|
||||
<path
|
||||
d="m 148.30398,-3.666 q 0,1.768 -1.508,2.886 -1.508,1.118 -3.926,1.118 -1.586,0 -2.99,-0.494 -1.378,-0.52 -2.496,-1.534 l 1.092,-1.612 q 1.196,0.988 2.236,1.43 1.04,0.442 2.21,0.442 1.508,0 2.418,-0.572 0.936,-0.598 0.936,-1.534 0,-0.962 -0.78,-1.378 -0.78,-0.442 -2.574,-0.468 -2.626,-0.104 -3.848,-0.962 -1.196,-0.858 -1.196,-2.678 0,-1.664 1.378,-2.756 1.404,-1.118 3.588,-1.118 1.482,0 2.782,0.468 1.3,0.468 2.366,1.378 l -1.014,1.586 q -1.066,-0.858 -2.054,-1.248 -0.988,-0.39 -2.106,-0.39 -1.274,0 -2.106,0.572 -0.806,0.572 -0.806,1.404 0,0.91 0.728,1.326 0.728,0.416 2.314,0.494 2.834,0.078 4.082,0.936 1.274,0.858 1.274,2.704 z"
|
||||
id="path13026" />
|
||||
<path
|
||||
d="m 156.494,-1.404 q 1.248,0 2.288,-0.52 1.066,-0.546 1.95,-1.638 l 1.274,1.17 q -1.066,1.404 -2.496,2.106 -1.43,0.676 -3.198,0.676 -2.834,0 -4.654,-1.846 -1.794,-1.872 -1.794,-4.784 0,-2.834 1.846,-4.732 1.846,-1.898 4.55,-1.898 2.756,0 4.472,1.794 1.716,1.794 1.716,4.68 0,0.208 -0.026,0.494 -0.026,0.26 -0.052,0.39 h -10.322 q 0.208,1.846 1.43,2.99 1.248,1.118 3.016,1.118 z m -0.208,-9.672 q -1.638,0 -2.808,1.066 -1.17,1.066 -1.43,2.86 h 8.268 q -0.234,-1.82 -1.326,-2.86 -1.092,-1.066 -2.704,-1.066 z"
|
||||
id="path13028" />
|
||||
<path
|
||||
d="m 164.58001,-12.558 h 2.08 v 1.95 q 0.754,-1.092 1.898,-1.664 1.17,-0.598 2.6,-0.598 2.496,0 3.9,1.612 1.404,1.612 1.404,4.42 V 0 h -2.106 v -6.708 q 0,-1.976 -0.988,-3.12 -0.962,-1.144 -2.704,-1.144 -1.794,0 -2.886,1.222 -1.092,1.222 -1.092,3.172 V 0 h -2.106 z"
|
||||
id="path13030" />
|
||||
<path
|
||||
d="m 179.76399,-3.354 v -7.332 h -1.716 v -1.872 h 1.716 v -3.848 h 2.106 v 3.848 h 2.912 v 1.872 h -2.912 v 7.15 q 0,0.832 0.364,1.248 0.39,0.39 1.17,0.39 h 1.378 V 0 h -1.69 q -1.742,0 -2.548,-0.806 -0.78,-0.806 -0.78,-2.548 z"
|
||||
id="path13032" />
|
||||
<path
|
||||
d="m 186.784,-2.626 h 2.626 V 0 h -2.626 z"
|
||||
id="path13034" />
|
||||
<path
|
||||
d="m 191.906,-2.626 h 2.626 V 0 h -2.626 z"
|
||||
id="path13036" />
|
||||
<path
|
||||
d="m 197.02801,-2.626 h 2.626 V 0 h -2.626 z"
|
||||
id="path13038" />
|
||||
</g>
|
||||
<path
|
||||
d="M1-1.78222e-06 1.00009 52.2413-0.999907 52.2413-1 1.78222e-06ZM4.00009 50.9079 0.000104987 58.9079-3.99991 50.9079Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 353 126.908)"
|
||||
id="path12561" />
|
||||
<g
|
||||
aria-label="2 tensors"
|
||||
transform="translate(316.822 104)"
|
||||
id="text12563"
|
||||
style="font-size:17px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 9.129,0 H 0.646 V -1.19 L 4.76,-4.964 q 1.462,-1.326 1.989,-2.108 0.527,-0.782 0.527,-1.666 0,-0.935 -0.697,-1.53 -0.68,-0.612 -1.7,-0.612 -1.258,0 -1.989,0.782 -0.714,0.765 -0.765,2.159 H 0.731 q 0.034,-1.938 1.156,-3.077 1.139,-1.156 3.026,-1.156 1.666,0 2.737,0.969 1.088,0.952 1.088,2.431 0,1.19 -0.663,2.21 -0.646,1.003 -2.465,2.601 l -3.009,2.686 h 6.528 z"
|
||||
id="path13041" />
|
||||
<path
|
||||
d="m 14.126991,-2.193 v -4.794 h -1.122 v -1.224 h 1.122 v -2.516 h 1.377 v 2.516 h 1.904 v 1.224 h -1.904 v 4.675 q 0,0.544 0.238,0.816 0.255,0.255 0.765,0.255 h 0.901 V 0 h -1.105 q -1.139,0 -1.666,-0.527 -0.51,-0.527 -0.51,-1.666 z"
|
||||
id="path13043" />
|
||||
<path
|
||||
d="m 22.627001,-0.918 q 0.816,0 1.496,-0.34 0.697,-0.357 1.275,-1.071 l 0.833,0.765 q -0.697,0.918 -1.632,1.377 -0.935,0.442 -2.091,0.442 -1.853,0 -3.043,-1.207 -1.173,-1.224 -1.173,-3.128 0,-1.853 1.207,-3.094 1.207,-1.241 2.975,-1.241 1.802,0 2.924,1.173 1.122,1.173 1.122,3.06 0,0.136 -0.017,0.323 -0.017,0.17 -0.034,0.255 h -6.749 q 0.136,1.207 0.935,1.955 0.816,0.731 1.972,0.731 z m -0.136,-6.324 q -1.071,0 -1.836,0.697 -0.765,0.697 -0.935,1.87 h 5.406 q -0.153,-1.19 -0.867,-1.87 -0.714,-0.697 -1.768,-0.697 z"
|
||||
id="path13045" />
|
||||
<path
|
||||
d="m 27.914012,-8.211 h 1.36 v 1.275 q 0.493,-0.714 1.241,-1.088 0.765,-0.391 1.7,-0.391 1.632,0 2.55,1.054 0.918,1.054 0.918,2.89 V 0 h -1.377 v -4.386 q 0,-1.292 -0.646,-2.04 -0.629,-0.748 -1.768,-0.748 -1.173,0 -1.887,0.799 -0.714,0.799 -0.714,2.074 V 0 h -1.377 z"
|
||||
id="path13047" />
|
||||
<path
|
||||
d="m 43.995998,-2.397 q 0,1.156 -0.986,1.887 -0.986,0.731 -2.567,0.731 -1.037,0 -1.955,-0.323 -0.901,-0.34 -1.632,-1.003 l 0.714,-1.054 q 0.782,0.646 1.462,0.935 0.68,0.289 1.445,0.289 0.986,0 1.581,-0.374 0.612,-0.391 0.612,-1.003 0,-0.629 -0.51,-0.901 -0.51,-0.289 -1.683,-0.306 -1.717,-0.068 -2.516,-0.629 -0.782,-0.561 -0.782,-1.751 0,-1.088 0.901,-1.802 0.918,-0.731 2.346,-0.731 0.969,0 1.819,0.306 0.85,0.306 1.547,0.901 l -0.663,1.037 q -0.697,-0.561 -1.343,-0.816 -0.646,-0.255 -1.377,-0.255 -0.833,0 -1.377,0.374 -0.527,0.374 -0.527,0.918 0,0.595 0.476,0.867 0.476,0.272 1.513,0.323 1.853,0.051 2.669,0.612 0.833,0.561 0.833,1.768 z"
|
||||
id="path13049" />
|
||||
<path
|
||||
d="m 53.56701,-4.097 q 0,1.853 -1.241,3.111 -1.241,1.241 -3.06,1.241 -1.819,0 -3.043,-1.241 -1.207,-1.241 -1.207,-3.094 0,-1.853 1.241,-3.094 1.241,-1.258 3.06,-1.258 1.819,0 3.026,1.241 1.224,1.241 1.224,3.094 z m -7.14,0.017 q 0,1.326 0.816,2.227 0.833,0.901 2.057,0.901 1.207,0 2.023,-0.901 0.833,-0.901 0.833,-2.244 0,-1.309 -0.833,-2.21 -0.833,-0.918 -2.04,-0.918 -1.207,0 -2.04,0.901 -0.816,0.901 -0.816,2.244 z"
|
||||
id="path13051" />
|
||||
<path
|
||||
d="m 54.995017,-8.211 h 1.36 v 1.037 q 0.374,-0.544 0.969,-0.816 0.595,-0.272 1.411,-0.272 h 0.748 v 1.309 h -0.765 q -1.173,0 -1.768,0.612 -0.578,0.612 -0.578,1.853 V 0 h -1.377 z"
|
||||
id="path13053" />
|
||||
<path
|
||||
d="m 67.218018,-2.397 q 0,1.156 -0.986,1.887 -0.986,0.731 -2.567,0.731 -1.037,0 -1.955,-0.323 -0.901,-0.34 -1.632,-1.003 l 0.714,-1.054 q 0.782,0.646 1.462,0.935 0.68,0.289 1.445,0.289 0.986,0 1.581,-0.374 0.612,-0.391 0.612,-1.003 0,-0.629 -0.51,-0.901 -0.51,-0.289 -1.683,-0.306 -1.717,-0.068 -2.516,-0.629 -0.782,-0.561 -0.782,-1.751 0,-1.088 0.901,-1.802 0.918,-0.731 2.346,-0.731 0.969,0 1.819,0.306 0.85,0.306 1.547,0.901 l -0.663,1.037 q -0.697,-0.561 -1.343,-0.816 -0.646,-0.255 -1.377,-0.255 -0.833,0 -1.377,0.374 -0.527,0.374 -0.527,0.918 0,0.595 0.476,0.867 0.476,0.272 1.513,0.323 1.853,0.051 2.669,0.612 0.833,0.561 0.833,1.768 z"
|
||||
id="path13055" />
|
||||
</g>
|
||||
<path
|
||||
d="M322.5 985.5C322.5 977.768 335.932 971.5 352.5 971.5 369.069 971.5 382.5 977.768 382.5 985.5 382.5 993.232 369.069 999.5 352.5 999.5 335.932 999.5 322.5 993.232 322.5 985.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path12565" />
|
||||
<path
|
||||
d="M104.5 572.835C104.5 535.094 135.094 504.5 172.835 504.5L532.166 504.5C569.906 504.5 600.5 535.094 600.5 572.835L600.5 846.165C600.5 883.906 569.906 914.5 532.166 914.5L172.835 914.5C135.094 914.5 104.5 883.906 104.5 846.165Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#E9E9E9"
|
||||
fill-rule="evenodd"
|
||||
id="path12567" />
|
||||
<path
|
||||
d="M279.5 391.5C279.5 383.768 292.932 377.5 309.5 377.5 326.069 377.5 339.5 383.768 339.5 391.5 339.5 399.232 326.069 405.5 309.5 405.5 292.932 405.5 279.5 399.232 279.5 391.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path12569" />
|
||||
<path
|
||||
d="M359.5 391.5C359.5 383.768 373.155 377.5 390 377.5 406.845 377.5 420.5 383.768 420.5 391.5 420.5 399.232 406.845 405.5 390 405.5 373.155 405.5 359.5 399.232 359.5 391.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path12571" />
|
||||
<path
|
||||
d="M232.5 452.5C232.5 444.768 245.932 438.5 262.5 438.5 279.069 438.5 292.5 444.768 292.5 452.5 292.5 460.232 279.069 466.5 262.5 466.5 245.932 466.5 232.5 460.232 232.5 452.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path12573" />
|
||||
<path
|
||||
d="M383.5 452.5C383.5 444.768 396.932 438.5 413.5 438.5 430.069 438.5 443.5 444.768 443.5 452.5 443.5 460.232 430.069 466.5 413.5 466.5 396.932 466.5 383.5 460.232 383.5 452.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path12575" />
|
||||
<path
|
||||
d="M1-1.85532e-06 1.00009 49.9202-0.999907 49.9202-1 1.85532e-06ZM4.00009 48.5869 0.000104987 56.5869-3.99991 48.5869Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 352 970.587)"
|
||||
id="path12577" />
|
||||
<path
|
||||
d="M321.5 805.5C321.5 797.768 335.155 791.5 352 791.5 368.845 791.5 382.5 797.768 382.5 805.5 382.5 813.232 368.845 819.5 352 819.5 335.155 819.5 321.5 813.232 321.5 805.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path12579" />
|
||||
<path
|
||||
d="M177.5 853.5C177.5 845.768 190.932 839.5 207.5 839.5 224.069 839.5 237.5 845.768 237.5 853.5 237.5 861.232 224.069 867.5 207.5 867.5 190.932 867.5 177.5 861.232 177.5 853.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path12581" />
|
||||
<path
|
||||
d="M227.027 843.185 227.375 841.725 228.341 840.09 229.874 838.696 231.857 837.308 234.26 836.074 237.079 834.857 243.664 832.641 251.494 830.81 260.12 829.424 269.433 828.481 278.967 828.167 288.467 827.854 288.405 827.858 297.593 826.983 297.523 826.992 306.148 825.555 306.083 825.568 313.771 823.756 313.689 823.778 320.189 821.653 320.104 821.686 322.854 820.498 322.65 820.616 324.764 819.03 325.965 820.63 323.755 822.287 320.854 823.54 314.271 825.693 306.51 827.522 297.817 828.971 288.564 829.852 279.033 830.166 269.533 830.478 269.601 830.474 260.351 831.411 260.409 831.404 251.846 832.779 251.915 832.765 244.165 834.578 244.256 834.552 237.756 836.739 237.834 836.709 235.084 837.897 235.144 837.868 232.832 839.056 232.948 838.986 231.073 840.298 231.173 840.219 229.798 841.469 229.986 841.238 229.173 842.613 229.285 842.336 228.973 843.648ZM321.539 818.056 329.938 814.979 327.361 823.544Z"
|
||||
fill="#AEAEAE"
|
||||
id="path12583" />
|
||||
<g
|
||||
aria-label="fifo_queue"
|
||||
transform="translate(160.443 826)"
|
||||
id="text12585"
|
||||
style="font-size:17px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 1.445,-6.987 H 0.323 v -1.224 h 1.122 v -1.547 q 0,-1.037 0.544,-1.581 Q 2.55,-11.9 3.587,-11.9 h 1.122 v 1.156 H 3.791 q -0.476,0 -0.714,0.255 -0.238,0.255 -0.238,0.765 v 1.513 h 1.87 v 1.224 H 2.839 V 0 H 1.445 Z m 4.437,-3.978 h 1.53 v 1.513 H 5.882 Z M 5.95,0 V -8.211 H 7.344 V 0 Z"
|
||||
id="path13058" />
|
||||
<path
|
||||
d="m 9.8429912,-6.987 h -1.122 v -1.224 h 1.122 v -1.547 q 0,-1.037 0.5439998,-1.581 0.561,-0.561 1.598,-0.561 h 1.122 v 1.156 h -0.918 q -0.476,0 -0.714,0.255 -0.238,0.255 -0.238,0.765 v 1.513 h 1.87 v 1.224 h -1.87 V 0 H 9.8429912 Z"
|
||||
id="path13060" />
|
||||
<path
|
||||
d="m 22.201989,-4.097 q 0,1.853 -1.241,3.111 -1.241,1.241 -3.06,1.241 -1.819,0 -3.043,-1.241 -1.207,-1.241 -1.207,-3.094 0,-1.853 1.241,-3.094 1.241,-1.258 3.06,-1.258 1.819,0 3.026,1.241 1.224,1.241 1.224,3.094 z m -7.14,0.017 q 0,1.326 0.816,2.227 0.833,0.901 2.057,0.901 1.207,0 2.023,-0.901 0.833,-0.901 0.833,-2.244 0,-1.309 -0.833,-2.21 -0.833,-0.918 -2.04,-0.918 -1.207,0 -2.04,0.901 -0.816,0.901 -0.816,2.244 z"
|
||||
id="path13062" />
|
||||
<path
|
||||
d="M 22.694998,2.193 V 0.952 h 8.636 v 1.241 z"
|
||||
id="path13064" />
|
||||
<path
|
||||
d="m 38.912993,-8.211 h 1.377 V 3.06 h -1.394 v -4.318 q -0.544,0.68 -1.343,1.054 -0.782,0.374 -1.666,0.374 -1.734,0 -2.907,-1.19 -1.156,-1.207 -1.156,-3.06 0,-1.887 1.19,-3.111 1.207,-1.224 2.941,-1.224 0.867,0 1.649,0.374 0.782,0.374 1.309,1.037 z m -2.89,7.174 q 1.241,0 2.074,-0.884 0.85,-0.901 0.85,-2.21 0,-1.326 -0.816,-2.193 -0.816,-0.867 -2.04,-0.867 -1.258,0 -2.057,0.85 -0.799,0.85 -0.799,2.227 0,1.377 0.782,2.227 0.782,0.85 2.006,0.85 z"
|
||||
id="path13066" />
|
||||
<path
|
||||
d="m 49.758981,0 h -1.377 v -1.258 q -0.493,0.714 -1.241,1.088 -0.748,0.357 -1.7,0.357 -1.615,0 -2.55,-1.054 -0.918,-1.054 -0.918,-2.873 v -4.471 h 1.394 v 4.386 q 0,1.292 0.629,2.04 0.646,0.748 1.785,0.748 1.156,0 1.87,-0.799 0.714,-0.799 0.714,-2.074 v -4.301 h 1.394 z"
|
||||
id="path13068" />
|
||||
<path
|
||||
d="m 55.504967,-0.918 q 0.816,0 1.496,-0.34 0.697,-0.357 1.275,-1.071 l 0.833,0.765 q -0.697,0.918 -1.632,1.377 -0.935,0.442 -2.091,0.442 -1.853,0 -3.043,-1.207 -1.173,-1.224 -1.173,-3.128 0,-1.853 1.207,-3.094 1.207,-1.241 2.975,-1.241 1.802,0 2.924,1.173 1.122,1.173 1.122,3.06 0,0.136 -0.017,0.323 -0.017,0.17 -0.034,0.255 h -6.749 q 0.136,1.207 0.935,1.955 0.816,0.731 1.972,0.731 z m -0.136,-6.324 q -1.071,0 -1.836,0.697 -0.765,0.697 -0.935,1.87 h 5.406 q -0.153,-1.19 -0.867,-1.87 -0.714,-0.697 -1.768,-0.697 z"
|
||||
id="path13070" />
|
||||
<path
|
||||
d="m 68.47598,0 h -1.377 v -1.258 q -0.493,0.714 -1.241,1.088 -0.748,0.357 -1.7,0.357 -1.615,0 -2.55,-1.054 -0.918,-1.054 -0.918,-2.873 v -4.471 h 1.394 v 4.386 q 0,1.292 0.629,2.04 0.646,0.748 1.785,0.748 1.156,0 1.87,-0.799 0.714,-0.799 0.714,-2.074 v -4.301 h 1.394 z"
|
||||
id="path13072" />
|
||||
<path
|
||||
d="m 74.22197,-0.918 q 0.816,0 1.496,-0.34 0.697,-0.357 1.275,-1.071 l 0.833,0.765 q -0.697,0.918 -1.632,1.377 -0.935,0.442 -2.091,0.442 -1.853,0 -3.043,-1.207 -1.173,-1.224 -1.173,-3.128 0,-1.853 1.207,-3.094 1.207,-1.241 2.975,-1.241 1.802,0 2.924,1.173 1.122,1.173 1.122,3.06 0,0.136 -0.017,0.323 -0.017,0.17 -0.034,0.255 h -6.749 q 0.136,1.207 0.935,1.955 0.816,0.731 1.972,0.731 z m -0.136,-6.324 q -1.071,0 -1.836,0.697 -0.765,0.697 -0.935,1.87 h 5.406 q -0.153,-1.19 -0.867,-1.87 -0.714,-0.697 -1.768,-0.697 z"
|
||||
id="path13074" />
|
||||
</g>
|
||||
<path
|
||||
d="M321.288 805.944 318.331 805.484 315.352 804.063 312.459 801.827 309.548 798.787 306.799 795.015 304.007 790.509 301.354 785.267 298.647 779.411 296.068 772.807 293.495 765.654 290.985 757.871 288.603 749.472 286.223 740.58 283.905 731.12 281.714 721.164 279.586 710.775 277.521 699.95 275.518 688.683 273.703 677.043 271.951 665.029 268.761 640.008 266.071 613.923 263.882 586.964 262.256 559.316 261.255 531.23 261.016 509.634 263.016 509.611 263.254 531.195 263.254 531.171 264.254 559.233 264.253 559.21 265.878 586.835 265.876 586.813 268.064 613.75 268.062 613.729 270.749 639.791 270.746 639.767 273.934 664.767 273.932 664.749 275.682 676.749 275.68 676.74 277.493 688.365 277.489 688.344 279.489 699.594 279.487 699.581 281.549 710.394 281.547 710.381 283.672 720.756 283.669 720.741 285.856 730.679 285.851 730.656 288.163 740.093 288.158 740.073 290.533 748.948 290.529 748.933 292.904 757.308 292.894 757.274 295.394 765.024 295.383 764.993 297.945 772.118 297.936 772.093 300.499 778.655 300.475 778.599 303.162 784.412 303.147 784.38 305.772 789.567 305.729 789.492 308.479 793.929 308.438 793.867 311.125 797.555 311.039 797.452 313.852 800.39 313.741 800.29 316.491 802.415 316.31 802.304 319.06 803.616 318.783 803.531 321.596 803.968ZM258.031 511 261.942 502.956 266.03 510.912Z"
|
||||
fill="#AEAEAE"
|
||||
id="path12587" />
|
||||
<path
|
||||
d="M0.220261-0.975441 2.34199-0.496342 4.53404 0.988601 6.60594 3.261 8.55084 6.24318 10.5298 10.0096 12.4315 14.5102 14.323 19.7435 16.1467 25.5918 17.9674 32.1214 19.7234 39.2706 21.4778 47.0404 23.1694 55.373 24.7976 64.2652 26.3621 73.6526 27.9263 83.6003 29.4284 93.9274 30.8048 104.751 32.1806 116.008 33.4944 127.582 34.6832 139.594 36.8718 164.483 38.748 190.436 40.2489 217.327 41.3745 244.905 42.0624 272.858 42.2533 294.387 40.2534 294.405 40.0625 272.884 40.0628 272.9 39.3753 244.962 39.3758 244.978 38.2508 217.416 38.2516 217.431 36.7516 190.556 36.7526 190.572 34.8776 164.635 34.8788 164.65 32.6913 139.775 32.6924 139.786 31.5049 127.786 31.5064 127.8 30.1939 116.238 30.1949 116.246 28.8199 104.996 28.8205 105.001 27.4455 94.1887 27.4479 94.2064 25.9479 83.8939 25.9496 83.9053 24.3871 73.9678 24.3886 73.9769 22.8261 64.6019 22.8289 64.6176 21.2039 55.7426 21.2075 55.7614 19.52 47.4489 19.5246 47.4703 17.7746 39.7203 17.7789 39.7385 16.0289 32.6135 16.0367 32.6436 14.2242 26.1436 14.2328 26.1727 12.4203 20.3602 12.4345 20.4024 10.5595 15.2149 10.5789 15.2642 8.70385 10.8267 8.73976 10.9026 6.80226 7.21513 6.84989 7.29627 4.97489 4.42127 5.07354 4.54875 3.13604 2.42375 3.31415 2.57792 1.37665 1.26542 1.71724 1.41294-0.220261 0.975441ZM45.2414 293.027 41.3125 301.062 37.2417 293.098Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 382 805.039)"
|
||||
id="path12589" />
|
||||
<path
|
||||
d="M261 503.312 261.063 496.437 261.063 496.446 261.063 493.247 261.125 490.422 261.188 488.045 261.188 488.071 261.188 486.232 261.254 484.974 261.323 484.492 261.385 484.055 261.375 484.196 261.375 482.991 261.438 481.162 261.438 481.17 261.5 478.795 261.5 478.821 261.5 476.002 261.544 472.6 263.544 472.626 263.5 476.021 263.5 476.008 263.5 478.834 263.437 481.226 263.374 483.043 263.375 483.008 263.375 484.267 263.303 484.775 263.24 485.212 263.249 485.124 263.186 486.311 263.188 486.258 263.188 488.084 263.125 490.472 263.125 490.468 263.062 493.281 263.063 493.258 263.063 496.451 263 503.33ZM258.541 473.935 262.563 465.946 266.54 473.957Z"
|
||||
fill="#AEAEAE"
|
||||
id="path12591" />
|
||||
<path
|
||||
d="M0.82845-0.560062 22.176 31.0174 20.5191 32.1375-0.82845 0.560062ZM23.9146 28.2326 25.0813 37.1005 17.287 32.7131Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 262 438.1)"
|
||||
id="path12593" />
|
||||
<g
|
||||
aria-label="(batch_jo...batch_sizebatch_joinlabel_bat...Image_b...Inputphase_tr..."
|
||||
transform="translate(311.168 781)"
|
||||
id="text12605"
|
||||
style="font-size:17px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 0.697,-5.933 q 0,-2.448 1.258,-4.539 1.258,-2.108 3.604,-3.57 l 0.697,0.986 q -2.006,1.343 -3.077,3.162 -1.054,1.819 -1.054,3.927 0,2.108 1.054,3.927 1.071,1.819 3.077,3.162 L 5.559,2.108 Q 3.213,0.646 1.955,-1.428 0.697,-3.519 0.697,-5.933 Z"
|
||||
id="path13077" />
|
||||
<path
|
||||
d="m 8.8060039,0 h -1.36 v -11.9 h 1.377 v 4.964 q 0.544,-0.697 1.3260001,-1.071 0.799,-0.391 1.683,-0.391 1.734,0 2.89,1.207 1.173,1.19 1.173,3.06 0,1.887 -1.19,3.111 -1.19,1.207 -2.941,1.207 -0.867,0 -1.649,-0.374 -0.7650001,-0.374 -1.3090001,-1.037 z m 2.8900001,-7.174 q -1.224,0 -2.0740001,0.901 -0.85,0.884 -0.85,2.21 0,1.292 0.833,2.176 0.8330001,0.867 2.0400001,0.867 1.258,0 2.04,-0.833 0.799,-0.85 0.799,-2.244 0,-1.377 -0.782,-2.227 -0.765,-0.85 -2.006,-0.85 z"
|
||||
id="path13079" />
|
||||
<path
|
||||
d="m 24.377993,0 h -1.343 v -1.343 q -0.493,0.714 -1.343,1.122 -0.833,0.391 -1.802,0.391 -1.36,0 -2.21,-0.68 -0.85,-0.697 -0.85,-1.819 0,-1.241 0.952,-1.87 0.952,-0.646 2.822,-0.646 h 2.414 q -0.017,-1.156 -0.612,-1.785 -0.578,-0.629 -1.666,-0.629 -0.646,0 -1.275,0.272 -0.629,0.272 -1.241,0.833 l -0.731,-0.918 q 0.714,-0.68 1.564,-1.02 0.85,-0.357 1.819,-0.357 1.649,0 2.567,0.986 0.935,0.969 0.935,2.686 z m -1.36,-3.774 h -2.397 q -1.241,0 -1.836,0.34 -0.595,0.34 -0.595,1.071 0,0.612 0.51,1.02 0.527,0.408 1.343,0.408 1.224,0 2.057,-0.782 0.85,-0.799 0.918,-2.057 z"
|
||||
id="path13081" />
|
||||
<path
|
||||
d="m 26.451997,-2.193 v -4.794 h -1.122 v -1.224 h 1.122 v -2.516 h 1.377 v 2.516 h 1.904 v 1.224 h -1.904 v 4.675 q 0,0.544 0.238,0.816 0.255,0.255 0.765,0.255 h 0.901 V 0 h -1.105 q -1.139,0 -1.666,-0.527 -0.51,-0.527 -0.51,-1.666 z"
|
||||
id="path13083" />
|
||||
<path
|
||||
d="m 34.867008,0.255 q -1.887,0 -3.077,-1.19 -1.173,-1.207 -1.173,-3.145 0,-1.836 1.241,-3.094 1.241,-1.258 3.043,-1.258 1.309,0 2.227,0.612 0.935,0.595 1.411,1.734 l -1.173,0.561 q -0.391,-0.85 -1.02,-1.275 -0.612,-0.425 -1.462,-0.425 -1.207,0 -2.04,0.918 -0.816,0.901 -0.816,2.227 0,1.343 0.816,2.244 0.833,0.884 2.074,0.884 0.799,0 1.479,-0.425 0.697,-0.442 1.122,-1.224 l 1.071,0.578 q -0.493,1.054 -1.479,1.666 -0.986,0.612 -2.244,0.612 z"
|
||||
id="path13085" />
|
||||
<path
|
||||
d="m 39.967006,-11.9 h 1.377 v 4.93 q 0.493,-0.697 1.224,-1.071 0.748,-0.374 1.666,-0.374 1.649,0 2.567,1.054 0.935,1.054 0.935,2.89 V 0 h -1.377 v -4.386 q 0,-1.309 -0.646,-2.04 -0.646,-0.748 -1.802,-0.748 -1.156,0 -1.87,0.799 -0.697,0.799 -0.697,2.074 V 0 h -1.377 z"
|
||||
id="path13087" />
|
||||
<path
|
||||
d="M 48.500992,2.193 V 0.952 h 8.636 v 1.241 z"
|
||||
id="path13089" />
|
||||
<path
|
||||
d="m 58.105989,-10.965 h 1.53 v 1.513 h -1.53 z m 1.462,2.754 V 0.85 q 0,1.071 -0.561,1.632 -0.544,0.578 -1.598,0.578 h -0.986 V 1.904 h 0.799 q 0.476,0 0.714,-0.272 0.238,-0.255 0.238,-0.765 v -9.078 z"
|
||||
id="path13091" />
|
||||
<path
|
||||
d="m 69.700002,-4.097 q 0,1.853 -1.241,3.111 -1.241,1.241 -3.06,1.241 -1.819,0 -3.043,-1.241 -1.207,-1.241 -1.207,-3.094 0,-1.853 1.241,-3.094 1.241,-1.258 3.06,-1.258 1.819,0 3.026,1.241 1.224,1.241 1.224,3.094 z m -7.14,0.017 q 0,1.326 0.816,2.227 0.833,0.901 2.057,0.901 1.207,0 2.023,-0.901 0.833,-0.901 0.833,-2.244 0,-1.309 -0.833,-2.21 -0.833,-0.918 -2.04,-0.918 -1.207,0 -2.04,0.901 -0.816,0.901 -0.816,2.244 z"
|
||||
id="path13093" />
|
||||
<path
|
||||
d="m 70.924009,-1.717 h 1.717 V 0 h -1.717 z"
|
||||
id="path13095" />
|
||||
<path
|
||||
d="m 74.273016,-1.717 h 1.717 V 0 h -1.717 z"
|
||||
id="path13097" />
|
||||
<path
|
||||
d="m 77.62203,-1.717 h 1.717 V 0 h -1.717 z"
|
||||
id="path13099" />
|
||||
<path
|
||||
d="m 52.692,177 h -1.36 v -11.9 h 1.377 v 4.964 q 0.544,-0.697 1.326,-1.071 0.799,-0.391 1.683,-0.391 1.734,0 2.89,1.207 1.173,1.19 1.173,3.06 0,1.887 -1.19,3.111 -1.19,1.207 -2.941,1.207 -0.867,0 -1.649,-0.374 -0.765,-0.374 -1.309,-1.037 z m 2.89,-7.174 q -1.224,0 -2.074,0.901 -0.85,0.884 -0.85,2.21 0,1.292 0.833,2.176 0.833,0.867 2.04,0.867 1.258,0 2.04,-0.833 0.799,-0.85 0.799,-2.244 0,-1.377 -0.782,-2.227 -0.765,-0.85 -2.006,-0.85 z"
|
||||
id="path13101" />
|
||||
<path
|
||||
d="m 68.263989,177 h -1.343 v -1.343 q -0.493,0.714 -1.343,1.122 -0.833,0.391 -1.802,0.391 -1.36,0 -2.21,-0.68 -0.85,-0.697 -0.85,-1.819 0,-1.241 0.952,-1.87 0.952,-0.646 2.822,-0.646 h 2.414 q -0.017,-1.156 -0.612,-1.785 -0.578,-0.629 -1.666,-0.629 -0.646,0 -1.275,0.272 -0.629,0.272 -1.241,0.833 l -0.731,-0.918 q 0.714,-0.68 1.564,-1.02 0.85,-0.357 1.819,-0.357 1.649,0 2.567,0.986 0.935,0.969 0.935,2.686 z m -1.36,-3.774 h -2.397 q -1.241,0 -1.836,0.34 -0.595,0.34 -0.595,1.071 0,0.612 0.51,1.02 0.527,0.408 1.343,0.408 1.224,0 2.057,-0.782 0.85,-0.799 0.918,-2.057 z"
|
||||
id="path13103" />
|
||||
<path
|
||||
d="m 70.337993,174.807 v -4.794 h -1.122 v -1.224 h 1.122 v -2.516 h 1.377 v 2.516 h 1.904 v 1.224 h -1.904 v 4.675 q 0,0.544 0.238,0.816 0.255,0.255 0.765,0.255 h 0.901 V 177 h -1.105 q -1.139,0 -1.666,-0.527 -0.51,-0.527 -0.51,-1.666 z"
|
||||
id="path13105" />
|
||||
<path
|
||||
d="m 78.753004,177.255 q -1.887,0 -3.077,-1.19 -1.173,-1.207 -1.173,-3.145 0,-1.836 1.241,-3.094 1.241,-1.258 3.043,-1.258 1.309,0 2.227,0.612 0.935,0.595 1.411,1.734 l -1.173,0.561 q -0.391,-0.85 -1.02,-1.275 -0.612,-0.425 -1.462,-0.425 -1.207,0 -2.04,0.918 -0.816,0.901 -0.816,2.227 0,1.343 0.816,2.244 0.833,0.884 2.074,0.884 0.799,0 1.479,-0.425 0.697,-0.442 1.122,-1.224 l 1.071,0.578 q -0.493,1.054 -1.479,1.666 -0.986,0.612 -2.244,0.612 z"
|
||||
id="path13107" />
|
||||
<path
|
||||
d="m 83.853003,165.1 h 1.377 v 4.93 q 0.493,-0.697 1.224,-1.071 0.748,-0.374 1.666,-0.374 1.649,0 2.567,1.054 0.935,1.054 0.935,2.89 V 177 h -1.377 v -4.386 q 0,-1.309 -0.646,-2.04 -0.646,-0.748 -1.802,-0.748 -1.156,0 -1.87,0.799 -0.697,0.799 -0.697,2.074 V 177 h -1.377 z"
|
||||
id="path13109" />
|
||||
<path
|
||||
d="m 92.38699,179.193 v -1.241 h 8.636 v 1.241 z"
|
||||
id="path13111" />
|
||||
<path
|
||||
d="m 108.50298,174.603 q 0,1.156 -0.986,1.887 -0.986,0.731 -2.567,0.731 -1.037,0 -1.955,-0.323 -0.901,-0.34 -1.632,-1.003 l 0.714,-1.054 q 0.782,0.646 1.462,0.935 0.68,0.289 1.445,0.289 0.986,0 1.581,-0.374 0.612,-0.391 0.612,-1.003 0,-0.629 -0.51,-0.901 -0.51,-0.289 -1.683,-0.306 -1.717,-0.068 -2.516,-0.629 -0.782,-0.561 -0.782,-1.751 0,-1.088 0.901,-1.802 0.918,-0.731 2.346,-0.731 0.969,0 1.819,0.306 0.85,0.306 1.547,0.901 l -0.663,1.037 q -0.697,-0.561 -1.343,-0.816 -0.646,-0.255 -1.377,-0.255 -0.833,0 -1.377,0.374 -0.527,0.374 -0.527,0.918 0,0.595 0.476,0.867 0.476,0.272 1.513,0.323 1.853,0.051 2.669,0.612 0.833,0.561 0.833,1.768 z"
|
||||
id="path13113" />
|
||||
<path
|
||||
d="m 109.98199,166.035 h 1.53 v 1.513 h -1.53 z m 0.068,10.965 v -8.211 h 1.394 V 177 Z"
|
||||
id="path13115" />
|
||||
<path
|
||||
d="m 120.06299,177 h -7.191 v -1.088 l 5.236,-5.967 h -5.032 v -1.156 h 6.766 v 1.088 l -5.185,5.967 h 5.406 z"
|
||||
id="path13117" />
|
||||
<path
|
||||
d="m 124.993,176.082 q 0.816,0 1.496,-0.34 0.697,-0.357 1.275,-1.071 l 0.833,0.765 q -0.697,0.918 -1.632,1.377 -0.935,0.442 -2.091,0.442 -1.853,0 -3.043,-1.207 -1.173,-1.224 -1.173,-3.128 0,-1.853 1.207,-3.094 1.207,-1.241 2.975,-1.241 1.802,0 2.924,1.173 1.122,1.173 1.122,3.06 0,0.136 -0.017,0.323 -0.017,0.17 -0.034,0.255 h -6.749 q 0.136,1.207 0.935,1.955 0.816,0.731 1.972,0.731 z m -0.136,-6.324 q -1.071,0 -1.836,0.697 -0.765,0.697 -0.935,1.87 h 5.406 q -0.153,-1.19 -0.867,-1.87 -0.714,-0.697 -1.768,-0.697 z"
|
||||
id="path13119" />
|
||||
<path
|
||||
d="m -34.847898,-244 h -2.4 v -21 h 2.43 v 8.76 q 0.96,-1.23 2.34,-1.89 1.41,-0.69 2.97,-0.69 3.06,0 5.1,2.13 2.07,2.1 2.07,5.4 0,3.33 -2.1,5.49 -2.1,2.13 -5.19,2.13 -1.53,0 -2.91,-0.66 -1.35,-0.66 -2.31,-1.83 z m 5.1,-12.66 q -2.16,0 -3.66,1.59 -1.5,1.56 -1.5,3.9 0,2.28 1.47,3.84 1.47,1.53 3.6,1.53 2.22,0 3.6,-1.47 1.41,-1.5 1.41,-3.96 0,-2.43 -1.38,-3.93 -1.35,-1.5 -3.54,-1.5 z"
|
||||
style="font-size:30px"
|
||||
id="path13121" />
|
||||
<path
|
||||
d="m -7.367917,-244 h -2.37 v -2.37 q -0.87,1.26 -2.37,1.98 -1.47,0.69 -3.18,0.69 -2.4,0 -3.9,-1.2 -1.5,-1.23 -1.5,-3.21 0,-2.19 1.68,-3.3 1.68,-1.14 4.98,-1.14 h 4.26 q -0.03,-2.04 -1.08,-3.15 -1.02,-1.11 -2.94,-1.11 -1.14,0 -2.25,0.48 -1.11,0.48 -2.19,1.47 l -1.29,-1.62 q 1.26,-1.2 2.76,-1.8 1.5,-0.63 3.21,-0.63 2.91,0 4.53,1.74 1.65,1.71 1.65,4.74 z m -2.4,-6.66 h -4.23 q -2.19,0 -3.24,0.6 -1.05,0.6 -1.05,1.89 0,1.08 0.9,1.8 0.93,0.72 2.37,0.72 2.16,0 3.63,-1.38 1.5,-1.41 1.62,-3.63 z"
|
||||
style="font-size:30px"
|
||||
id="path13123" />
|
||||
<path
|
||||
d="m -3.7079102,-247.87 v -8.46 h -1.98 v -2.16 h 1.98 v -4.44 h 2.43 v 4.44 h 3.36 v 2.16 h -3.36 v 8.25 q 0,0.96 0.42000004,1.44 0.45,0.45 1.35,0.45 H 2.0820898 V -244 H 0.13208984 q -2.01000004,0 -2.94000004,-0.93 -0.9,-0.93 -0.9,-2.94 z"
|
||||
style="font-size:30px"
|
||||
id="path13125" />
|
||||
<path
|
||||
d="m 11.142109,-243.55 q -3.3300004,0 -5.4300004,-2.1 -2.07,-2.13 -2.07,-5.55 0,-3.24 2.19,-5.46 2.19,-2.22 5.3700004,-2.22 2.31,0 3.93,1.08 1.65,1.05 2.49,3.06 l -2.07,0.99 q -0.69,-1.5 -1.8,-2.25 -1.08,-0.75 -2.58,-0.75 -2.1300004,0 -3.6000004,1.62 -1.44,1.59 -1.44,3.93 0,2.37 1.44,3.96 1.47,1.56 3.6600004,1.56 1.41,0 2.61,-0.75 1.23,-0.78 1.98,-2.16 l 1.89,1.02 q -0.87,1.86 -2.61,2.94 -1.74,1.08 -3.96,1.08 z"
|
||||
style="font-size:30px"
|
||||
id="path13127" />
|
||||
<path
|
||||
d="m 20.142105,-265 h 2.43 v 8.7 q 0.87,-1.23 2.16,-1.89 1.32,-0.66 2.94,-0.66 2.91,0 4.53,1.86 1.65,1.86 1.65,5.1 v 7.89 h -2.43 v -7.74 q 0,-2.31 -1.14,-3.6 -1.14,-1.32 -3.18,-1.32 -2.04,0 -3.3,1.41 -1.23,1.41 -1.23,3.66 v 7.59 h -2.43 z"
|
||||
style="font-size:30px"
|
||||
id="path13129" />
|
||||
<path
|
||||
d="m 35.202077,-240.13 v -2.19 h 15.24 v 2.19 z"
|
||||
style="font-size:30px"
|
||||
id="path13131" />
|
||||
<path
|
||||
d="m 52.152072,-263.35 h 2.7 v 2.67 h -2.7 z m 2.58,4.86 v 15.99 q 0,1.89 -0.99,2.88 -0.96,1.02 -2.82,1.02 h -1.74 v -2.04 h 1.41 q 0.84,0 1.26,-0.48 0.42,-0.45 0.42,-1.35 v -16.02 z"
|
||||
style="font-size:30px"
|
||||
id="path13133" />
|
||||
<path
|
||||
d="m 72.612094,-251.23 q 0,3.27 -2.19,5.49 -2.19,2.19 -5.4,2.19 -3.21,0 -5.37,-2.19 -2.13,-2.19 -2.13,-5.46 0,-3.27 2.19,-5.46 2.19,-2.22 5.4,-2.22 3.21,0 5.34,2.19 2.16,2.19 2.16,5.46 z m -12.6,0.03 q 0,2.34 1.44,3.93 1.47,1.59 3.63,1.59 2.13,0 3.57,-1.59 1.47,-1.59 1.47,-3.96 0,-2.31 -1.47,-3.9 -1.47,-1.62 -3.6,-1.62 -2.13,0 -3.6,1.59 -1.44,1.59 -1.44,3.96 z"
|
||||
style="font-size:30px"
|
||||
id="path13135" />
|
||||
<path
|
||||
d="m 75.282107,-263.35 h 2.7 v 2.67 h -2.7 z m 0.12,19.35 v -14.49 h 2.46 V -244 Z"
|
||||
style="font-size:30px"
|
||||
id="path13137" />
|
||||
<path
|
||||
d="m 81.312092,-258.49 h 2.4 v 2.25 q 0.87,-1.26 2.19,-1.92 1.35,-0.69 3,-0.69 2.88,0 4.5,1.86 1.62,1.86 1.62,5.1 v 7.89 h -2.43 v -7.74 q 0,-2.28 -1.14,-3.6 -1.11,-1.32 -3.12,-1.32 -2.07,0 -3.33,1.41 -1.26,1.41 -1.26,3.66 v 7.59 h -2.43 z"
|
||||
style="font-size:30px"
|
||||
id="path13139" />
|
||||
<path
|
||||
d="m 67.570098,-363.9 h 1.394 v 11.9 h -1.394 z"
|
||||
id="path13141" />
|
||||
<path
|
||||
d="m 77.974087,-352 h -1.343 v -1.343 q -0.493,0.714 -1.343,1.122 -0.833,0.391 -1.802,0.391 -1.36,0 -2.21,-0.68 -0.85,-0.697 -0.85,-1.819 0,-1.241 0.952,-1.87 0.952,-0.646 2.822,-0.646 h 2.414 q -0.017,-1.156 -0.612,-1.785 -0.578,-0.629 -1.666,-0.629 -0.646,0 -1.275,0.272 -0.629,0.272 -1.241,0.833 l -0.731,-0.918 q 0.714,-0.68 1.564,-1.02 0.85,-0.357 1.819,-0.357 1.649,0 2.567,0.986 0.935,0.969 0.935,2.686 z m -1.36,-3.774 h -2.397 q -1.241,0 -1.836,0.34 -0.595,0.34 -0.595,1.071 0,0.612 0.51,1.02 0.527,0.408 1.343,0.408 1.224,0 2.057,-0.782 0.85,-0.799 0.918,-2.057 z"
|
||||
id="path13143" />
|
||||
<path
|
||||
d="m 81.034077,-352 h -1.36 v -11.9 h 1.377 v 4.964 q 0.544,-0.697 1.326,-1.071 0.799,-0.391 1.683,-0.391 1.734,0 2.89,1.207 1.173,1.19 1.173,3.06 0,1.887 -1.19,3.111 -1.19,1.207 -2.941,1.207 -0.867,0 -1.649,-0.374 -0.765,-0.374 -1.309,-1.037 z m 2.89,-7.174 q -1.224,0 -2.074,0.901 -0.85,0.884 -0.85,2.21 0,1.292 0.833,2.176 0.833,0.867 2.04,0.867 1.258,0 2.04,-0.833 0.799,-0.85 0.799,-2.244 0,-1.377 -0.782,-2.227 -0.765,-0.85 -2.006,-0.85 z"
|
||||
id="path13145" />
|
||||
<path
|
||||
d="m 93.512066,-352.918 q 0.816,0 1.496,-0.34 0.697,-0.357 1.275,-1.071 l 0.833,0.765 q -0.697,0.918 -1.632,1.377 -0.935,0.442 -2.091,0.442 -1.853,0 -3.043,-1.207 -1.173,-1.224 -1.173,-3.128 0,-1.853 1.207,-3.094 1.207,-1.241 2.975,-1.241 1.802,0 2.924,1.173 1.122,1.173 1.122,3.06 0,0.136 -0.017,0.323 -0.017,0.17 -0.034,0.255 h -6.749 q 0.136,1.207 0.935,1.955 0.816,0.731 1.972,0.731 z m -0.136,-6.324 q -1.071,0 -1.836,0.697 -0.765,0.697 -0.935,1.87 h 5.406 q -0.153,-1.19 -0.867,-1.87 -0.714,-0.697 -1.768,-0.697 z"
|
||||
id="path13147" />
|
||||
<path
|
||||
d="m 98.952077,-363.9 h 1.394003 v 11.9 h -1.394003 z"
|
||||
id="path13149" />
|
||||
<path
|
||||
d="m 101.36607,-349.807 v -1.241 h 8.636 v 1.241 z"
|
||||
id="path13151" />
|
||||
<path
|
||||
d="m 112.22906,-352 h -1.36 v -11.9 h 1.377 v 4.964 q 0.544,-0.697 1.326,-1.071 0.799,-0.391 1.683,-0.391 1.734,0 2.89,1.207 1.173,1.19 1.173,3.06 0,1.887 -1.19,3.111 -1.19,1.207 -2.941,1.207 -0.867,0 -1.649,-0.374 -0.765,-0.374 -1.309,-1.037 z m 2.89,-7.174 q -1.224,0 -2.074,0.901 -0.85,0.884 -0.85,2.21 0,1.292 0.833,2.176 0.833,0.867 2.04,0.867 1.258,0 2.04,-0.833 0.799,-0.85 0.799,-2.244 0,-1.377 -0.782,-2.227 -0.765,-0.85 -2.006,-0.85 z"
|
||||
id="path13153" />
|
||||
<path
|
||||
d="m 127.80105,-352 h -1.343 v -1.343 q -0.493,0.714 -1.343,1.122 -0.833,0.391 -1.802,0.391 -1.36,0 -2.21,-0.68 -0.85,-0.697 -0.85,-1.819 0,-1.241 0.952,-1.87 0.952,-0.646 2.822,-0.646 h 2.414 q -0.017,-1.156 -0.612,-1.785 -0.578,-0.629 -1.666,-0.629 -0.646,0 -1.275,0.272 -0.629,0.272 -1.241,0.833 l -0.731,-0.918 q 0.714,-0.68 1.564,-1.02 0.85,-0.357 1.819,-0.357 1.649,0 2.567,0.986 0.935,0.969 0.935,2.686 z m -1.36,-3.774 h -2.397 q -1.241,0 -1.836,0.34 -0.595,0.34 -0.595,1.071 0,0.612 0.51,1.02 0.527,0.408 1.343,0.408 1.224,0 2.057,-0.782 0.85,-0.799 0.918,-2.057 z"
|
||||
id="path13155" />
|
||||
<path
|
||||
d="m 129.87505,-354.193 v -4.794 h -1.122 v -1.224 h 1.122 v -2.516 h 1.377 v 2.516 h 1.904 v 1.224 h -1.904 v 4.675 q 0,0.544 0.238,0.816 0.255,0.255 0.765,0.255 h 0.901 V -352 h -1.105 q -1.139,0 -1.666,-0.527 -0.51,-0.527 -0.51,-1.666 z"
|
||||
id="path13157" />
|
||||
<path
|
||||
d="m 134.46506,-353.717 h 1.717 V -352 h -1.717 z"
|
||||
id="path13159" />
|
||||
<path
|
||||
d="m 137.81406,-353.717 h 1.717 V -352 h -1.717 z"
|
||||
id="path13161" />
|
||||
<path
|
||||
d="m 141.16308,-353.717 h 1.717 V -352 h -1.717 z"
|
||||
id="path13163" />
|
||||
<path
|
||||
d="m -88.121497,-347 v -11.9 h 1.462 v 11.9 z"
|
||||
id="path13165" />
|
||||
<path
|
||||
d="m -84.534505,-355.211 h 1.36 v 0.986 q 0.459,-0.595 1.037,-0.884 0.595,-0.306 1.326,-0.306 0.85,0 1.513,0.357 0.663,0.357 1.054,1.037 0.459,-0.68 1.156,-1.037 0.714,-0.357 1.598,-0.357 1.445,0 2.295,0.935 0.867,0.935 0.867,2.431 V -347 h -1.394 v -4.964 q 0,-1.037 -0.527,-1.615 -0.51,-0.595 -1.445,-0.595 -0.918,0 -1.496,0.646 -0.561,0.629 -0.561,1.649 V -347 h -1.377 v -4.964 q 0,-1.037 -0.527,-1.615 -0.51,-0.595 -1.445,-0.595 -0.918,0 -1.496,0.646 -0.561,0.629 -0.561,1.649 V -347 h -1.377 z"
|
||||
id="path13167" />
|
||||
<path
|
||||
d="m -63.573491,-347 h -1.343 v -1.343 q -0.493,0.714 -1.343,1.122 -0.833,0.391 -1.802,0.391 -1.36,0 -2.21,-0.68 -0.85,-0.697 -0.85,-1.819 0,-1.241 0.952,-1.87 0.952,-0.646 2.822,-0.646 h 2.414 q -0.017,-1.156 -0.612,-1.785 -0.578,-0.629 -1.666,-0.629 -0.646,0 -1.275,0.272 -0.629,0.272 -1.241,0.833 l -0.731,-0.918 q 0.714,-0.68 1.564,-1.02 0.85,-0.357 1.819,-0.357 1.649,0 2.567,0.986 0.935,0.969 0.935,2.686 z m -1.36,-3.774 h -2.397 q -1.241,0 -1.836,0.34 -0.595,0.34 -0.595,1.071 0,0.612 0.51,1.02 0.527,0.408 1.343,0.408 1.224,0 2.057,-0.782 0.85,-0.799 0.918,-2.057 z"
|
||||
id="path13169" />
|
||||
<path
|
||||
d="m -55.158502,-347.969 v -0.578 q -0.561,0.68 -1.36,1.054 -0.782,0.374 -1.683,0.374 -1.734,0 -2.89,-1.156 -1.156,-1.156 -1.156,-2.975 0,-1.802 1.173,-2.975 1.19,-1.19 2.924,-1.19 0.884,0 1.666,0.374 0.782,0.374 1.343,1.054 v -1.224 h 1.377 v 7.191 q 0,2.04 -1.088,3.06 -1.088,1.02 -3.281,1.02 h -2.074 v -1.19 h 2.142 q 1.445,0 2.176,-0.714 0.731,-0.714 0.731,-2.125 z m -2.839,-0.374 q 1.224,0 2.057,-0.833 0.85,-0.85 0.85,-2.074 0,-1.258 -0.85,-2.091 -0.833,-0.85 -2.057,-0.85 -1.275,0 -2.057,0.799 -0.782,0.799 -0.782,2.125 0,1.309 0.782,2.125 0.799,0.799 2.057,0.799 z"
|
||||
id="path13171" />
|
||||
<path
|
||||
d="m -48.018489,-347.918 q 0.816,0 1.496,-0.34 0.697,-0.357 1.275,-1.071 l 0.833,0.765 q -0.697,0.918 -1.632,1.377 -0.935,0.442 -2.091,0.442 -1.853,0 -3.043,-1.207 -1.173,-1.224 -1.173,-3.128 0,-1.853 1.207,-3.094 1.207,-1.241 2.975,-1.241 1.802,0 2.924,1.173 1.122,1.173 1.122,3.06 0,0.136 -0.017,0.323 -0.017,0.17 -0.034,0.255 h -6.749 q 0.136,1.207 0.935,1.955 0.816,0.731 1.972,0.731 z m -0.136,-6.324 q -1.071,0 -1.836,0.697 -0.765,0.697 -0.935,1.87 h 5.406 q -0.153,-1.19 -0.867,-1.87 -0.714,-0.697 -1.768,-0.697 z"
|
||||
id="path13173" />
|
||||
<path
|
||||
d="m -43.666477,-344.807 v -1.241 h 8.636 v 1.241 z"
|
||||
id="path13175" />
|
||||
<path
|
||||
d="m -32.803484,-347 h -1.36 v -11.9 h 1.377 v 4.964 q 0.544,-0.697 1.326,-1.071 0.799,-0.391 1.683,-0.391 1.734,0 2.89,1.207 1.173,1.19 1.173,3.06 0,1.887 -1.19,3.111 -1.19,1.207 -2.941,1.207 -0.867,0 -1.649,-0.374 -0.765,-0.374 -1.309,-1.037 z m 2.89,-7.174 q -1.224,0 -2.074,0.901 -0.85,0.884 -0.85,2.21 0,1.292 0.833,2.176 0.833,0.867 2.04,0.867 1.258,0 2.04,-0.833 0.799,-0.85 0.799,-2.244 0,-1.377 -0.782,-2.227 -0.765,-0.85 -2.006,-0.85 z"
|
||||
id="path13177" />
|
||||
<path
|
||||
d="m -24.490496,-348.717 h 1.717 V -347 h -1.717 z"
|
||||
id="path13179" />
|
||||
<path
|
||||
d="m -21.141481,-348.717 h 1.717 V -347 h -1.717 z"
|
||||
id="path13181" />
|
||||
<path
|
||||
d="m -17.792475,-348.717 h 1.717 V -347 h -1.717 z"
|
||||
id="path13183" />
|
||||
<path
|
||||
d="m -10.7361,-414 v -10.5 h 1.2899999 v 10.5 z"
|
||||
style="font-size:15px"
|
||||
id="path13185" />
|
||||
<path
|
||||
d="m -7.571107,-421.245 h 1.2 v 1.125 q 0.435,-0.63 1.095,-0.96 0.675,-0.345 1.5,-0.345 1.44,0 2.25,0.93 0.81000001,0.93 0.81000001,2.55 V -414 H -1.931107 v -3.87 q 0,-1.14 -0.57,-1.8 -0.555,-0.66 -1.56,-0.66 -1.035,0 -1.665,0.705 -0.63,0.705 -0.63,1.83 V -414 h -1.215 z"
|
||||
style="font-size:15px"
|
||||
id="path13187" />
|
||||
<path
|
||||
d="M 1.9988811,-411.3 H 0.78388111 v -9.945 H 1.9838811 v 1.17 q 0.48,-0.63 1.185,-0.975 0.705,-0.36 1.5,-0.36 1.53,0 2.55,1.065 1.02,1.05 1.02,2.7 0,1.665 -1.065,2.745 -1.05,1.065 -2.595,1.065 -0.735,0 -1.425,-0.3 -0.675,-0.315 -1.155,-0.885 z m 2.535,-9.03 q -1.095,0 -1.845,0.795 -0.735,0.78 -0.735,1.95 0,1.155 0.72,1.92 0.735,0.765 1.815,0.765 1.11,0 1.8,-0.735 0.705,-0.75 0.705,-1.98 0,-1.215 -0.69,-1.965 -0.675,-0.75 -1.77,-0.75 z"
|
||||
style="font-size:15px"
|
||||
id="path13189" />
|
||||
<path
|
||||
d="m 16.278872,-414 h -1.215 v -1.11 q -0.435,0.63 -1.095,0.96 -0.66,0.315 -1.5,0.315 -1.425,0 -2.25,-0.93 -0.8100003,-0.93 -0.8100003,-2.535 v -3.945 h 1.2300003 v 3.87 q 0,1.14 0.555,1.8 0.57,0.66 1.575,0.66 1.02,0 1.65,-0.705 0.63,-0.705 0.63,-1.83 v -3.795 h 1.23 z"
|
||||
style="font-size:15px"
|
||||
id="path13191" />
|
||||
<path
|
||||
d="m 18.25886,-415.935 v -4.23 h -0.99 v -1.08 h 0.99 v -2.22 h 1.215 v 2.22 h 1.68 v 1.08 h -1.68 v 4.125 q 0,0.48 0.21,0.72 0.225,0.225 0.675,0.225 h 0.795 V -414 h -0.975 q -1.005,0 -1.47,-0.465 -0.45,-0.465 -0.45,-1.47 z"
|
||||
style="font-size:15px"
|
||||
id="path13193" />
|
||||
<path
|
||||
d="m 23.598861,-411.3 h -1.215 v -9.945 h 1.2 v 1.17 q 0.48,-0.63 1.185,-0.975 0.705,-0.36 1.5,-0.36 1.53,0 2.55,1.065 1.02,1.05 1.02,2.7 0,1.665 -1.065,2.745 -1.05,1.065 -2.595,1.065 -0.735,0 -1.425,-0.3 -0.675,-0.315 -1.155,-0.885 z m 2.535,-9.03 q -1.095,0 -1.845,0.795 -0.735,0.78 -0.735,1.95 0,1.155 0.72,1.92 0.735,0.765 1.815,0.765 1.11,0 1.8,-0.735 0.705,-0.75 0.705,-1.98 0,-1.215 -0.69,-1.965 -0.675,-0.75 -1.77,-0.75 z"
|
||||
style="font-size:15px"
|
||||
id="path13195" />
|
||||
<path
|
||||
d="m 31.098849,-424.5 h 1.215 v 4.35 q 0.435,-0.615 1.08,-0.945 0.66,-0.33 1.47,-0.33 1.455,0 2.265,0.93 0.825,0.93 0.825,2.55 V -414 h -1.215 v -3.87 q 0,-1.155 -0.57,-1.8 -0.57,-0.66 -1.59,-0.66 -1.02,0 -1.65,0.705 -0.615,0.705 -0.615,1.83 V -414 h -1.215 z"
|
||||
style="font-size:15px"
|
||||
id="path13197" />
|
||||
<path
|
||||
d="m 45.678837,-414 h -1.185 v -1.185 q -0.435,0.63 -1.185,0.99 -0.735,0.345 -1.59,0.345 -1.2,0 -1.95,-0.6 -0.75,-0.615 -0.75,-1.605 0,-1.095 0.84,-1.65 0.84,-0.57 2.49,-0.57 h 2.13 q -0.015,-1.02 -0.54,-1.575 -0.51,-0.555 -1.47,-0.555 -0.57,0 -1.125,0.24 -0.555,0.24 -1.095,0.735 l -0.645,-0.81 q 0.63,-0.6 1.38,-0.9 0.75,-0.315 1.605,-0.315 1.455,0 2.265,0.87 0.825,0.855 0.825,2.37 z m -1.2,-3.33 h -2.115 q -1.095,0 -1.62,0.3 -0.525,0.3 -0.525,0.945 0,0.54 0.45,0.9 0.465,0.36 1.185,0.36 1.08,0 1.815,-0.69 0.75,-0.705 0.81,-1.815 z"
|
||||
style="font-size:15px"
|
||||
id="path13199" />
|
||||
<path
|
||||
d="m 53.013828,-416.115 q 0,1.02 -0.87,1.665 -0.87,0.645 -2.265,0.645 -0.915,0 -1.725,-0.285 -0.795,-0.3 -1.44,-0.885 l 0.63,-0.93 q 0.69,0.57 1.29,0.825 0.6,0.255 1.275,0.255 0.87,0 1.395,-0.33 0.54,-0.345 0.54,-0.885 0,-0.555 -0.45,-0.795 -0.45,-0.255 -1.485,-0.27 -1.515,-0.06 -2.22,-0.555 -0.69,-0.495 -0.69,-1.545 0,-0.96 0.795,-1.59 0.81,-0.645 2.07,-0.645 0.855,0 1.605,0.27 0.75,0.27 1.365,0.795 l -0.585,0.915 q -0.615,-0.495 -1.185,-0.72 -0.57,-0.225 -1.215,-0.225 -0.735,0 -1.215,0.33 -0.465,0.33 -0.465,0.81 0,0.525 0.42,0.765 0.42,0.24 1.335,0.285 1.635,0.045 2.355,0.54 0.735,0.495 0.735,1.56 z"
|
||||
style="font-size:15px"
|
||||
id="path13201" />
|
||||
<path
|
||||
d="m 57.738838,-414.81 q 0.72,0 1.32,-0.3 0.615,-0.315 1.125,-0.945 l 0.735,0.675 q -0.615,0.81 -1.44,1.215 -0.825,0.39 -1.845,0.39 -1.635,0 -2.685,-1.065 -1.035,-1.08 -1.035,-2.76 0,-1.635 1.065,-2.73 1.065,-1.095 2.625,-1.095 1.59,0 2.58,1.035 0.99,1.035 0.99,2.7 0,0.12 -0.015,0.285 -0.015,0.15 -0.03,0.225 h -5.955 q 0.12,1.065 0.825,1.725 0.72,0.645 1.74,0.645 z m -0.12,-5.58 q -0.945,0 -1.62,0.615 -0.675,0.615 -0.825,1.65 h 4.77 q -0.135,-1.05 -0.765,-1.65 -0.63,-0.615 -1.56,-0.615 z"
|
||||
style="font-size:15px"
|
||||
id="path13203" />
|
||||
<path
|
||||
d="m 61.57885,-412.065 v -1.095 h 7.62 v 1.095 z"
|
||||
style="font-size:15px"
|
||||
id="path13205" />
|
||||
<path
|
||||
d="m 70.368847,-415.935 v -4.23 h -0.99 v -1.08 h 0.99 v -2.22 h 1.215 v 2.22 h 1.68 v 1.08 h -1.68 v 4.125 q 0,0.48 0.21,0.72 0.225,0.225 0.675,0.225 h 0.795 V -414 h -0.975 q -1.005,0 -1.47,-0.465 -0.45,-0.465 -0.45,-1.47 z"
|
||||
style="font-size:15px"
|
||||
id="path13207" />
|
||||
<path
|
||||
d="m 74.493846,-421.245 h 1.2 v 0.915 q 0.33,-0.48 0.855,-0.72 0.525,-0.24 1.245,-0.24 h 0.66 v 1.155 h -0.675 q -1.035,0 -1.56,0.54 -0.51,0.54 -0.51,1.635 v 3.96 h -1.215 z"
|
||||
style="font-size:15px"
|
||||
id="path13209" />
|
||||
<path
|
||||
d="m 78.438843,-415.515 h 1.515 V -414 h -1.515 z"
|
||||
style="font-size:15px"
|
||||
id="path13211" />
|
||||
<path
|
||||
d="m 81.393844,-415.515 h 1.515 V -414 h -1.515 z"
|
||||
style="font-size:15px"
|
||||
id="path13213" />
|
||||
<path
|
||||
d="m 84.348854,-415.515 h 1.515 V -414 h -1.515 z"
|
||||
style="font-size:15px"
|
||||
id="path13215" />
|
||||
</g>
|
||||
<path
|
||||
d="M350.578 914.595 350.04 825.673 352.04 825.66 352.578 914.583ZM347.048 827.024 351 819 355.048 826.976Z"
|
||||
fill="#AEAEAE"
|
||||
id="path12607" />
|
||||
<g
|
||||
aria-label="0 tensors"
|
||||
transform="translate(352.246 339)"
|
||||
id="text12609"
|
||||
style="font-size:15px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 0.555,-4.995 v -0.48 q 0,-2.415 1.155,-3.84 1.155,-1.425 3.09,-1.425 1.935,0 3.06,1.41 Q 9,-7.92 9,-5.505 v 0.48 Q 9,-2.61 7.845,-1.185 6.705,0.24 4.77,0.24 2.835,0.24 1.695,-1.17 0.555,-2.58 0.555,-4.995 Z M 4.77,-9.6 q -1.38,0 -2.16,1.08 -0.765,1.08 -0.765,3.015 v 0.48 q 0,1.95 0.78,3.045 0.78,1.08 2.16,1.08 1.38,0 2.145,-1.08 0.78,-1.08 0.78,-3.015 v -0.48 q 0,-1.95 -0.78,-3.03 Q 6.15,-9.6 4.77,-9.6 Z"
|
||||
id="path13218" />
|
||||
<path
|
||||
d="m 13.529979,-1.935 v -4.23 h -0.99 v -1.08 h 0.99 v -2.22 h 1.215 v 2.22 h 1.68 v 1.08 h -1.68 v 4.125 q 0,0.48 0.21,0.72 0.225,0.225 0.675,0.225 h 0.795 V 0 h -0.975 q -1.005,0 -1.47,-0.465 -0.45,-0.465 -0.45,-1.47 z"
|
||||
id="path13220" />
|
||||
<path
|
||||
d="m 21.029988,-0.81 q 0.72,0 1.32,-0.3 0.615,-0.315 1.125,-0.945 l 0.735,0.675 q -0.615,0.81 -1.44,1.215 -0.825,0.39 -1.845,0.39 -1.635,0 -2.685,-1.065 -1.035,-1.08 -1.035,-2.76 0,-1.635 1.065,-2.73 1.065,-1.095 2.625,-1.095 1.59,0 2.58,1.035 0.99,1.035 0.99,2.7 0,0.12 -0.015,0.285 -0.015,0.15 -0.03,0.225 h -5.955 q 0.12,1.065 0.825,1.725 0.72,0.645 1.74,0.645 z m -0.12,-5.58 q -0.945,0 -1.62,0.615 -0.675,0.615 -0.825,1.65 h 4.77 q -0.135,-1.05 -0.765,-1.65 -0.63,-0.615 -1.56,-0.615 z"
|
||||
id="path13222" />
|
||||
<path
|
||||
d="m 25.694998,-7.245 h 1.2 v 1.125 q 0.435,-0.63 1.095,-0.96 0.675,-0.345 1.5,-0.345 1.44,0 2.25,0.93 0.81,0.93 0.81,2.55 V 0 h -1.215 v -3.87 q 0,-1.14 -0.57,-1.8 -0.555,-0.66 -1.56,-0.66 -1.035,0 -1.665,0.705 -0.63,0.705 -0.63,1.83 V 0 h -1.215 z"
|
||||
id="path13224" />
|
||||
<path
|
||||
d="m 39.884986,-2.115 q 0,1.02 -0.87,1.665 -0.87,0.645 -2.265,0.645 -0.915,0 -1.725,-0.285 -0.795,-0.3 -1.44,-0.885 l 0.63,-0.93 q 0.69,0.57 1.29,0.825 0.6,0.255 1.275,0.255 0.87,0 1.395,-0.33 0.54,-0.345 0.54,-0.885 0,-0.555 -0.45,-0.795 -0.45,-0.255 -1.485,-0.27 -1.515,-0.06 -2.22,-0.555 -0.69,-0.495 -0.69,-1.545 0,-0.96 0.795,-1.59 0.81,-0.645 2.07,-0.645 0.855,0 1.605,0.27 0.75,0.27 1.365,0.795 l -0.585,0.915 q -0.615,-0.495 -1.185,-0.72 -0.57,-0.225 -1.215,-0.225 -0.735,0 -1.215,0.33 -0.465,0.33 -0.465,0.81 0,0.525 0.42,0.765 0.42,0.24 1.335,0.285 1.635,0.045 2.355,0.54 0.735,0.495 0.735,1.56 z"
|
||||
id="path13226" />
|
||||
<path
|
||||
d="m 48.329996,-3.615 q 0,1.635 -1.095,2.745 -1.095,1.095 -2.7,1.095 -1.605,0 -2.685,-1.095 -1.065,-1.095 -1.065,-2.73 0,-1.635 1.095,-2.73 1.095,-1.11 2.7,-1.11 1.605,0 2.67,1.095 1.08,1.095 1.08,2.73 z m -6.3,0.015 q 0,1.17 0.72,1.965 0.735,0.795 1.815,0.795 1.065,0 1.785,-0.795 0.735,-0.795 0.735,-1.98 0,-1.155 -0.735,-1.95 -0.735,-0.81 -1.8,-0.81 -1.065,0 -1.8,0.795 -0.72,0.795 -0.72,1.98 z"
|
||||
id="path13228" />
|
||||
<path
|
||||
d="m 49.590006,-7.245 h 1.2 v 0.915 q 0.33,-0.48 0.855,-0.72 0.525,-0.24 1.245,-0.24 h 0.66 v 1.155 h -0.675 q -1.035,0 -1.56,0.54 -0.51,0.54 -0.51,1.635 V 0 h -1.215 z"
|
||||
id="path13230" />
|
||||
<path
|
||||
d="m 60.375003,-2.115 q 0,1.02 -0.87,1.665 -0.87,0.645 -2.265,0.645 -0.915,0 -1.725,-0.285 -0.795,-0.3 -1.44,-0.885 l 0.63,-0.93 q 0.69,0.57 1.29,0.825 0.6,0.255 1.275,0.255 0.87,0 1.395,-0.33 0.54,-0.345 0.54,-0.885 0,-0.555 -0.45,-0.795 -0.45,-0.255 -1.485,-0.27 -1.515,-0.06 -2.22,-0.555 -0.69,-0.495 -0.69,-1.545 0,-0.96 0.795,-1.59 0.81,-0.645 2.07,-0.645 0.855,0 1.605,0.27 0.75,0.27 1.365,0.795 l -0.585,0.915 q -0.615,-0.495 -1.185,-0.72 -0.57,-0.225 -1.215,-0.225 -0.735,0 -1.215,0.33 -0.465,0.33 -0.465,0.81 0,0.525 0.42,0.765 0.42,0.24 1.335,0.285 1.635,0.045 2.355,0.54 0.735,0.495 0.735,1.56 z"
|
||||
id="path13232" />
|
||||
</g>
|
||||
<path
|
||||
d="M421.502 504.15 413.646 472.71 415.586 472.225 423.442 503.665ZM411.059 474.731 413 466 418.82 472.792Z"
|
||||
fill="#AEAEAE"
|
||||
id="path12611" />
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 56 KiB |
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:1a570510808fb2997ee0d51af6f92c5a4a8f8a59dbd275000489f856e89124d5
|
||||
size 120211
|
||||
3315
docs/MO_DG/img/NCF_start.svg
Normal file
|
After Width: | Height: | Size: 316 KiB |
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:344b2fcb9b7a180a8d8047e65b4aad3ca2651cfc7d5e1e408710a5a3730fed09
|
||||
size 20851
|
||||
290
docs/MO_DG/img/inception_v1_first_block.svg
Normal file
@@ -0,0 +1,290 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="704"
|
||||
height="528"
|
||||
overflow="hidden"
|
||||
version="1.1"
|
||||
id="svg16471"
|
||||
sodipodi:docname="inception_v1_first_block.svg"
|
||||
inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview16473"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
showgrid="false"
|
||||
inkscape:zoom="1.6893939"
|
||||
inkscape:cx="351.90135"
|
||||
inkscape:cy="264"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1137"
|
||||
inkscape:window-x="-8"
|
||||
inkscape:window-y="-8"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg16471" />
|
||||
<defs
|
||||
id="defs16427">
|
||||
<clipPath
|
||||
id="clip0">
|
||||
<rect
|
||||
x="0"
|
||||
y="0"
|
||||
width="704"
|
||||
height="528"
|
||||
id="rect16424" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
<g
|
||||
clip-path="url(#clip0)"
|
||||
id="g16469">
|
||||
<path
|
||||
id="rect16429"
|
||||
style="fill:#ffffff"
|
||||
d="M 0,0 H 704 V 528 H 0 Z" />
|
||||
<path
|
||||
d="M515.574 644.035 412.041 498.017 413.672 496.86 517.206 642.878ZM410.364 500.84 409 492 416.89 496.212Z"
|
||||
fill="#AEAEAE"
|
||||
id="path16431" />
|
||||
<path
|
||||
d="M1.87266-0.702235 72.8024 188.447 69.057 189.852-1.87266 0.702235ZM75.8454 185.17 74.4408 198.513 64.6094 189.383Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 228 690.513)"
|
||||
id="path16433" />
|
||||
<path
|
||||
d="M81 112.002C81 70.0271 115.027 36 157.002 36L546.998 36C588.973 36 623 70.0271 623 112.002L623 415.998C623 457.973 588.973 492 546.998 492L157.002 492C115.027 492 81 457.973 81 415.998Z"
|
||||
stroke="#E96115"
|
||||
stroke-width="6"
|
||||
stroke-miterlimit="8"
|
||||
fill="#E9E9E9"
|
||||
fill-rule="evenodd"
|
||||
id="path16435" />
|
||||
<path
|
||||
d="M354-77 354 25.7768 350 25.7768 350-77ZM358 23.7768 352 35.7768 346 23.7768Z"
|
||||
fill="#AEAEAE"
|
||||
id="path16437" />
|
||||
<path
|
||||
d="M190.5 175C190.5 165.887 207.737 158.5 229 158.5 250.263 158.5 267.5 165.887 267.5 175 267.5 184.113 250.263 191.5 229 191.5 207.737 191.5 190.5 184.113 190.5 175Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path16439" />
|
||||
<path
|
||||
d="M162.5 243.334C162.5 239.007 166.007 235.5 170.334 235.5L286.667 235.5C290.993 235.5 294.5 239.007 294.5 243.334L294.5 274.667C294.5 278.993 290.993 282.5 286.667 282.5L170.334 282.5C166.007 282.5 162.5 278.993 162.5 274.667Z"
|
||||
stroke="#708541"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#B1D272"
|
||||
fill-rule="evenodd"
|
||||
id="path16441" />
|
||||
<path
|
||||
d="M256.5 346C256.5 336.887 273.737 329.5 295 329.5 316.263 329.5 333.5 336.887 333.5 346 333.5 355.113 316.263 362.5 295 362.5 273.737 362.5 256.5 355.113 256.5 346Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path16443" />
|
||||
<path
|
||||
d="M227.001 157.423 227.377 147.517 228.384 137.637 230.083 127.884 232.347 118.384 235.179 109.261 238.514 100.452 242.29 92.1441 246.449 84.331 251.054 77.1396 255.917 70.697 261.106 65.0024 266.502 60.1141 272.112 56.2251 277.937 53.2806 283.798 51.4774 289.743 50.8381 301.28 50.4619 301.223 50.4653 312.411 49.4653 312.352 49.4724 322.789 47.9099 322.729 47.9209 332.104 45.9209 332.036 45.9379 336.161 44.7504 336.121 44.7627 339.871 43.5127 339.819 43.5317 343.132 42.2192 342.962 42.3058 346.559 40.0114 347.635 41.6975 343.957 44.0434 340.53 45.4013 336.734 46.6666 332.555 47.8696 323.116 49.8833 312.619 51.4548 301.373 52.4599 289.845 52.8358 289.919 52.8306 284.107 53.4556 284.294 53.4172 278.607 55.1672 278.764 55.1038 273.076 57.9788 273.195 57.9082 267.695 61.7207 267.796 61.64 262.484 66.4525 262.552 66.3849 257.427 72.0099 257.486 71.9389 252.673 78.3139 252.717 78.2506 248.155 85.3756 248.195 85.3062 244.07 93.0562 244.098 93.0002 240.348 101.25 240.373 101.19 237.06 109.94 237.08 109.883 234.268 118.945 234.285 118.881 232.035 128.318 232.048 128.258 230.36 137.945 230.37 137.875 229.37 147.688 229.374 147.624 228.999 157.499ZM343.256 39.1163 351.625 35.9614 349.127 44.5498Z"
|
||||
fill="#AEAEAE"
|
||||
id="path16445" />
|
||||
<path
|
||||
d="M1-2.4027e-06 1.00009 37.0288-0.999911 37.0288-1 2.4027e-06ZM4.00009 35.6954 0.000104987 43.6954-3.99991 35.6954Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 228 234.695)"
|
||||
id="path16447" />
|
||||
<path
|
||||
d="M0.0995037-0.995037 4.5675-0.548237 8.98329 0.750524 13.0682 2.69573 16.7663 5.29085 19.8882 8.34774 22.3144 11.7576 23.9097 15.4799 24.4281 19.2384 24.9278 22.7985 24.8566 22.5436 26.3566 26.0436 26.2523 25.8577 28.5648 29.1077 28.4496 28.973 31.4496 31.9105 31.3244 31.8064 34.8869 34.3064 34.7369 34.2195 38.7369 36.0945 38.5334 36.0247 40.5441 36.4802 40.1022 38.4307 37.986 37.9514 33.8093 35.9935 30.1087 33.3966 26.9868 30.3398 24.5606 26.9299 22.9658 23.2087 22.4469 19.5116 21.9469 15.8866 22.0184 16.1439 20.5184 12.6439 20.6227 12.8298 18.3102 9.57975 18.4254 9.71451 15.4254 6.77701 15.5506 6.88106 11.9881 4.38106 12.1326 4.46536 8.19507 2.59036 8.34283 2.64687 4.09283 1.39687 4.2755 1.43254-0.0995037 0.995037ZM39.752 33.2779 46.875 38.6875 38.2736 41.1401Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(-1.83697e-16 -1 -1 1.83697e-16 294.644 328.851)"
|
||||
id="path16449" />
|
||||
<path
|
||||
d="M414.493 501.445 324.808 364.129 326.483 363.035 416.167 500.351ZM323.026 366.885 322 358 329.724 362.511Z"
|
||||
fill="#AEAEAE"
|
||||
id="path16451" />
|
||||
<path
|
||||
d="M299.948 500.063 293.336 368.709 295.334 368.608 301.946 499.963ZM290.407 370.191 294 362 298.397 369.789Z"
|
||||
fill="#AEAEAE"
|
||||
id="path16453" />
|
||||
<path
|
||||
d="M0.058722-0.998274 5.43593-0.681968 10.7955 0.338892 16.0835 1.86798 21.2441 4.03413 26.1989 6.70214 30.9593 9.81222 35.4657 13.3666 39.6506 17.2979 43.5185 21.5462 47.0092 26.1159 50.0556 30.9393 52.6597 35.957 54.7576 41.17 56.3501 46.5207 57.3075 52.0097 57.6233 57.442 57.6207 57.4074 57.8707 60.0949 57.8438 59.9397 58.5313 62.6272 58.4849 62.4889 59.6099 65.1764 59.5468 65.051 61.1093 67.676 61.0454 67.5815 63.0454 70.2065 62.9915 70.1416 65.3665 72.7666 65.3155 72.7141 68.0655 75.3391 67.9997 75.2816 71.1247 77.7816 71.0644 77.737 78.3769 82.737 78.29 82.6839 86.915 87.3714 86.8544 87.341 96.6669 91.841 96.614 91.8186 107.489 96.0686 107.432 96.0483 119.245 99.8608 119.205 99.8488 131.83 103.349 131.786 103.338 145.161 106.4 145.123 106.392 159.06 109.017 159.016 109.01 173.454 111.073 173.418 111.068 188.168 112.631 188.125 112.627 203.187 113.564 203.15 113.563 211.61 113.773 211.561 115.772 203.082 115.562 187.979 114.622 173.189 113.055 158.712 110.987 144.733 108.354 131.317 105.282 118.65 101.771 106.789 97.9424 95.8592 93.6709 85.9898 89.1448 77.2902 84.4168 69.9046 79.3668 66.7162 76.816 63.908 74.1356 61.4801 71.4521 59.4199 68.7481 57.7922 66.0135 56.6118 63.1937 55.8866 60.3588 55.6277 57.5754 55.3142 52.183 55.3274 52.2968 54.3899 46.9218 54.4165 47.0353 52.854 41.7853 52.8848 41.8733 50.8223 36.7483 50.8624 36.8356 48.2999 31.8981 48.342 31.9715 45.342 27.2215 45.3928 27.2945 41.9553 22.7945 42.0106 22.8607 38.1981 18.6732 38.2528 18.7288 34.1278 14.8538 34.1932 14.9102 29.7557 11.4102 29.8281 11.4622 25.1406 8.39967 25.2134 8.44297 20.3384 5.81797 20.4255 5.85956 15.363 3.73456 15.4722 3.77315 10.2847 2.27315 10.3754 2.29484 5.12539 1.29484 5.25378 1.31077-0.058722 0.998274ZM210.352 110.74 218.25 114.937 210.153 118.738Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(-1.83697e-16 -1 -1 1.83697e-16 297.917 500.22)"
|
||||
id="path16455" />
|
||||
<g
|
||||
aria-label="Conv2d_1a_7x7ReluBatchNorm3 tensorsconvoluti…"
|
||||
transform="translate(274.777 85)"
|
||||
id="text16467"
|
||||
style="font-size:24px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 3,-8.424 q 0,3 1.8,4.992 1.8,1.968 4.512,1.968 1.824,0 3.192,-0.84 1.392,-0.84 2.376,-2.52 l 1.584,0.912 q -1.08,2.064 -2.952,3.192 -1.848,1.104 -4.248,1.104 -3.648,0 -6.024,-2.472 -2.352,-2.472 -2.352,-6.288 0,-3.744 2.448,-6.264 2.472,-2.544 6.144,-2.544 2.328,0 4.104,1.104 1.8,1.08 2.808,3.096 L 14.736,-12 q -0.936,-1.704 -2.232,-2.52 -1.296,-0.816 -3.072,-0.816 -2.76,0 -4.608,1.968 Q 3,-11.4 3,-8.424 Z"
|
||||
id="path16839" />
|
||||
<path
|
||||
d="m 30.144003,-5.784 q 0,2.616 -1.752,4.392 -1.752,1.752 -4.32,1.752 -2.568,0 -4.296,-1.752 -1.704,-1.752 -1.704,-4.368 0,-2.616 1.752,-4.368 1.752,-1.776 4.32,-1.776 2.568,0 4.272,1.752 1.728,1.752 1.728,4.368 z m -10.08,0.024 q 0,1.872 1.152,3.144 1.176,1.272 2.904,1.272 1.704,0 2.856,-1.272 1.176,-1.272 1.176,-3.168 0,-1.848 -1.176,-3.12 -1.176,-1.296 -2.88,-1.296 -1.704,0 -2.88,1.272 -1.152,1.272 -1.152,3.168 z"
|
||||
id="path16841" />
|
||||
<path
|
||||
d="m 32.160016,-11.592 h 1.92 v 1.8 q 0.696,-1.008 1.752,-1.536 1.08,-0.552 2.4,-0.552 2.304,0 3.6,1.488 1.296,1.488 1.296,4.08 V 0 h -1.944 v -6.192 q 0,-1.824 -0.912,-2.88 -0.888,-1.056 -2.496,-1.056 -1.656,0 -2.664,1.128 -1.008,1.128 -1.008,2.928 V 0 h -1.944 z"
|
||||
id="path16843" />
|
||||
<path
|
||||
d="m 49.055986,0 -4.968,-11.592 h 2.136 l 3.768,9.264 3.744,-9.264 h 2.016 L 50.831986,0 Z"
|
||||
id="path16845" />
|
||||
<path
|
||||
d="m 68.951965,0 h -11.976 v -1.68 l 5.808,-5.328 q 2.064,-1.872 2.808,-2.976 0.744,-1.104 0.744,-2.352 0,-1.32 -0.984,-2.16 -0.96,-0.864 -2.4,-0.864 -1.776,0 -2.808,1.104 -1.008,1.08 -1.08,3.048 h -1.968 q 0.048,-2.736 1.632,-4.344 1.608,-1.632 4.272,-1.632 2.352,0 3.864,1.368 1.536,1.344 1.536,3.432 0,1.68 -0.936,3.12 -0.912,1.416 -3.48,3.672 l -4.248,3.792 h 9.216 z"
|
||||
id="path16847" />
|
||||
<path
|
||||
d="m 80.375964,-16.8 h 1.968 V 0 h -1.944 v -1.872 q -0.792,1.008 -1.92,1.56 -1.104,0.552 -2.376,0.552 -2.424,0 -4.08,-1.68 -1.632,-1.704 -1.632,-4.32 0,-2.64 1.68,-4.368 1.704,-1.752 4.176,-1.752 1.2,0 2.256,0.504 1.08,0.48 1.872,1.392 z m -4.056,15.336 q 1.752,0 2.928,-1.248 1.2,-1.272 1.2,-3.12 0,-1.872 -1.152,-3.096 -1.152,-1.224 -2.88,-1.224 -1.776,0 -2.904,1.2 -1.128,1.2 -1.128,3.144 0,1.944 1.104,3.144 1.104,1.2 2.832,1.2 z"
|
||||
id="path16849" />
|
||||
<path
|
||||
d="M 83.543949,3.096 V 1.344 h 12.192 v 1.752 z"
|
||||
id="path16851" />
|
||||
<path
|
||||
d="m 98.927943,-14.976 h -2.592 V -16.8 h 4.607997 V 0 h -2.015997 z"
|
||||
id="path16853" />
|
||||
<path
|
||||
d="m 113.90395,0 h -1.896 v -1.896 q -0.696,1.008 -1.896,1.584 -1.176,0.552 -2.544,0.552 -1.92,0 -3.12,-0.96 -1.2,-0.984 -1.2,-2.568 0,-1.752 1.344,-2.64 1.344,-0.912 3.984,-0.912 h 3.408 q -0.024,-1.632 -0.864,-2.52 -0.816,-0.888 -2.352,-0.888 -0.912,0 -1.8,0.384 -0.888,0.384 -1.752,1.176 l -1.032,-1.296 q 1.008,-0.96 2.208,-1.44 1.2,-0.504 2.568,-0.504 2.328,0 3.624,1.392 1.32,1.368 1.32,3.792 z m -1.92,-5.328 h -3.384 q -1.752,0 -2.592,0.48 -0.84,0.48 -0.84,1.512 0,0.864 0.72,1.44 0.744,0.576 1.896,0.576 1.728,0 2.904,-1.104 1.2,-1.128 1.296,-2.904 z"
|
||||
id="path16855" />
|
||||
<path
|
||||
d="M 114.98394,3.096 V 1.344 h 12.192 v 1.752 z"
|
||||
id="path16857" />
|
||||
<path
|
||||
d="M 139.19993,-15.168 132.02393,0 h -2.232 l 7.344,-15 h -9.768 v -1.8 h 11.832 z"
|
||||
id="path16859" />
|
||||
<path
|
||||
d="m 142.10393,0 h -2.184 l 4.512,-5.976 -4.2,-5.616 h 2.304 l 3.048,4.248 3.096,-4.248 h 2.16 l -4.2,5.592 4.584,6 h -2.28 l -3.408,-4.68 z"
|
||||
id="path16861" />
|
||||
<path
|
||||
d="M 163.51194,-15.168 156.33594,0 h -2.232 l 7.344,-15 h -9.768 v -1.8 h 11.832 z"
|
||||
id="path16863" />
|
||||
<path
|
||||
d="m -58.670502,59.395 3.781,5.605 h -1.938 l -3.591,-5.415 h -3.743 V 65 h -1.634 V 51.7 h 5.852 q 2.242,0 3.496,1.045 1.254,1.026 1.254,2.831 0,1.482 -0.912,2.489 -0.912,1.007 -2.565,1.33 z m 1.824,-3.781 q 0,-1.159 -0.836,-1.805 -0.817,-0.665 -2.299,-0.665 h -4.18 v 5.016 h 4.047 q 1.501,0 2.375,-0.684 0.893,-0.703 0.893,-1.862 z"
|
||||
style="font-size:19px"
|
||||
id="path16865" />
|
||||
<path
|
||||
d="m -49.189505,63.974 q 0.912,0 1.672,-0.38 0.779,-0.399 1.425,-1.197 l 0.931,0.855 q -0.779,1.026 -1.824,1.539 -1.045,0.494 -2.337,0.494 -2.071,0 -3.401,-1.349 -1.311,-1.368 -1.311,-3.496 0,-2.071 1.349,-3.458 1.349,-1.387 3.325,-1.387 2.014,0 3.268,1.311 1.254,1.311 1.254,3.42 0,0.152 -0.019,0.361 -0.019,0.19 -0.038,0.285 h -7.543 q 0.152,1.349 1.045,2.185 0.912,0.817 2.204,0.817 z m -0.152,-7.068 q -1.197,0 -2.052,0.779 -0.855,0.779 -1.045,2.09 h 6.042 q -0.171,-1.33 -0.969,-2.09 -0.798,-0.779 -1.976,-0.779 z"
|
||||
style="font-size:19px"
|
||||
id="path16867" />
|
||||
<path
|
||||
d="m -43.109493,51.7 h 1.558 V 65 h -1.558 z"
|
||||
style="font-size:19px"
|
||||
id="path16869" />
|
||||
<path
|
||||
d="m -30.778505,65 h -1.539 v -1.406 q -0.551,0.798 -1.387,1.216 -0.836,0.399 -1.9,0.399 -1.805,0 -2.85,-1.178 -1.026,-1.178 -1.026,-3.211 v -4.997 h 1.558 v 4.902 q 0,1.444 0.703,2.28 0.722,0.836 1.995,0.836 1.292,0 2.09,-0.893 0.798,-0.893 0.798,-2.318 v -4.807 h 1.558 z"
|
||||
style="font-size:19px"
|
||||
id="path16871" />
|
||||
<path
|
||||
d="m -100.02,181 v -14.7 h 6.362998 q 2.457,0 3.801,1.008 1.344,1.008 1.344,2.856 0,1.176 -0.609,2.037 -0.609,0.84 -1.827,1.302 1.365,0.357 2.037,1.239 0.693,0.861 0.693,2.226 0,1.806 -1.407,2.919 -1.407,1.113 -3.717,1.113 z m 9.701998,-10.752 q 0,-1.155 -0.861,-1.743 -0.861,-0.609 -2.52,-0.609 h -4.494 v 4.893 h 4.767 q 1.365,0 2.226,-0.714 0.882,-0.714 0.882,-1.827 z m 0.294,6.699 q 0,-1.176 -0.798,-1.869 -0.798,-0.693 -2.163,-0.693 h -5.208 v 5.019 h 4.977 q 1.449,0 2.31,-0.672 0.882,-0.672 0.882,-1.785 z"
|
||||
style="font-size:21px"
|
||||
id="path16873" />
|
||||
<path
|
||||
d="m -77.423995,181 h -1.659 v -1.659 q -0.609,0.882 -1.659,1.386 -1.029,0.483 -2.226,0.483 -1.68,0 -2.73,-0.84 -1.05,-0.861 -1.05,-2.247 0,-1.533 1.176,-2.31 1.176,-0.798 3.486,-0.798 h 2.982 q -0.021,-1.428 -0.756,-2.205 -0.714,-0.777 -2.058,-0.777 -0.798,0 -1.575,0.336 -0.777,0.336 -1.533,1.029 l -0.903,-1.134 q 0.882,-0.84 1.932,-1.26 1.05,-0.441 2.247,-0.441 2.037,0 3.171,1.218 1.155,1.197 1.155,3.318 z m -1.68,-4.662 h -2.961 q -1.533,0 -2.268,0.42 -0.735,0.42 -0.735,1.323 0,0.756 0.63,1.26 0.651,0.504 1.659,0.504 1.512,0 2.541,-0.966 1.05,-0.987 1.134,-2.541 z"
|
||||
style="font-size:21px"
|
||||
id="path16875" />
|
||||
<path
|
||||
d="m -74.86199,178.291 v -5.922 h -1.386 v -1.512 h 1.386 v -3.108 h 1.701 v 3.108 h 2.352 v 1.512 h -2.352 v 5.775 q 0,0.672 0.294,1.008 0.315,0.315 0.945,0.315 h 1.113 V 181 h -1.365 q -1.407,0 -2.058,-0.651 -0.63,-0.651 -0.63,-2.058 z"
|
||||
style="font-size:21px"
|
||||
id="path16877" />
|
||||
<path
|
||||
d="m -64.466977,181.315 q -2.331,0 -3.801,-1.47 -1.449,-1.491 -1.449,-3.885 0,-2.268 1.533,-3.822 1.533,-1.554 3.759,-1.554 1.617,0 2.751,0.756 1.155,0.735 1.743,2.142 l -1.449,0.693 q -0.483,-1.05 -1.26,-1.575 -0.756,-0.525 -1.806,-0.525 -1.491,0 -2.52,1.134 -1.008,1.113 -1.008,2.751 0,1.659 1.008,2.772 1.029,1.092 2.562,1.092 0.987,0 1.827,-0.525 0.861,-0.546 1.386,-1.512 l 1.323,0.714 q -0.609,1.302 -1.827,2.058 -1.218,0.756 -2.772,0.756 z"
|
||||
style="font-size:21px"
|
||||
id="path16879" />
|
||||
<path
|
||||
d="m -58.16698,166.3 h 1.701 v 6.09 q 0.609,-0.861 1.512,-1.323 0.924,-0.462 2.058,-0.462 2.037,0 3.171,1.302 1.155,1.302 1.155,3.57 V 181 h -1.701 v -5.418 q 0,-1.617 -0.798,-2.52 -0.798,-0.924 -2.226,-0.924 -1.428,0 -2.31,0.987 -0.861,0.987 -0.861,2.562 V 181 h -1.701 z"
|
||||
style="font-size:21px"
|
||||
id="path16881" />
|
||||
<path
|
||||
d="M -44.453996,168.925 V 181 h -1.764 v -14.7 h 1.932 l 9.177,12.012 V 166.3 h 1.764 V 181 h -1.89 z"
|
||||
style="font-size:21px"
|
||||
id="path16883" />
|
||||
<path
|
||||
d="m -20.765996,175.939 q 0,2.289 -1.533,3.843 -1.533,1.533 -3.78,1.533 -2.247,0 -3.759,-1.533 -1.491,-1.533 -1.491,-3.822 0,-2.289 1.533,-3.822 1.533,-1.554 3.78,-1.554 2.247,0 3.738,1.533 1.512,1.533 1.512,3.822 z m -8.82,0.021 q 0,1.638 1.008,2.751 1.029,1.113 2.541,1.113 1.491,0 2.499,-1.113 1.029,-1.113 1.029,-2.772 0,-1.617 -1.029,-2.73 -1.029,-1.134 -2.52,-1.134 -1.491,0 -2.52,1.113 -1.008,1.113 -1.008,2.772 z"
|
||||
style="font-size:21px"
|
||||
id="path16885" />
|
||||
<path
|
||||
d="m -19.001983,170.857 h 1.68 v 1.281 q 0.462,-0.672 1.197,-1.008 0.735,-0.336 1.743,-0.336 h 0.924 v 1.617 h -0.945 q -1.449,0 -2.184,0.756 -0.714,0.756 -0.714,2.289 V 181 h -1.701 z"
|
||||
style="font-size:21px"
|
||||
id="path16887" />
|
||||
<path
|
||||
d="m -12.07199,170.857 h 1.68 v 1.218 q 0.5670001,-0.735 1.2810001,-1.092 0.735,-0.378 1.638,-0.378 1.05,0 1.869,0.441 0.819,0.441 1.302,1.281 0.567,-0.84 1.428,-1.281 0.882,-0.441 1.97399997,-0.441 1.785,0 2.83500003,1.155 1.071,1.155 1.071,3.003 V 181 h -1.722 v -6.132 q 0,-1.281 -0.65100003,-1.995 -0.63,-0.735 -1.78499997,-0.735 -1.134,0 -1.848,0.798 -0.693,0.777 -0.693,2.037 V 181 h -1.701 v -6.132 q 0,-1.281 -0.651,-1.995 -0.63,-0.735 -1.785,-0.735 -1.134,0 -1.848,0.798 -0.6930001,0.777 -0.6930001,2.037 V 181 h -1.701 z"
|
||||
style="font-size:21px"
|
||||
id="path16889" />
|
||||
<path
|
||||
d="m -100.673,294.28 q 0.851999,0 1.415999,-0.468 0.576,-0.48 0.576,-1.188 0,-0.744 -0.588,-1.176 -0.576,-0.432 -1.607999,-0.432 h -0.528 v -0.852 h 0.504 q 0.947999,0 1.499999,-0.408 0.552,-0.42 0.552,-1.068 0,-0.6 -0.504,-0.984 -0.504,-0.384 -1.319999,-0.384 -0.864,0 -1.38,0.468 -0.504,0.456 -0.564,1.26 h -0.984 q 0.036,-1.128 0.852,-1.884 0.828,-0.756 2.124,-0.756 1.235999,0 2.015999,0.636 0.792,0.624 0.792,1.56 0,0.696 -0.408,1.212 -0.396,0.516 -1.152,0.78 0.864,0.24 1.296,0.756 0.432,0.516 0.432,1.32 0,1.068 -0.864,1.8 -0.864,0.72 -2.183999,0.72 -1.404,0 -2.232,-0.756 -0.816,-0.768 -0.828,-2.1 h 0.972 q 0.048,0.936 0.588,1.44 0.552,0.504 1.524,0.504 z"
|
||||
style="font-size:12px"
|
||||
id="path16891" />
|
||||
<path
|
||||
d="m -94.037002,293.452 v -3.384 h -0.792 v -0.864 h 0.792 v -1.776 h 0.972 v 1.776 h 1.344 v 0.864 h -1.344 v 3.3 q 0,0.384 0.168,0.576 0.18,0.18 0.54,0.18 h 0.636 V 295 h -0.78 q -0.804,0 -1.176,-0.372 -0.36,-0.372 -0.36,-1.176 z"
|
||||
style="font-size:12px"
|
||||
id="path16893" />
|
||||
<path
|
||||
d="m -88.036994,294.352 q 0.576,0 1.056,-0.24 0.492,-0.252 0.9,-0.756 l 0.588,0.54 q -0.492,0.648 -1.152,0.972 -0.66,0.312 -1.476,0.312 -1.308,0 -2.148,-0.852 -0.828,-0.864 -0.828,-2.208 0,-1.308 0.852,-2.184 0.852,-0.876 2.1,-0.876 1.272,0 2.064,0.828 0.792,0.828 0.792,2.16 0,0.096 -0.012,0.228 -0.012,0.12 -0.024,0.18 h -4.764 q 0.096,0.852 0.66,1.38 0.576,0.516 1.392,0.516 z m -0.096,-4.464 q -0.756,0 -1.296,0.492 -0.54,0.492 -0.66,1.32 h 3.816 q -0.108,-0.84 -0.612,-1.32 -0.504,-0.492 -1.248,-0.492 z"
|
||||
style="font-size:12px"
|
||||
id="path16895" />
|
||||
<path
|
||||
d="m -84.304987,289.204 h 0.96 v 0.9 q 0.348,-0.504 0.876,-0.768 0.54,-0.276 1.2,-0.276 1.152,0 1.8,0.744 0.648,0.744 0.648,2.04 V 295 h -0.972 v -3.096 q 0,-0.912 -0.456,-1.44 -0.444,-0.528 -1.248,-0.528 -0.828,0 -1.332,0.564 -0.504,0.564 -0.504,1.464 V 295 h -0.972 z"
|
||||
style="font-size:12px"
|
||||
id="path16897" />
|
||||
<path
|
||||
d="m -72.952996,293.308 q 0,0.816 -0.696,1.332 -0.696,0.516 -1.812,0.516 -0.732,0 -1.38,-0.228 -0.636,-0.24 -1.152,-0.708 l 0.504,-0.744 q 0.552,0.456 1.032,0.66 0.48,0.204 1.02,0.204 0.696,0 1.116,-0.264 0.432,-0.276 0.432,-0.708 0,-0.444 -0.36,-0.636 -0.36,-0.204 -1.188,-0.216 -1.212,-0.048 -1.776,-0.444 -0.552,-0.396 -0.552,-1.236 0,-0.768 0.636,-1.272 0.648,-0.516 1.656,-0.516 0.684,0 1.284,0.216 0.6,0.216 1.092,0.636 l -0.468,0.732 q -0.492,-0.396 -0.948,-0.576 -0.456,-0.18 -0.972,-0.18 -0.588,0 -0.972,0.264 -0.372,0.264 -0.372,0.648 0,0.42 0.336,0.612 0.336,0.192 1.068,0.228 1.308,0.036 1.884,0.432 0.588,0.396 0.588,1.248 z"
|
||||
style="font-size:12px"
|
||||
id="path16899" />
|
||||
<path
|
||||
d="m -66.196988,292.108 q 0,1.308 -0.876,2.196 -0.876,0.876 -2.16,0.876 -1.284,0 -2.148,-0.876 -0.852,-0.876 -0.852,-2.184 0,-1.308 0.876,-2.184 0.876,-0.888 2.16,-0.888 1.284,0 2.136,0.876 0.864,0.876 0.864,2.184 z m -5.04,0.012 q 0,0.936 0.576,1.572 0.588,0.636 1.452,0.636 0.852,0 1.428,-0.636 0.588,-0.636 0.588,-1.584 0,-0.924 -0.588,-1.56 -0.588,-0.648 -1.44,-0.648 -0.852,0 -1.44,0.636 -0.576,0.636 -0.576,1.584 z"
|
||||
style="font-size:12px"
|
||||
id="path16901" />
|
||||
<path
|
||||
d="m -65.188982,289.204 h 0.96 v 0.732 q 0.264,-0.384 0.684,-0.576 0.42,-0.192 0.996,-0.192 h 0.528 v 0.924 h -0.54 q -0.828,0 -1.248,0.432 -0.408,0.432 -0.408,1.308 V 295 h -0.972 z"
|
||||
style="font-size:12px"
|
||||
id="path16903" />
|
||||
<path
|
||||
d="m -56.560983,293.308 q 0,0.816 -0.696,1.332 -0.696,0.516 -1.812,0.516 -0.732,0 -1.38,-0.228 -0.636,-0.24 -1.152,-0.708 l 0.504,-0.744 q 0.552,0.456 1.032,0.66 0.48,0.204 1.02,0.204 0.696,0 1.116,-0.264 0.432,-0.276 0.432,-0.708 0,-0.444 -0.36,-0.636 -0.36,-0.204 -1.188,-0.216 -1.212,-0.048 -1.776,-0.444 -0.552,-0.396 -0.552,-1.236 0,-0.768 0.636,-1.272 0.648,-0.516 1.656,-0.516 0.684,0 1.284,0.216 0.6,0.216 1.092,0.636 l -0.468,0.732 q -0.492,-0.396 -0.948,-0.576 -0.456,-0.18 -0.972,-0.18 -0.588,0 -0.972,0.264 -0.372,0.264 -0.372,0.648 0,0.42 0.336,0.612 0.336,0.192 1.068,0.228 1.308,0.036 1.884,0.432 0.588,0.396 0.588,1.248 z"
|
||||
style="font-size:12px"
|
||||
id="path16905" />
|
||||
<path
|
||||
d="m -8.6353245,236.49707 q 0,0.15772 -0.00928,0.27832 -0.00928,0.11133 -0.037109,0.19482 -0.018555,0.0742 -0.055664,0.13916 -0.027832,0.0557 -0.1484375,0.17627 -0.1113281,0.11133 -0.3896484,0.2876 -0.2783203,0.16699 -0.6308594,0.30615 -0.3432614,0.12989 -0.7514644,0.21338 -0.408204,0.0835 -0.844239,0.0835 -0.899902,0 -1.595703,-0.29688 -0.695801,-0.29687 -1.168945,-0.86279 -0.463867,-0.57519 -0.714356,-1.40088 -0.241211,-0.83496 -0.241211,-1.92041 0,-1.23388 0.296875,-2.11523 0.306153,-0.89063 0.825684,-1.45654 0.528809,-0.56592 1.233887,-0.83497 0.714355,-0.27832 1.540039,-0.27832 0.398926,0 0.770019,0.0742 0.380371,0.0742 0.6958011,0.19483 0.3154297,0.1206 0.5566407,0.27832 0.2504883,0.15771 0.3618164,0.26904 0.1113281,0.11133 0.1484375,0.17627 0.046387,0.0649 0.074219,0.15771 0.027832,0.0835 0.037109,0.19483 0.00928,0.11133 0.00928,0.27832 0,0.36182 -0.083496,0.51025 -0.083496,0.13916 -0.2041016,0.13916 -0.1391602,0 -0.324707,-0.14843 -0.1762696,-0.15772 -0.4545899,-0.34327 -0.2783206,-0.18554 -0.6772456,-0.33398 -0.389649,-0.15772 -0.927735,-0.15772 -1.104004,0 -1.697754,0.85352 -0.584472,0.84424 -0.584472,2.4585 0,0.80713 0.148437,1.41943 0.157715,0.60303 0.45459,1.01123 0.296875,0.4082 0.723633,0.61231 0.436035,0.19482 0.992676,0.19482 0.528808,0 0.927734,-0.16699 0.3989257,-0.167 0.6865233,-0.36182 0.296875,-0.2041 0.4916993,-0.36182 0.2041015,-0.16699 0.3154296,-0.16699 0.064941,0 0.1113282,0.0371 0.046387,0.0371 0.074219,0.12988 0.037109,0.0835 0.046387,0.22266 0.018555,0.12988 0.018555,0.31543 z"
|
||||
style="font-size:19px;font-family:Calibri, Calibri_MSFontService, sans-serif"
|
||||
id="path16907" />
|
||||
<path
|
||||
d="m 1.0038357,233.50049 q 0,1.02051 -0.26904299,1.8833 -0.26904297,0.85351 -0.80712891,1.4751 -0.52880859,0.62158 -1.3359375,0.97412 -0.7978516,0.34326 -1.8554687,0.34326 -1.0297852,0 -1.7998047,-0.30615 -0.7607422,-0.30616 -1.2709961,-0.89063 -0.5102539,-0.58447 -0.7607422,-1.41943 -0.2504883,-0.83496 -0.2504883,-1.89258 0,-1.02051 0.2597656,-1.87402 0.269043,-0.8628 0.7978516,-1.48438 0.5380859,-0.62158 1.3359375,-0.96484 0.7978516,-0.34326 1.8647461,-0.34326 1.0297851,0 1.7905273,0.30615 0.77001956,0.30615 1.28027347,0.89062 0.5102539,0.58448 0.76074218,1.41944 0.25976565,0.83496 0.25976565,1.8833 z m -1.61425783,0.10205 q 0,-0.67725 -0.12988282,-1.28027 -0.12060547,-0.60303 -0.40820315,-1.05762 -0.2875976,-0.45459 -0.7792968,-0.71436 -0.4916993,-0.26904 -1.2246094,-0.26904 -0.6772461,0 -1.1689453,0.24121 -0.4824219,0.24121 -0.7978516,0.68652 -0.3154297,0.43604 -0.4731445,1.03907 -0.1484375,0.60302 -0.1484375,1.31738 0,0.68652 0.1206054,1.28955 0.1298829,0.60303 0.4174805,1.05762 0.296875,0.44531 0.7885742,0.71435 0.4916992,0.25977 1.2246094,0.25977 0.6679688,0 1.159668,-0.24121 0.4916992,-0.24121 0.8071289,-0.67725 0.31542967,-0.43603 0.46386717,-1.03906 0.1484375,-0.60303 0.1484375,-1.32666 z"
|
||||
style="font-size:19px;font-family:Calibri, Calibri_MSFontService, sans-serif"
|
||||
id="path16909" />
|
||||
<path
|
||||
d="m 10.448172,237.76807 q 0,0.0742 -0.03711,0.12988 -0.03711,0.0464 -0.120605,0.0835 -0.0835,0.0371 -0.231934,0.0557 -0.1484373,0.0185 -0.3710936,0.0185 -0.2319336,0 -0.3803711,-0.0185 -0.1484375,-0.0186 -0.2319336,-0.0557 -0.083496,-0.0371 -0.1206054,-0.0835 -0.037109,-0.0557 -0.037109,-0.12988 v -4.88916 q 0,-0.71436 -0.1113281,-1.15039 -0.1113282,-0.43604 -0.3247071,-0.75147 -0.2133789,-0.31543 -0.5566406,-0.48242 -0.3339844,-0.16699 -0.7792969,-0.16699 -0.5751953,0 -1.1503906,0.4082 -0.5751953,0.4082 -1.2060547,1.19678 v 5.83545 q 0,0.0742 -0.037109,0.12988 -0.037109,0.0464 -0.1206055,0.0835 -0.083496,0.0371 -0.2319336,0.0557 -0.1484375,0.0185 -0.3803711,0.0185 -0.2226563,0 -0.3710938,-0.0185 -0.1484375,-0.0186 -0.2412109,-0.0557 -0.083496,-0.0371 -0.1206055,-0.0835 -0.027832,-0.0557 -0.027832,-0.12988 v -8.34961 q 0,-0.0742 0.027832,-0.12061 0.027832,-0.0557 0.1113282,-0.0928 0.083496,-0.0464 0.2133789,-0.0557 0.1298828,-0.0185 0.3432617,-0.0185 0.2041016,0 0.3339844,0.0185 0.1391601,0.009 0.2133789,0.0557 0.074219,0.0371 0.1020508,0.0928 0.037109,0.0464 0.037109,0.12061 v 1.104 q 0.7050782,-0.78857 1.4008789,-1.15039 0.7050782,-0.37109 1.4194336,-0.37109 0.834961,0 1.4008789,0.28759 0.5751953,0.27832 0.9277344,0.75147 0.3525388,0.47314 0.5009768,1.11328 0.157715,0.63086 0.157715,1.52148 z"
|
||||
style="font-size:19px;font-family:Calibri, Calibri_MSFontService, sans-serif"
|
||||
id="path16911" />
|
||||
<path
|
||||
d="m 19.771902,229.3999 q 0,0.0371 -0.0093,0.0835 0,0.0371 -0.0093,0.0835 -0.0093,0.0464 -0.02783,0.10206 -0.0093,0.0557 -0.02783,0.1206 l -2.848144,7.93213 q -0.03711,0.10205 -0.102051,0.16699 -0.05566,0.0649 -0.17627,0.10205 -0.120605,0.0371 -0.306152,0.0464 -0.185547,0.0185 -0.463867,0.0185 -0.27832,0 -0.463867,-0.0185 -0.185547,-0.0186 -0.306153,-0.0557 -0.111328,-0.0371 -0.176269,-0.10206 -0.06494,-0.0649 -0.102051,-0.15771 l -2.838867,-7.93213 q -0.03711,-0.11133 -0.06494,-0.19482 -0.01855,-0.0835 -0.02783,-0.12061 0,-0.0464 0,-0.0742 0,-0.0742 0.03711,-0.12988 0.03711,-0.0557 0.120605,-0.0835 0.09277,-0.0371 0.231934,-0.0464 0.148437,-0.009 0.361816,-0.009 0.269043,0 0.426758,0.0185 0.166992,0.009 0.250488,0.0464 0.09277,0.0371 0.129883,0.0928 0.04639,0.0557 0.0835,0.13916 l 2.356446,6.88379 0.03711,0.11133 0.02783,-0.11133 2.328613,-6.88379 q 0.01855,-0.0835 0.06494,-0.13916 0.04639,-0.0557 0.129883,-0.0928 0.09277,-0.0371 0.24121,-0.0464 0.157715,-0.0185 0.408204,-0.0185 0.213379,0 0.352539,0.009 0.13916,0.009 0.213379,0.0464 0.0835,0.0371 0.111328,0.0928 0.03711,0.0464 0.03711,0.1206 z"
|
||||
style="font-size:19px;font-family:Calibri, Calibri_MSFontService, sans-serif"
|
||||
id="path16913" />
|
||||
<path
|
||||
d="m 29.095633,233.50049 q 0,1.02051 -0.269043,1.8833 -0.269043,0.85351 -0.807129,1.4751 -0.528809,0.62158 -1.335938,0.97412 -0.797851,0.34326 -1.855469,0.34326 -1.029785,0 -1.799804,-0.30615 -0.760742,-0.30616 -1.270996,-0.89063 -0.510254,-0.58447 -0.760743,-1.41943 -0.250488,-0.83496 -0.250488,-1.89258 0,-1.02051 0.259766,-1.87402 0.269043,-0.8628 0.797851,-1.48438 0.538086,-0.62158 1.335938,-0.96484 0.797851,-0.34326 1.864746,-0.34326 1.029785,0 1.790527,0.30615 0.77002,0.30615 1.280274,0.89062 0.510254,0.58448 0.760742,1.41944 0.259766,0.83496 0.259766,1.8833 z m -1.614258,0.10205 q 0,-0.67725 -0.129883,-1.28027 -0.120606,-0.60303 -0.408203,-1.05762 -0.287598,-0.45459 -0.779297,-0.71436 -0.491699,-0.26904 -1.224609,-0.26904 -0.677247,0 -1.168946,0.24121 -0.482422,0.24121 -0.797851,0.68652 -0.31543,0.43604 -0.473145,1.03907 -0.148437,0.60302 -0.148437,1.31738 0,0.68652 0.120605,1.28955 0.129883,0.60303 0.417481,1.05762 0.296875,0.44531 0.788574,0.71435 0.491699,0.25977 1.224609,0.25977 0.667969,0 1.159668,-0.24121 0.491699,-0.24121 0.807129,-0.67725 0.31543,-0.43603 0.463867,-1.03906 0.148438,-0.60303 0.148438,-1.32666 z"
|
||||
style="font-size:19px;font-family:Calibri, Calibri_MSFontService, sans-serif"
|
||||
id="path16915" />
|
||||
<path
|
||||
d="m 32.880789,237.76807 q 0,0.0742 -0.03711,0.12988 -0.03711,0.0464 -0.120605,0.0835 -0.0835,0.0371 -0.231934,0.0557 -0.148437,0.0185 -0.380371,0.0185 -0.222656,0 -0.371093,-0.0185 -0.148438,-0.0186 -0.241211,-0.0557 -0.0835,-0.0371 -0.120606,-0.0835 -0.02783,-0.0557 -0.02783,-0.12988 v -12.39453 q 0,-0.0742 0.02783,-0.12989 0.03711,-0.0557 0.120606,-0.0928 0.09277,-0.0371 0.241211,-0.0557 0.148437,-0.0186 0.371093,-0.0186 0.231934,0 0.380371,0.0186 0.148438,0.0186 0.231934,0.0557 0.0835,0.0371 0.120605,0.0928 0.03711,0.0557 0.03711,0.12989 z"
|
||||
style="font-size:19px;font-family:Calibri, Calibri_MSFontService, sans-serif"
|
||||
id="path16917" />
|
||||
<path
|
||||
d="m 42.863211,237.76807 q 0,0.0742 -0.03711,0.12988 -0.02783,0.0464 -0.111328,0.0835 -0.0835,0.0371 -0.222656,0.0557 -0.129883,0.0185 -0.324707,0.0185 -0.213379,0 -0.352539,-0.0185 -0.129883,-0.0186 -0.213379,-0.0557 -0.07422,-0.0371 -0.102051,-0.0835 -0.02783,-0.0557 -0.02783,-0.12988 v -1.10401 q -0.714355,0.78858 -1.410156,1.15039 -0.695801,0.36182 -1.410156,0.36182 -0.834961,0 -1.410157,-0.27832 -0.565918,-0.27832 -0.918457,-0.75147 -0.352539,-0.48242 -0.510254,-1.11328 -0.148437,-0.64013 -0.148437,-1.54931 v -5.06543 q 0,-0.0742 0.02783,-0.12061 0.03711,-0.0557 0.129883,-0.0928 0.09277,-0.0464 0.241211,-0.0557 0.148437,-0.0185 0.371093,-0.0185 0.222657,0 0.371094,0.0185 0.148438,0.009 0.231934,0.0557 0.09277,0.0371 0.129883,0.0928 0.03711,0.0464 0.03711,0.12061 v 4.86133 q 0,0.73291 0.102051,1.17822 0.111328,0.43603 0.324707,0.75146 0.222656,0.30615 0.55664,0.48242 0.333985,0.167 0.779297,0.167 0.575196,0 1.141114,-0.40821 0.575195,-0.4082 1.215332,-1.19677 v -5.83545 q 0,-0.0742 0.02783,-0.12061 0.03711,-0.0557 0.129882,-0.0928 0.09277,-0.0464 0.231934,-0.0557 0.148438,-0.0185 0.380371,-0.0185 0.222656,0 0.371094,0.0185 0.148437,0.009 0.231933,0.0557 0.0835,0.0371 0.120606,0.0928 0.04639,0.0464 0.04639,0.12061 z"
|
||||
style="font-size:19px;font-family:Calibri, Calibri_MSFontService, sans-serif"
|
||||
id="path16919" />
|
||||
<path
|
||||
d="m 52.956961,229.17725 q 0.27832,0 0.380371,0.12988 0.102051,0.1206 0.102051,0.36182 v 8.09912 q 0,0.0742 -0.03711,0.12988 -0.03711,0.0464 -0.129883,0.0835 -0.0835,0.0371 -0.231933,0.0557 -0.148438,0.0185 -0.361817,0.0185 -0.231933,0 -0.380371,-0.0185 -0.148437,-0.0186 -0.241211,-0.0557 -0.0835,-0.0371 -0.120605,-0.0835 -0.03711,-0.0557 -0.03711,-0.12988 v -7.31983 h -4.313964 v 4.65723 q 0,0.86279 0.250488,1.3081 0.259766,0.43604 0.918457,0.43604 0.213379,0 0.380371,-0.0371 0.166992,-0.0464 0.296875,-0.0928 0.129883,-0.0464 0.222656,-0.0835 0.09277,-0.0464 0.166992,-0.0464 0.04639,0 0.0835,0.0278 0.04639,0.0186 0.06494,0.0835 0.02783,0.0649 0.04639,0.17627 0.01855,0.11133 0.01855,0.27832 0,0.26904 -0.03711,0.42676 -0.03711,0.14844 -0.111328,0.23193 -0.07422,0.0742 -0.222656,0.13916 -0.148438,0.0649 -0.343262,0.10205 -0.185547,0.0464 -0.398926,0.0742 -0.213379,0.0278 -0.426758,0.0278 -0.649414,0 -1.113281,-0.16699 -0.463867,-0.17627 -0.760742,-0.51953 -0.296875,-0.35254 -0.436035,-0.88135 -0.129883,-0.53808 -0.129883,-1.26172 v -4.87988 h -1.168945 q -0.139161,0 -0.222657,-0.14844 -0.0835,-0.14843 -0.0835,-0.48242 0,-0.17627 0.01855,-0.29687 0.02783,-0.12061 0.06494,-0.19483 0.03711,-0.0835 0.09277,-0.11132 0.06494,-0.0371 0.13916,-0.0371 h 1.159668 v -1.98536 q 0,-0.0649 0.02783,-0.1206 0.03711,-0.0557 0.120605,-0.0928 0.09277,-0.0464 0.241211,-0.065 0.148438,-0.0186 0.371094,-0.0186 0.231934,0 0.380371,0.0186 0.148438,0.0186 0.231934,0.065 0.0835,0.0371 0.120605,0.0928 0.03711,0.0557 0.03711,0.1206 v 1.98536 z m 0.658691,-2.57911 q 0,0.53809 -0.204101,0.73291 -0.204102,0.19483 -0.751465,0.19483 -0.538086,0 -0.742188,-0.18555 -0.194824,-0.19482 -0.194824,-0.72363 0,-0.53809 0.204102,-0.73291 0.204101,-0.19483 0.751464,-0.19483 0.538086,0 0.732911,0.19483 0.204101,0.18555 0.204101,0.71435 z m -3.126465,2.57911 z"
|
||||
style="font-size:19px;font-family:Calibri, Calibri_MSFontService, sans-serif"
|
||||
id="path16921" />
|
||||
<path
|
||||
d="m 57.318623,236.99805 q 0,0.64941 -0.222657,0.87207 -0.213379,0.21338 -0.770019,0.21338 -0.538086,0 -0.751465,-0.21338 -0.213379,-0.21338 -0.213379,-0.84424 0,-0.64942 0.213379,-0.86279 0.222656,-0.22266 0.779297,-0.22266 0.538086,0 0.751465,0.21338 0.213379,0.21338 0.213379,0.84424 z m 4.184082,0 q 0,0.64941 -0.213379,0.87207 -0.213379,0.21338 -0.77002,0.21338 -0.538086,0 -0.751465,-0.21338 -0.213379,-0.21338 -0.213379,-0.84424 0,-0.64942 0.213379,-0.86279 0.222657,-0.22266 0.77002,-0.22266 0.547363,0 0.751465,0.21338 0.213379,0.21338 0.213379,0.84424 z m 4.193359,0 q 0,0.64941 -0.222656,0.87207 -0.213379,0.21338 -0.77002,0.21338 -0.538086,0 -0.751465,-0.21338 -0.213379,-0.21338 -0.213379,-0.84424 0,-0.64942 0.213379,-0.86279 0.222657,-0.22266 0.779297,-0.22266 0.538086,0 0.751465,0.21338 0.213379,0.21338 0.213379,0.84424 z"
|
||||
style="font-size:19px;font-family:Calibri, Calibri_MSFontService, sans-serif"
|
||||
id="path16923" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 33 KiB |
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:78a73487434f4178f111595eb34b344b35af14bd4ccb03e6a5b00509f86e19c5
|
||||
size 5348
|
||||
134
docs/MO_DG/img/inception_v1_std_input.svg
Normal file
@@ -0,0 +1,134 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="224"
|
||||
height="288"
|
||||
overflow="hidden"
|
||||
version="1.1"
|
||||
id="svg17402"
|
||||
sodipodi:docname="inception_v1_std_input.svg"
|
||||
inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview17404"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
showgrid="false"
|
||||
inkscape:zoom="3.0972222"
|
||||
inkscape:cx="111.87444"
|
||||
inkscape:cy="144"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1137"
|
||||
inkscape:window-x="-8"
|
||||
inkscape:window-y="-8"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg17402" />
|
||||
<defs
|
||||
id="defs17386">
|
||||
<clipPath
|
||||
id="clip0">
|
||||
<rect
|
||||
x="0"
|
||||
y="0"
|
||||
width="224"
|
||||
height="288"
|
||||
id="rect17383" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
<g
|
||||
clip-path="url(#clip0)"
|
||||
id="g17400">
|
||||
<path
|
||||
id="rect17388"
|
||||
style="fill:#ffffff"
|
||||
d="M 0,0 H 224 V 288 H 0 Z" />
|
||||
<path
|
||||
d="M30.5001 55.1673C30.5001 40.9917 41.9917 29.5001 56.1673 29.5001L159.833 29.5001C174.008 29.5001 185.5 40.9917 185.5 55.1673L185.5 157.833C185.5 172.008 174.008 183.5 159.833 183.5L56.1673 183.5C41.9917 183.5 30.5001 172.008 30.5001 157.833Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#E9E9E9"
|
||||
fill-rule="evenodd"
|
||||
id="path17390" />
|
||||
<g
|
||||
aria-label="InceptionV1"
|
||||
transform="translate(54.1683 113)"
|
||||
id="text17392"
|
||||
style="font-size:21px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 1.512,0 V -14.7 H 3.318 V 0 Z"
|
||||
id="path17770" />
|
||||
<path
|
||||
d="m 5.9429904,-10.143 h 1.68 v 1.575 q 0.609,-0.882 1.533,-1.344 0.9449996,-0.483 2.0999996,-0.483 2.016,0 3.15,1.302 1.134,1.302 1.134,3.57 V 0 h -1.701 v -5.418 q 0,-1.596 -0.798,-2.52 -0.777,-0.924 -2.184,-0.924 -1.4489996,0 -2.3309996,0.987 -0.882,0.987 -0.882,2.562 V 0 h -1.701 z"
|
||||
id="path17772" />
|
||||
<path
|
||||
d="m 22.427974,0.315 q -2.331,0 -3.801,-1.47 -1.449,-1.491 -1.449,-3.885 0,-2.268 1.533,-3.822 1.533,-1.554 3.759,-1.554 1.617,0 2.751,0.756 1.155,0.735 1.743,2.142 l -1.449,0.693 q -0.483,-1.05 -1.26,-1.575 -0.756,-0.525 -1.806,-0.525 -1.491,0 -2.52,1.134 -1.008,1.113 -1.008,2.751 0,1.659 1.008,2.772 1.029,1.092 2.562,1.092 0.987,0 1.827,-0.525 0.861,-0.546 1.386,-1.512 l 1.323,0.714 q -0.609,1.302 -1.827,2.058 -1.218,0.756 -2.772,0.756 z"
|
||||
id="path17774" />
|
||||
<path
|
||||
d="m 33.620971,-1.134 q 1.008,0 1.848,-0.42 0.861,-0.441 1.575,-1.323 l 1.029,0.945 q -0.861,1.134 -2.016,1.701 -1.155,0.546 -2.583,0.546 -2.289,0 -3.759,-1.491 -1.449,-1.512 -1.449,-3.864 0,-2.289 1.491,-3.822 1.491,-1.533 3.675,-1.533 2.226,0 3.612,1.449 1.386,1.449 1.386,3.78 0,0.168 -0.021,0.399 -0.021,0.21 -0.042,0.315 h -8.337 q 0.168,1.491 1.155,2.415 1.008,0.903 2.436,0.903 z m -0.168,-7.812 q -1.323,0 -2.268,0.861 -0.945,0.861 -1.155,2.31 h 6.678 q -0.189,-1.47 -1.071,-2.31 -0.882,-0.861 -2.184,-0.861 z"
|
||||
id="path17776" />
|
||||
<path
|
||||
d="m 41.852986,3.78 h -1.701 v -13.923 h 1.68 v 1.638 q 0.672,-0.882 1.659,-1.365 0.987,-0.504 2.1,-0.504 2.142,0 3.57,1.491 1.428,1.47 1.428,3.78 0,2.331 -1.491,3.843 -1.47,1.491 -3.633,1.491 -1.029,0 -1.995,-0.42 -0.945,-0.441 -1.617,-1.239 z m 3.549,-12.642 q -1.533,0 -2.583,1.113 -1.029,1.092 -1.029,2.73 0,1.617 1.008,2.688 1.029,1.071 2.541,1.071 1.554,0 2.52,-1.029 0.987,-1.05 0.987,-2.772 0,-1.701 -0.966,-2.751 -0.945,-1.05 -2.478,-1.05 z"
|
||||
id="path17778" />
|
||||
<path
|
||||
d="m 52.814987,-2.709 v -5.922 h -1.386 v -1.512 h 1.386 v -3.108 h 1.701 v 3.108 h 2.352 v 1.512 h -2.352 v 5.775 q 0,0.672 0.294,1.008 0.315,0.315 0.945,0.315 h 1.113 V 0 h -1.365 q -1.407,0 -2.058,-0.651 -0.63,-0.651 -0.63,-2.058 z"
|
||||
id="path17780" />
|
||||
<path
|
||||
d="m 58.69499,-13.545 h 1.89 v 1.869 h -1.89 z M 58.77899,0 v -10.143 h 1.722 V 0 Z"
|
||||
id="path17782" />
|
||||
<path
|
||||
d="m 73.016979,-5.061 q 0,2.289 -1.533,3.843 -1.533,1.533 -3.78,1.533 -2.247,0 -3.759,-1.533 -1.491,-1.533 -1.491,-3.822 0,-2.289 1.533,-3.822 1.533,-1.554 3.78,-1.554 2.247,0 3.738,1.533 1.512,1.533 1.512,3.822 z m -8.82,0.021 q 0,1.638 1.008,2.751 1.029,1.113 2.541,1.113 1.491,0 2.499,-1.113 1.029,-1.113 1.029,-2.772 0,-1.617 -1.029,-2.73 -1.029,-1.134 -2.52,-1.134 -1.491,0 -2.52,1.113 -1.008,1.113 -1.008,2.772 z"
|
||||
id="path17784" />
|
||||
<path
|
||||
d="m 74.780984,-10.143 h 1.68 v 1.575 q 0.609,-0.882 1.533,-1.344 0.945,-0.483 2.1,-0.483 2.016,0 3.15,1.302 1.134,1.302 1.134,3.57 V 0 h -1.701 v -5.418 q 0,-1.596 -0.798,-2.52 -0.777,-0.924 -2.184,-0.924 -1.449,0 -2.331,0.987 -0.882,0.987 -0.882,2.562 V 0 h -1.701 z"
|
||||
id="path17786" />
|
||||
<path
|
||||
d="m 91.097987,0 -6.51,-14.7 h 1.974 l 5.481,12.39 5.418,-12.39 h 1.932 l -6.51,14.7 z"
|
||||
id="path17788" />
|
||||
<path
|
||||
d="m 102.522,-13.104 h -2.268 V -14.7 h 4.032 V 0 h -1.764 z"
|
||||
id="path17790" />
|
||||
</g>
|
||||
<path
|
||||
d="M82.5001 252.5C82.5001 245.32 93.9168 239.5 108 239.5 122.083 239.5 133.5 245.32 133.5 252.5 133.5 259.68 122.083 265.5 108 265.5 93.9168 265.5 82.5001 259.68 82.5001 252.5Z"
|
||||
stroke="#E96115"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path17394" />
|
||||
<path
|
||||
d="M1-1.87829e-06 1.00009 49.2281-0.999907 49.2281-1 1.87829e-06ZM4.00009 47.8948 0.000104987 55.8948-3.99991 47.8948Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 108 238.895)"
|
||||
id="path17396" />
|
||||
<g
|
||||
aria-label="input"
|
||||
transform="translate(87.5787 235)"
|
||||
id="text17398"
|
||||
style="font-size:18px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 1.044,-11.61 h 1.62 v 1.602 H 1.044 Z M 1.116,0 V -8.694 H 2.592 V 0 Z"
|
||||
id="path17793" />
|
||||
<path
|
||||
d="m 4.6619887,-8.694 h 1.44 v 1.35 q 0.522,-0.756 1.314,-1.152 0.81,-0.414 1.8,-0.414 1.7280003,0 2.7000003,1.116 0.972,1.116 0.972,3.06 V 0 h -1.458 v -4.644 q 0,-1.368 -0.684,-2.16 -0.666,-0.792 -1.8720003,-0.792 -1.242,0 -1.998,0.846 -0.756,0.846 -0.756,2.196 V 0 h -1.458 z"
|
||||
id="path17795" />
|
||||
<path
|
||||
d="m 16.145974,3.24 h -1.458 V -8.694 h 1.44 v 1.404 q 0.576,-0.756 1.422,-1.17 0.846,-0.432 1.8,-0.432 1.836,0 3.06,1.278 1.224,1.26 1.224,3.24 0,1.998 -1.278,3.294 -1.26,1.278 -3.114,1.278 -0.882,0 -1.71,-0.36 -0.81,-0.378 -1.386,-1.062 z m 3.042,-10.836 q -1.314,0 -2.214,0.954 -0.882,0.936 -0.882,2.34 0,1.386 0.864,2.304 0.882,0.918 2.178,0.918 1.332,0 2.16,-0.882 0.846,-0.9 0.846,-2.376 0,-1.458 -0.828,-2.358 -0.81,-0.9 -2.124,-0.9 z"
|
||||
id="path17797" />
|
||||
<path
|
||||
d="m 33.281963,0 h -1.458 v -1.332 q -0.522,0.756 -1.314,1.152 -0.792,0.378 -1.8,0.378 -1.71,0 -2.7,-1.116 -0.972,-1.116 -0.972,-3.042 v -4.734 h 1.476 v 4.644 q 0,1.368 0.666,2.16 0.684,0.792 1.89,0.792 1.224,0 1.98,-0.846 0.756,-0.846 0.756,-2.196 v -4.554 h 1.476 z"
|
||||
id="path17799" />
|
||||
<path
|
||||
d="m 35.657949,-2.322 v -5.076 h -1.188 v -1.296 h 1.188 v -2.664 h 1.458 v 2.664 h 2.016 v 1.296 h -2.016 v 4.95 q 0,0.576 0.252,0.864 0.27,0.27 0.81,0.27 h 0.954 V 0 h -1.17 q -1.206,0 -1.764,-0.558 -0.54,-0.558 -0.54,-1.764 z"
|
||||
id="path17801" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 7.6 KiB |
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:939e1aa0d2ba28dab1c930c6271a9f4063fd9f8c539d4713c0bd0f87c34f66c3
|
||||
size 15020
|
||||
281
docs/MO_DG/img/inception_v1_std_output.svg
Normal file
@@ -0,0 +1,281 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="723"
|
||||
height="480"
|
||||
overflow="hidden"
|
||||
version="1.1"
|
||||
id="svg18304"
|
||||
sodipodi:docname="inception_v1_std_output.svg"
|
||||
inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview18306"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
showgrid="false"
|
||||
inkscape:zoom="1.7634855"
|
||||
inkscape:cx="361.5"
|
||||
inkscape:cy="239.86588"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1137"
|
||||
inkscape:window-x="-8"
|
||||
inkscape:window-y="-8"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg18304" />
|
||||
<defs
|
||||
id="defs18258">
|
||||
<clipPath
|
||||
id="clip0">
|
||||
<rect
|
||||
x="0"
|
||||
y="0"
|
||||
width="723"
|
||||
height="480"
|
||||
id="rect18255" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
<g
|
||||
clip-path="url(#clip0)"
|
||||
id="g18302">
|
||||
<path
|
||||
id="rect18260"
|
||||
style="fill:#ffffff"
|
||||
d="M 0,0 H 723 V 480.11099 H 0 Z" />
|
||||
<path
|
||||
d="M28.5 97.1906C28.5 57.6002 60.5869 25.5058 100.168 25.5058L623.832 25.5058C663.413 25.5058 695.5 57.6002 695.5 97.1906L695.5 383.92C695.5 423.511 663.413 455.605 623.832 455.605L100.168 455.605C60.5869 455.605 28.5 423.511 28.5 383.92Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33303"
|
||||
stroke-miterlimit="8"
|
||||
fill="#E9E9E9"
|
||||
fill-rule="evenodd"
|
||||
id="path18262" />
|
||||
<g
|
||||
aria-label="Predictions"
|
||||
transform="matrix(1 0 0 1.00023 289.976 57)"
|
||||
id="text18264"
|
||||
style="font-size:24px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 1.56,0 v -16.8 h 7.296 q 2.76,0 4.344,1.392 1.608,1.368 1.608,3.72 0,2.376 -1.728,3.816 -1.728,1.416 -4.608,1.416 H 3.624 V 0 Z m 11.136,-11.64 q 0,-1.536 -1.032,-2.4 -1.032,-0.888 -2.88,-0.888 h -5.16 v 6.624 h 4.992 q 1.872,0 2.976,-0.888 1.104,-0.912 1.104,-2.448 z"
|
||||
id="path18672" />
|
||||
<path
|
||||
d="m 16.559979,-11.592 h 1.92 v 1.464 q 0.528,-0.768 1.368,-1.152 0.84,-0.384 1.992,-0.384 h 1.056 v 1.848 h -1.08 q -1.656,0 -2.496,0.864 -0.816,0.864 -0.816,2.616 V 0 h -1.944 z"
|
||||
id="path18674" />
|
||||
<path
|
||||
d="m 29.759966,-1.296 q 1.152,0 2.112,-0.48 0.984,-0.504 1.8,-1.512 l 1.176,1.08 q -0.984,1.296 -2.304,1.944 -1.32,0.624 -2.952,0.624 -2.616,0 -4.296,-1.704 -1.656,-1.728 -1.656,-4.416 0,-2.616 1.704,-4.368 1.704,-1.752 4.2,-1.752 2.544,0 4.128,1.656 1.584,1.656 1.584,4.32 0,0.192 -0.024,0.456 -0.024,0.24 -0.048,0.36 h -9.528 q 0.192,1.704 1.32,2.76 1.152,1.032 2.784,1.032 z m -0.192,-8.928 q -1.512,0 -2.592,0.984 -1.08,0.984 -1.32,2.64 h 7.632 q -0.216,-1.68 -1.224,-2.64 -1.008,-0.984 -2.496,-0.984 z"
|
||||
id="path18676" />
|
||||
<path
|
||||
d="m 46.679981,-16.8 h 1.968 V 0 h -1.944 v -1.872 q -0.792,1.008 -1.92,1.56 -1.104,0.552 -2.376,0.552 -2.424,0 -4.08,-1.68 -1.632,-1.704 -1.632,-4.32 0,-2.64 1.68,-4.368 1.704,-1.752 4.176,-1.752 1.2,0 2.256,0.504 1.08,0.48 1.872,1.392 z m -4.056,15.336 q 1.752,0 2.928,-1.248 1.2,-1.272 1.2,-3.12 0,-1.872 -1.152,-3.096 -1.152,-1.224 -2.88,-1.224 -1.776,0 -2.904,1.2 -1.128,1.2 -1.128,3.144 0,1.944 1.104,3.144 1.104,1.2 2.832,1.2 z"
|
||||
id="path18678" />
|
||||
<path
|
||||
d="m 51.287966,-15.48 h 2.16 v 2.136 h -2.16 z m 0.096,15.48 v -11.592 h 1.968 V 0 Z"
|
||||
id="path18680" />
|
||||
<path
|
||||
d="m 61.583951,0.36 q -2.664,0 -4.344,-1.68 -1.656,-1.704 -1.656,-4.44 0,-2.592 1.752,-4.368 1.752,-1.776 4.296,-1.776 1.848,0 3.144,0.864 1.32,0.84 1.992,2.448 l -1.656,0.792 q -0.552,-1.2 -1.44,-1.8 -0.864,-0.6 -2.064,-0.6 -1.704,0 -2.88,1.296 -1.152,1.272 -1.152,3.144 0,1.896 1.152,3.168 1.176,1.248 2.928,1.248 1.128,0 2.088,-0.6 0.984,-0.624 1.584,-1.728 l 1.512,0.816 q -0.696,1.488 -2.088,2.352 -1.392,0.864 -3.168,0.864 z"
|
||||
id="path18682" />
|
||||
<path
|
||||
d="m 69.431948,-3.096 v -6.768 h -1.584 v -1.728 h 1.584 v -3.552 h 1.944 v 3.552 h 2.688 v 1.728 h -2.688 v 6.6 q 0,0.768 0.336,1.152 0.36,0.36 1.08,0.36 h 1.272 V 0 h -1.56 q -1.608,0 -2.352,-0.744 -0.72,-0.744 -0.72,-2.352 z"
|
||||
id="path18684" />
|
||||
<path
|
||||
d="m 76.151949,-15.48 h 2.16 v 2.136 h -2.16 z m 0.096,15.48 v -11.592 h 1.968 V 0 Z"
|
||||
id="path18686" />
|
||||
<path
|
||||
d="m 92.519934,-5.784 q 0,2.616 -1.752,4.392 -1.752,1.752 -4.32,1.752 -2.568,0 -4.296,-1.752 -1.704,-1.752 -1.704,-4.368 0,-2.616 1.752,-4.368 1.752,-1.776 4.32,-1.776 2.568,0 4.272,1.752 1.728,1.752 1.728,4.368 z m -10.08,0.024 q 0,1.872 1.152,3.144 1.176,1.272 2.904,1.272 1.704,0 2.856,-1.272 1.176,-1.272 1.176,-3.168 0,-1.848 -1.176,-3.12 -1.176,-1.296 -2.88,-1.296 -1.704,0 -2.88,1.272 -1.152,1.272 -1.152,3.168 z"
|
||||
id="path18688" />
|
||||
<path
|
||||
d="m 94.535947,-11.592 h 1.92 v 1.8 q 0.696,-1.008 1.752,-1.536 1.08,-0.552 2.400003,-0.552 2.304,0 3.6,1.488 1.296,1.488 1.296,4.08 V 0 h -1.944 v -6.192 q 0,-1.824 -0.912,-2.88 -0.888,-1.056 -2.496,-1.056 -1.656003,0 -2.664003,1.128 -1.008,1.128 -1.008,2.928 V 0 h -1.944 z"
|
||||
id="path18690" />
|
||||
<path
|
||||
d="m 117.23993,-3.384 q 0,1.632 -1.392,2.664 -1.392,1.032 -3.624,1.032 -1.464,0 -2.76,-0.456 -1.272,-0.48 -2.304,-1.416 l 1.008,-1.488 q 1.104,0.912 2.064,1.32 0.96,0.408 2.04,0.408 1.392,0 2.232,-0.528 0.864,-0.552 0.864,-1.416 0,-0.888 -0.72,-1.272 -0.72,-0.408 -2.376,-0.432 -2.424,-0.096 -3.552,-0.888 -1.104,-0.792 -1.104,-2.472 0,-1.536 1.272,-2.544 1.296,-1.032 3.312,-1.032 1.368,0 2.568,0.432 1.2,0.432 2.184,1.272 l -0.936,1.464 q -0.984,-0.792 -1.896,-1.152 -0.912,-0.36 -1.944,-0.36 -1.176,0 -1.944,0.528 -0.744,0.528 -0.744,1.296 0,0.84 0.672,1.224 0.672,0.384 2.136,0.456 2.616,0.072 3.768,0.864 1.176,0.792 1.176,2.496 z"
|
||||
id="path18692" />
|
||||
</g>
|
||||
<path
|
||||
d="M320 133.531C320 127.178 331.864 122.028 346.5 122.028 361.136 122.028 373 127.178 373 133.531 373 139.883 361.136 145.033 346.5 145.033 331.864 145.033 320 139.883 320 133.531Z"
|
||||
stroke="#E96115"
|
||||
stroke-width="5.99862"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path18266" />
|
||||
<path
|
||||
d="M215.5 229.053C215.5 222.7 227.364 217.55 242 217.55 256.636 217.55 268.5 222.7 268.5 229.053 268.5 235.406 256.636 240.555 242 240.555 227.364 240.555 215.5 235.406 215.5 229.053Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="2.99931"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path18268" />
|
||||
<path
|
||||
d="M422.5 229.053C422.5 222.7 434.364 217.55 449 217.55 463.636 217.55 475.5 222.7 475.5 229.053 475.5 235.406 463.636 240.555 449 240.555 434.364 240.555 422.5 235.406 422.5 229.053Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="2.99931"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path18270" />
|
||||
<path
|
||||
d="M216.5 295.568C216.5 288.939 228.364 283.565 243 283.565 257.636 283.565 269.5 288.939 269.5 295.568 269.5 302.197 257.636 307.571 243 307.571 228.364 307.571 216.5 302.197 216.5 295.568Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="2.99931"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path18272" />
|
||||
<path
|
||||
d="M172.5 295.568C172.5 291.149 176.306 287.566 181 287.566 185.694 287.566 189.5 291.149 189.5 295.568 189.5 299.987 185.694 303.57 181 303.57 176.306 303.57 172.5 299.987 172.5 295.568Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="2.99931"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path18274" />
|
||||
<path
|
||||
d="M2.8778-0.845066 50.3686 160.881 44.613 162.571-2.8778 0.845066ZM55.2792 156.313 51.7162 176.115 38.0124 161.383Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1.00023 310 631.26)"
|
||||
id="path18276" />
|
||||
<path
|
||||
d="M189 294.068 209.793 294.068 209.793 296.068 189 296.068ZM208.46 291.068 216.458 295.068 208.46 299.068Z"
|
||||
fill="#AEAEAE"
|
||||
id="path18278" />
|
||||
<path
|
||||
d="M0.267582-0.963296 1.63018-0.584796 2.94898 0.656429 4.17747 2.43091 5.29426 4.73019 6.31987 7.48652 7.33691 10.7283 8.22228 14.333 9.10517 18.3691 10.6129 27.4153 11.8057 37.5229 12.5588 48.3177 12.8093 59.4012 12.8073 59.3573 12.9948 62.1692 12.9721 62.0141 13.597 64.7635 13.5489 64.6101 15.7984 70.1713 15.699 69.9851 19.3857 75.4213 19.2822 75.293 24.281 80.5418 24.1986 80.4645 30.3222 85.5883 30.2401 85.5265 37.5509 90.4629 37.4804 90.4194 45.7285 95.0433 45.6694 95.0127 54.8548 99.3867 54.7983 99.3619 64.7335 103.361 64.6813 103.342 75.3663 106.966 75.3128 106.949 86.5602 110.074 86.5155 110.062 98.2628 112.749 98.2165 112.74 110.401 114.927 110.354 114.919 122.789 116.544 122.738 116.539 135.422 117.538 135.368 117.536 141.452 117.685 141.403 119.684 135.292 119.534 122.555 118.53 110.071 116.899 97.84 114.704 86.0472 112.006 74.7506 108.868 64.0127 105.226 54.023 101.205 44.7797 96.8037 36.4664 92.1432 29.0785 87.1549 22.8721 81.9617 17.7772 76.6121 13.9846 71.0195 11.665 65.2852 11.0049 62.3806 10.8108 59.4684 10.5603 48.3864 10.5625 48.4334 9.81266 37.6859 9.81713 37.7335 8.6299 27.6733 8.63661 27.7205 7.13696 18.7226 7.14645 18.7719 6.27165 14.7728 6.27742 14.7976 5.40262 11.2359 5.4196 11.2967 4.41983 8.10997 4.43676 8.15935 3.43699 5.47247 3.47469 5.56062 2.41244 3.37363 2.48973 3.5059 1.36499 1.88128 1.50179 2.04023 0.439534 1.04046 0.857159 1.27572-0.267582 0.963296ZM140.193 114.654 148.091 118.848 139.997 122.649Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(-6.12323e-17 -1.00023 -1 6.12465e-17 361.833 455.162)"
|
||||
id="path18280" />
|
||||
<path
|
||||
d="M361.02 454.694 361.304 453.274 362.199 451.781 363.518 450.393 365.19 449.122 367.284 447.879 369.678 446.778 375.324 444.684 382.029 442.976 389.412 441.587 397.359 440.767 405.514 440.454 405.403 440.464 407.465 440.152 407.238 440.214 409.238 439.401 409.042 439.507 411.104 438.07 410.98 438.172 412.98 436.235 412.895 436.328 414.894 433.828 414.841 433.902 416.778 430.965 416.744 431.019 418.744 427.519 418.718 427.568 420.655 423.693 420.63 423.746 422.504 419.371 422.493 419.398 424.368 414.648 424.354 414.685 426.166 409.498 426.156 409.528 427.906 403.965 427.897 403.995 431.334 391.745 431.322 391.79 434.447 378.29 434.44 378.32 437.377 363.57 437.372 363.597 440.059 347.847 440.054 347.878 442.366 331.191 442.363 331.217 444.3 313.842 444.298 313.863 445.922 295.925 445.92 295.954 447.045 277.517 447.044 277.538 447.794 258.85 447.793 258.877 447.954 246.668 449.954 246.695 449.792 258.917 449.041 277.628 447.915 296.091 446.288 314.053 444.348 331.452 442.032 348.168 439.341 363.947 436.398 378.726 433.265 392.263 429.818 404.551 428.059 410.143 426.235 415.364 424.348 420.146 422.456 424.561 420.494 428.487 418.464 432.039 416.484 435.042 414.417 437.627 412.314 439.665 410.094 441.212 407.881 442.111 405.647 442.45 397.468 442.765 397.532 442.76 389.659 443.573 389.741 443.561 382.43 444.936 382.492 444.922 375.869 446.609 375.969 446.578 370.408 448.64 370.478 448.611 368.166 449.674 368.259 449.625 366.26 450.813 366.354 450.749 364.792 451.936 364.912 451.829 363.725 453.079 363.857 452.905 363.107 454.155 363.23 453.836 362.98 455.086ZM444.938 247.962 449.042 240.015 452.935 248.068Z"
|
||||
fill="#AEAEAE"
|
||||
id="path18282" />
|
||||
<path
|
||||
d="M241.198 283.165 240.186 246.747 242.185 246.692 243.197 283.11ZM237.225 248.163 241 240.055 245.22 247.941Z"
|
||||
fill="#AEAEAE"
|
||||
id="path18284" />
|
||||
<path
|
||||
d="M0.0712306-0.997229 3.63338-0.74279 7.19032-0.0314014 10.7428 1.13122 14.1734 2.74943 20.6147 6.95858 26.4296 12.3902 31.2798 18.8996 35.0322 26.0863 36.4461 29.8783 37.4709 33.7853 38.1124 37.7626 38.3025 41.7553 38.49 45.6919 38.4784 45.5803 39.1032 49.4544 39.0793 49.3452 40.1415 53.1568 40.1221 53.0949 41.4343 56.844 41.3769 56.7116 45.0636 63.7725 44.9724 63.6291 49.7838 69.9401 49.6794 69.8234 55.3031 75.1972 55.1593 75.0831 61.4704 79.2071 61.3359 79.1333 64.6476 80.633 64.4946 80.5782 68.2843 81.5963 67.7655 83.5274 63.897 82.4881 60.441 80.9231 53.9884 76.7065 48.2412 71.2148 43.3305 64.7735 39.5709 57.5729 38.2241 53.7248 37.1381 49.828 36.4954 45.8432 36.3053 41.8504 36.1178 37.9138 36.1294 38.0255 35.5046 34.1514 35.5245 34.2458 34.5248 30.4342 34.5551 30.5298 33.1804 26.8432 33.2309 26.9566 29.5442 19.8957 29.6288 20.0304 24.8799 13.6568 24.9991 13.7901 19.3129 8.47883 19.4485 8.58514 13.1374 4.46109 13.2578 4.52839 9.94609 2.96625 10.0616 3.0122 6.62493 1.88746 6.73983 1.91764 3.30312 1.2303 3.42796 1.24717-0.0712306 0.997229ZM67.3347 78.4034 74.6078 83.6057 66.082 86.3028Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(-1.83697e-16 -1.00023 -1 1.83739e-16 448.574 216.65)"
|
||||
id="path18286" />
|
||||
<path
|
||||
d="M240.003 216.94 240.257 213.379 240.97 209.75 242.197 206.196 243.803 202.791 245.79 199.456 248.171 196.238 253.775 190.442 260.408 185.594 267.786 181.84 271.757 180.431 275.784 179.344 279.89 178.702 283.943 178.512 287.942 178.325 287.835 178.336 291.834 177.711 291.728 177.733 295.664 176.671 295.586 176.695 299.397 175.32 299.283 175.37 306.531 171.682 306.39 171.77 312.888 166.957 312.768 167.062 318.267 161.437 318.178 161.541 320.49 158.416 320.435 158.499 322.372 155.249 322.327 155.335 323.889 152.022 323.831 152.178 324.915 148.325 326.839 148.867 325.733 152.8 324.115 156.232 322.127 159.566 319.745 162.786 314.142 168.517 307.512 173.427 300.134 177.181 296.225 178.591 292.196 179.678 288.09 180.32 284.037 180.51 280.038 180.698 280.145 180.687 276.146 181.312 276.252 181.289 272.316 182.352 272.39 182.329 268.516 183.704 268.634 183.653 261.386 187.34 261.523 187.256 255.024 192.006 255.153 191.894 249.654 197.581 249.739 197.481 247.427 200.606 247.482 200.523 245.545 203.773 245.591 203.688 244.029 207 244.069 206.9 242.882 210.338 242.918 210.204 242.231 213.704 242.247 213.582 241.997 217.082ZM321.719 149.289 326.918 142.011 329.619 150.538Z"
|
||||
fill="#AEAEAE"
|
||||
id="path18288" />
|
||||
<g
|
||||
aria-label="Reshape_1SoftmaxShapeReshapeshape"
|
||||
transform="matrix(1 0 0 1.00023 311.46 115)"
|
||||
id="text18300"
|
||||
style="font-size:16px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 7.04,-4.72 10.224,0 H 8.592 L 5.568,-4.56 H 2.416 V 0 H 1.04 v -11.2 h 4.928 q 1.888,0 2.944,0.88 1.056,0.864 1.056,2.384 0,1.248 -0.768,2.096 -0.768,0.848 -2.16,1.12 z m 1.536,-3.184 q 0,-0.976 -0.704,-1.52 -0.688,-0.56 -1.936,-0.56 h -3.52 v 4.224 h 3.408 q 1.264,0 2,-0.576 0.752,-0.592 0.752,-1.568 z"
|
||||
id="path18695" />
|
||||
<path
|
||||
d="m 15.023998,-0.864 q 0.768,0 1.408,-0.32 0.656,-0.336 1.2,-1.008 l 0.784,0.72 q -0.656,0.864 -1.536,1.296 -0.88,0.416 -1.968,0.416 -1.744,0 -2.864,-1.136 -1.104,-1.152 -1.104,-2.944 0,-1.744 1.136,-2.912 1.136,-1.168 2.8,-1.168 1.696,0 2.752,1.104 1.056,1.104 1.056,2.88 0,0.128 -0.016,0.304 -0.016,0.16 -0.032,0.24 h -6.352 q 0.128,1.136 0.88,1.84 0.768,0.688 1.856,0.688 z m -0.128,-5.952 q -1.008,0 -1.728,0.656 -0.72,0.656 -0.88,1.76 h 5.088 q -0.144,-1.12 -0.816,-1.76 -0.672,-0.656 -1.664,-0.656 z"
|
||||
id="path18697" />
|
||||
<path
|
||||
d="m 26.224008,-2.256 q 0,1.088 -0.928,1.776 -0.928,0.688 -2.416,0.688 -0.976,0 -1.84,-0.304 -0.848,-0.32 -1.536,-0.944 l 0.672,-0.992 q 0.736,0.608 1.376,0.88 0.64,0.272 1.36,0.272 0.928,0 1.488,-0.352 0.576,-0.368 0.576,-0.944 0,-0.592 -0.48,-0.848 -0.48,-0.272 -1.584,-0.288 -1.616,-0.064 -2.368,-0.592 -0.736,-0.528 -0.736,-1.648 0,-1.024 0.848,-1.696 0.864,-0.688 2.208,-0.688 0.912,0 1.712,0.288 0.8,0.288 1.456,0.848 l -0.624,0.976 q -0.656,-0.528 -1.264,-0.768 -0.608,-0.24 -1.296,-0.24 -0.784,0 -1.296,0.352 -0.496,0.352 -0.496,0.864 0,0.56 0.448,0.816 0.448,0.256 1.424,0.304 1.744,0.048 2.512,0.576 0.784,0.528 0.784,1.664 z"
|
||||
id="path18699" />
|
||||
<path
|
||||
d="m 27.536019,-11.2 h 1.296 v 4.64 q 0.464,-0.656 1.152,-1.008 0.704,-0.352 1.568,-0.352 1.552,0 2.416,0.992 0.88,0.992 0.88,2.72 V 0 h -1.296 v -4.128 q 0,-1.232 -0.608,-1.92 -0.608,-0.704 -1.696,-0.704 -1.088,0 -1.76,0.752 -0.656,0.752 -0.656,1.952 V 0 h -1.296 z"
|
||||
id="path18701" />
|
||||
<path
|
||||
d="m 43.088006,0 h -1.264 v -1.264 q -0.464,0.672 -1.264,1.056 -0.784,0.368 -1.696,0.368 -1.28,0 -2.08,-0.64 -0.8,-0.656 -0.8,-1.712 0,-1.168 0.896,-1.76 0.896,-0.608 2.656,-0.608 h 2.272 q -0.016,-1.088 -0.576,-1.68 -0.544,-0.592 -1.568,-0.592 -0.608,0 -1.2,0.256 -0.592,0.256 -1.168,0.784 l -0.688,-0.864 q 0.672,-0.64 1.472,-0.96 0.8,-0.336 1.712,-0.336 1.552,0 2.416,0.928 0.88,0.912 0.88,2.528 z m -1.28,-3.552 h -2.256 q -1.168,0 -1.728,0.32 -0.56,0.32 -0.56,1.008 0,0.576 0.48,0.96 0.496,0.384 1.264,0.384 1.152,0 1.936,-0.736 0.8,-0.752 0.864,-1.936 z"
|
||||
id="path18703" />
|
||||
<path
|
||||
d="m 45.983996,2.88 h -1.296 V -7.728 h 1.28 v 1.248 q 0.512,-0.672 1.264,-1.04 0.752,-0.384 1.6,-0.384 1.632,0 2.72,1.136 1.088,1.12 1.088,2.88 0,1.776 -1.136,2.928 -1.12,1.136 -2.768,1.136 -0.784,0 -1.52,-0.32 -0.72,-0.336 -1.232,-0.944 z m 2.704,-9.632 q -1.168,0 -1.968,0.848 -0.784,0.832 -0.784,2.08 0,1.232 0.768,2.048 0.784,0.816 1.936,0.816 1.184,0 1.92,-0.784 0.752,-0.8 0.752,-2.112 0,-1.296 -0.736,-2.096 -0.72,-0.8 -1.888,-0.8 z"
|
||||
id="path18705" />
|
||||
<path
|
||||
d="m 57.711986,-0.864 q 0.768,0 1.408,-0.32 0.656,-0.336 1.2,-1.008 l 0.784,0.72 q -0.656,0.864 -1.536,1.296 -0.88,0.416 -1.968,0.416 -1.744,0 -2.864,-1.136 -1.104,-1.152 -1.104,-2.944 0,-1.744 1.136,-2.912 1.136,-1.168 2.8,-1.168 1.696,0 2.752,1.104 1.056,1.104 1.056,2.88 0,0.128 -0.016,0.304 -0.016,0.16 -0.032,0.24 h -6.352 q 0.128,1.136 0.88,1.84 0.768,0.688 1.856,0.688 z m -0.128,-5.952 q -1.008,0 -1.728,0.656 -0.72,0.656 -0.88,1.76 h 5.088 q -0.144,-1.12 -0.816,-1.76 -0.672,-0.656 -1.664,-0.656 z"
|
||||
id="path18707" />
|
||||
<path
|
||||
d="M 61.807996,2.064 V 0.896 h 8.128 v 1.168 z"
|
||||
id="path18709" />
|
||||
<path
|
||||
d="m 72.063992,-9.984 h -1.728 V -11.2 h 3.072 V 0 h -1.344 z"
|
||||
id="path18711" />
|
||||
<path
|
||||
d="m -89.276403,90.826302 q 0,-0.976 -0.768,-1.424 -0.752,-0.448 -2.432,-0.496 -2.304,-0.112 -3.328,-0.8 -1.024,-0.704 -1.024,-2.16 0,-1.44 1.216,-2.416 1.232,-0.976 3.088,-0.976 1.36,0 2.448,0.464 1.088,0.464 1.92,1.408 l -0.864,0.912 q -0.672,-0.752 -1.584,-1.152 -0.912,-0.4 -1.92,-0.4 -1.28,0 -2.112,0.592 -0.832,0.592 -0.832,1.456 0,0.912 0.736,1.328 0.752,0.4 2.416,0.448 2.288,0.096 3.344,0.848 1.056,0.736 1.056,2.256 0,1.504 -1.264,2.496 -1.264,0.992 -3.232,0.992 -1.6,0 -2.768,-0.512 -1.152,-0.512 -2.048,-1.632 l 0.848,-0.912 q 0.816,0.944 1.776,1.392 0.96,0.432 2.192,0.432 1.376,0 2.256,-0.608 0.88,-0.624 0.88,-1.536 z"
|
||||
id="path18713" />
|
||||
<path
|
||||
d="m -78.684401,90.122302 q 0,1.744 -1.168,2.928 -1.168,1.168 -2.88,1.168 -1.712,0 -2.864,-1.168 -1.136,-1.168 -1.136,-2.912 0,-1.744 1.168,-2.912 1.168,-1.184 2.88,-1.184 1.712,0 2.848,1.168 1.152,1.168 1.152,2.912 z m -6.72,0.016 q 0,1.248 0.768,2.096 0.784,0.848 1.936,0.848 1.136,0 1.904,-0.848 0.784,-0.848 0.784,-2.112 0,-1.232 -0.784,-2.08 -0.784,-0.864 -1.92,-0.864 -1.136,0 -1.92,0.848 -0.768,0.848 -0.768,2.112 z"
|
||||
id="path18715" />
|
||||
<path
|
||||
d="m -76.988396,87.402302 h -1.056 v -1.152 h 1.056 v -1.456 q 0,-0.976 0.512,-1.488 0.528,-0.528 1.504,-0.528 h 1.056 v 1.088 h -0.864 q -0.448,0 -0.672,0.24 -0.224,0.24 -0.224,0.72 v 1.424 h 1.76 v 1.152 h -1.76 v 6.576 h -1.312 z"
|
||||
id="path18717" />
|
||||
<path
|
||||
d="m -72.460395,91.914302 v -4.512 h -1.056 v -1.152 h 1.056 v -2.368 h 1.296 v 2.368 h 1.792 v 1.152 h -1.792 v 4.4 q 0,0.512 0.224,0.768 0.24,0.24 0.72,0.24 h 0.848 v 1.168 h -1.04 q -1.072,0 -1.568,-0.496 -0.48,-0.496 -0.48,-1.568 z"
|
||||
id="path18719" />
|
||||
<path
|
||||
d="m -68.060394,86.250302 h 1.28 v 0.928 q 0.432,-0.56 0.976,-0.832 0.56,-0.288 1.248,-0.288 0.8,0 1.424,0.336 0.624,0.336 0.992,0.976 0.432,-0.64 1.088,-0.976 0.672,-0.336 1.504,-0.336 1.36,0 2.16,0.88 0.816,0.88 0.816,2.288 v 4.752 h -1.312 v -4.672 q 0,-0.976 -0.496,-1.52 -0.48,-0.56 -1.36,-0.56 -0.864,0 -1.408,0.608 -0.528,0.592 -0.528,1.552 v 4.592 h -1.296 v -4.672 q 0,-0.976 -0.496,-1.52 -0.48,-0.56 -1.36,-0.56 -0.864,0 -1.408,0.608 -0.528,0.592 -0.528,1.552 v 4.592 h -1.296 z"
|
||||
id="path18721" />
|
||||
<path
|
||||
d="m -48.332381,93.978302 h -1.264 v -1.264 q -0.464,0.672 -1.264,1.056 -0.784,0.368 -1.696,0.368 -1.28,0 -2.08,-0.64 -0.8,-0.656 -0.8,-1.712 0,-1.168 0.896,-1.76 0.896,-0.608 2.656,-0.608 h 2.272 q -0.016,-1.088 -0.576,-1.68 -0.544,-0.592 -1.568,-0.592 -0.608,0 -1.2,0.256 -0.592,0.256 -1.168,0.784 l -0.688,-0.864 q 0.672,-0.64 1.472,-0.96 0.8,-0.336 1.712,-0.336 1.552,0 2.416,0.928 0.88,0.912 0.88,2.528 z m -1.28,-3.552 h -2.256 q -1.168,0 -1.728,0.32 -0.56,0.32 -0.56,1.008 0,0.576 0.48,0.96 0.496,0.384 1.264,0.384 1.152,0 1.936,-0.736 0.8,-0.752 0.864,-1.936 z"
|
||||
id="path18723" />
|
||||
<path
|
||||
d="m -45.980391,93.978302 h -1.456 l 3.008,-3.984 -2.8,-3.744 h 1.536 l 2.032,2.832 2.064,-2.832 h 1.44 l -2.8,3.728 3.056,4 h -1.52 l -2.272,-3.12 z"
|
||||
id="path18725" />
|
||||
<path
|
||||
d="m 121.996,90.826302 q 0,-0.976 -0.768,-1.424 -0.752,-0.448 -2.432,-0.496 -2.304,-0.112 -3.328,-0.8 -1.024,-0.704 -1.024,-2.16 0,-1.44 1.216,-2.416 1.232,-0.976 3.088,-0.976 1.36,0 2.448,0.464 1.088,0.464 1.92,1.408 l -0.864,0.912 q -0.672,-0.752 -1.584,-1.152 -0.912,-0.4 -1.92,-0.4 -1.28,0 -2.112,0.592 -0.832,0.592 -0.832,1.456 0,0.912 0.736,1.328 0.752,0.4 2.416,0.448 2.288,0.096 3.344,0.848 1.056,0.736 1.056,2.256 0,1.504 -1.264,2.496 -1.264,0.992 -3.232,0.992 -1.6,0 -2.768,-0.512 -1.152,-0.512 -2.048,-1.632 l 0.848,-0.912 q 0.816,0.944 1.776,1.392 0.96,0.432 2.192,0.432 1.376,0 2.256,-0.608 0.88,-0.624 0.88,-1.536 z"
|
||||
id="path18727" />
|
||||
<path
|
||||
d="m 124.892,82.778302 h 1.296 v 4.64 q 0.464,-0.656 1.152,-1.008 0.704,-0.352 1.568,-0.352 1.552,0 2.416,0.992 0.88,0.992 0.88,2.72 v 4.208 h -1.296 v -4.128 q 0,-1.232 -0.608,-1.92 -0.608,-0.704 -1.696,-0.704 -1.088,0 -1.76,0.752 -0.656,0.752 -0.656,1.952 v 4.048 h -1.296 z"
|
||||
id="path18729" />
|
||||
<path
|
||||
d="m 140.44399,93.978302 h -1.264 v -1.264 q -0.464,0.672 -1.264,1.056 -0.784,0.368 -1.696,0.368 -1.28,0 -2.08,-0.64 -0.8,-0.656 -0.8,-1.712 0,-1.168 0.896,-1.76 0.896,-0.608 2.656,-0.608 h 2.272 q -0.016,-1.088 -0.576,-1.68 -0.544,-0.592 -1.568,-0.592 -0.608,0 -1.2,0.256 -0.592,0.256 -1.168,0.784 l -0.688,-0.864 q 0.672,-0.64 1.472,-0.96 0.8,-0.336 1.712,-0.336 1.552,0 2.416,0.928 0.88,0.912 0.88,2.528 z m -1.28,-3.552 h -2.256 q -1.168,0 -1.728,0.32 -0.56,0.32 -0.56,1.008 0,0.576 0.48,0.96 0.496,0.384 1.264,0.384 1.152,0 1.936,-0.736 0.8,-0.752 0.864,-1.936 z"
|
||||
id="path18731" />
|
||||
<path
|
||||
d="m 143.33998,96.858302 h -1.296 v -10.608 h 1.28 v 1.248 q 0.512,-0.672 1.264,-1.04 0.752,-0.384 1.6,-0.384 1.632,0 2.72,1.136 1.088,1.12 1.088,2.88 0,1.776 -1.136,2.928 -1.12,1.136 -2.768,1.136 -0.784,0 -1.52,-0.32 -0.72,-0.336 -1.232,-0.944 z m 2.704,-9.632 q -1.168,0 -1.968,0.848 -0.784,0.832 -0.784,2.08 0,1.232 0.768,2.048 0.784,0.816 1.936,0.816 1.184,0 1.92,-0.784 0.752,-0.8 0.752,-2.112 0,-1.296 -0.736,-2.096 -0.72,-0.8 -1.888,-0.8 z"
|
||||
id="path18733" />
|
||||
<path
|
||||
d="m 155.06797,93.114302 q 0.768,0 1.408,-0.32 0.656,-0.336 1.2,-1.008 l 0.784,0.72 q -0.656,0.864 -1.536,1.296 -0.88,0.416 -1.968,0.416 -1.744,0 -2.864,-1.136 -1.104,-1.152 -1.104,-2.944 0,-1.744 1.136,-2.912 1.136,-1.168 2.8,-1.168 1.696,0 2.752,1.104 1.056,1.104 1.056,2.88 0,0.128 -0.016,0.304 -0.016,0.16 -0.032,0.24 h -6.352 q 0.128,1.136 0.88,1.84 0.768,0.688 1.856,0.688 z m -0.128,-5.952 q -1.008,0 -1.728,0.656 -0.72,0.656 -0.88,1.76 h 5.088 q -0.144,-1.12 -0.816,-1.76 -0.672,-0.656 -1.664,-0.656 z"
|
||||
id="path18735" />
|
||||
<path
|
||||
d="m -94.515,154.243 3.184,4.72 h -1.632 l -3.024,-4.56 h -3.152 v 4.56 h -1.376 v -11.2 h 4.928 q 1.888,0 2.944,0.88 1.056,0.864 1.056,2.384 0,1.248 -0.768,2.096 -0.768,0.848 -2.16,1.12 z m 1.536,-3.184 q 0,-0.976 -0.704,-1.52 -0.688,-0.56 -1.936,-0.56 h -3.52 v 4.224 h 3.408 q 1.264,0 2,-0.576 0.752,-0.592 0.752,-1.568 z"
|
||||
id="path18737" />
|
||||
<path
|
||||
d="m -86.531002,158.099 q 0.768,0 1.408,-0.32 0.656,-0.336 1.2,-1.008 l 0.784,0.72 q -0.656,0.864 -1.536,1.296 -0.88,0.416 -1.968,0.416 -1.744,0 -2.864,-1.136 -1.104,-1.152 -1.104,-2.944 0,-1.744 1.136,-2.912 1.136,-1.168 2.8,-1.168 1.696,0 2.752,1.104 1.056,1.104 1.056,2.88 0,0.128 -0.016,0.304 -0.016,0.16 -0.032,0.24 h -6.352 q 0.128,1.136 0.88,1.84 0.768,0.688 1.856,0.688 z m -0.128,-5.952 q -1.008,0 -1.728,0.656 -0.72,0.656 -0.88,1.76 h 5.088 q -0.144,-1.12 -0.816,-1.76 -0.672,-0.656 -1.664,-0.656 z"
|
||||
id="path18739" />
|
||||
<path
|
||||
d="m -75.330992,156.707 q 0,1.088 -0.928,1.776 -0.928,0.688 -2.416,0.688 -0.976,0 -1.84,-0.304 -0.848,-0.32 -1.536,-0.944 l 0.672,-0.992 q 0.736,0.608 1.376,0.88 0.64,0.272 1.36,0.272 0.928,0 1.488,-0.352 0.576,-0.368 0.576,-0.944 0,-0.592 -0.48,-0.848 -0.48,-0.272 -1.584,-0.288 -1.616,-0.064 -2.368,-0.592 -0.736,-0.528 -0.736,-1.648 0,-1.024 0.848,-1.696 0.864,-0.688 2.208,-0.688 0.912,0 1.712,0.288 0.8,0.288 1.456,0.848 l -0.624,0.976 q -0.656,-0.528 -1.264,-0.768 -0.608,-0.24 -1.296,-0.24 -0.784,0 -1.296,0.352 -0.496,0.352 -0.496,0.864 0,0.56 0.448,0.816 0.448,0.256 1.424,0.304 1.744,0.048 2.512,0.576 0.784,0.528 0.784,1.664 z"
|
||||
id="path18741" />
|
||||
<path
|
||||
d="m -74.018982,147.763 h 1.296 v 4.64 q 0.464,-0.656 1.152,-1.008 0.704,-0.352 1.568,-0.352 1.552,0 2.416,0.992 0.88,0.992 0.88,2.72 v 4.208 h -1.296 v -4.128 q 0,-1.232 -0.608,-1.92 -0.608,-0.704 -1.696,-0.704 -1.088,0 -1.76,0.752 -0.656,0.752 -0.656,1.952 v 4.048 h -1.296 z"
|
||||
id="path18743" />
|
||||
<path
|
||||
d="m -58.466994,158.963 h -1.264 v -1.264 q -0.464,0.672 -1.264,1.056 -0.784,0.368 -1.696,0.368 -1.28,0 -2.08,-0.64 -0.8,-0.656 -0.8,-1.712 0,-1.168 0.896,-1.76 0.896,-0.608 2.656,-0.608 h 2.272 q -0.016,-1.088 -0.576,-1.68 -0.544,-0.592 -1.568,-0.592 -0.608,0 -1.2,0.256 -0.592,0.256 -1.168,0.784 l -0.688,-0.864 q 0.672,-0.64 1.472,-0.96 0.8,-0.336 1.712,-0.336 1.552,0 2.416,0.928 0.88,0.912 0.88,2.528 z m -1.28,-3.552 h -2.256 q -1.168,0 -1.728,0.32 -0.56,0.32 -0.56,1.008 0,0.576 0.48,0.96 0.496,0.384 1.264,0.384 1.152,0 1.936,-0.736 0.8,-0.752 0.864,-1.936 z"
|
||||
id="path18745" />
|
||||
<path
|
||||
d="m -55.571004,161.843 h -1.296 v -10.608 h 1.28 v 1.248 q 0.512,-0.672 1.264,-1.04 0.752,-0.384 1.6,-0.384 1.632,0 2.72,1.136 1.088,1.12 1.088,2.88 0,1.776 -1.136,2.928 -1.12,1.136 -2.768,1.136 -0.784,0 -1.52,-0.32 -0.72,-0.336 -1.232,-0.944 z m 2.704,-9.632 q -1.168,0 -1.968,0.848 -0.784,0.832 -0.784,2.08 0,1.232 0.768,2.048 0.784,0.816 1.936,0.816 1.184,0 1.92,-0.784 0.752,-0.8 0.752,-2.112 0,-1.296 -0.736,-2.096 -0.72,-0.8 -1.888,-0.8 z"
|
||||
id="path18747" />
|
||||
<path
|
||||
d="m -43.843014,158.099 q 0.768,0 1.408,-0.32 0.656,-0.336 1.2,-1.008 l 0.784,0.72 q -0.656,0.864 -1.536,1.296 -0.88,0.416 -1.968,0.416 -1.744,0 -2.864,-1.136 -1.104,-1.152 -1.104,-2.944 0,-1.744 1.136,-2.912 1.136,-1.168 2.8,-1.168 1.696,0 2.752,1.104 1.056,1.104 1.056,2.88 0,0.128 -0.016,0.304 -0.016,0.16 -0.032,0.24 h -6.352 q 0.128,1.136 0.88,1.84 0.768,0.688 1.856,0.688 z m -0.128,-5.952 q -1.008,0 -1.728,0.656 -0.72,0.656 -0.88,1.76 h 5.088 q -0.144,-1.12 -0.816,-1.76 -0.672,-0.656 -1.664,-0.656 z"
|
||||
id="path18749" />
|
||||
<path
|
||||
d="m -176.142,183.124 q 0,0.884 -0.754,1.443 -0.754,0.559 -1.963,0.559 -0.793,0 -1.495,-0.247 -0.689,-0.26 -1.248,-0.767 l 0.546,-0.806 q 0.598,0.494 1.118,0.715 0.52,0.221 1.105,0.221 0.754,0 1.209,-0.286 0.468,-0.299 0.468,-0.767 0,-0.481 -0.39,-0.689 -0.39,-0.221 -1.287,-0.234 -1.313,-0.052 -1.924,-0.481 -0.598,-0.429 -0.598,-1.339 0,-0.832 0.689,-1.378 0.702,-0.559 1.794,-0.559 0.741,0 1.391,0.234 0.65,0.234 1.183,0.689 l -0.507,0.793 q -0.533,-0.429 -1.027,-0.624 -0.494,-0.195 -1.053,-0.195 -0.637,0 -1.053,0.286 -0.403,0.286 -0.403,0.702 0,0.455 0.364,0.663 0.364,0.208 1.157,0.247 1.417,0.039 2.041,0.468 0.637,0.429 0.637,1.352 z"
|
||||
style="font-size:13px"
|
||||
id="path18751" />
|
||||
<path
|
||||
d="m -174.867,175.857 h 1.053 v 3.77 q 0.377,-0.533 0.936,-0.819 0.572,-0.286 1.274,-0.286 1.261,0 1.963,0.806 0.715,0.806 0.715,2.21 v 3.419 h -1.053 v -3.354 q 0,-1.001 -0.494,-1.56 -0.494,-0.572 -1.378,-0.572 -0.884,0 -1.43,0.611 -0.533,0.611 -0.533,1.586 v 3.289 h -1.053 z"
|
||||
style="font-size:13px"
|
||||
id="path18753" />
|
||||
<path
|
||||
d="m -162.23101,184.957 h -1.027 v -1.027 q -0.377,0.546 -1.027,0.858 -0.637,0.299 -1.378,0.299 -1.04,0 -1.69,-0.52 -0.65,-0.533 -0.65,-1.391 0,-0.949 0.728,-1.43 0.728,-0.494 2.158,-0.494 h 1.846 q -0.013,-0.884 -0.468,-1.365 -0.442,-0.481 -1.274,-0.481 -0.494,0 -0.975,0.208 -0.481,0.208 -0.949,0.637 l -0.559,-0.702 q 0.546,-0.52 1.196,-0.78 0.65,-0.273 1.391,-0.273 1.261,0 1.963,0.754 0.715,0.741 0.715,2.054 z m -1.04,-2.886 h -1.833 q -0.949,0 -1.404,0.26 -0.455,0.26 -0.455,0.819 0,0.468 0.39,0.78 0.403,0.312 1.027,0.312 0.936,0 1.573,-0.598 0.65,-0.611 0.702,-1.573 z"
|
||||
style="font-size:13px"
|
||||
id="path18755" />
|
||||
<path
|
||||
d="m -159.87802,187.297 h -1.053 v -8.619 h 1.04 v 1.014 q 0.416,-0.546 1.027,-0.845 0.611,-0.312 1.3,-0.312 1.326,0 2.21,0.923 0.884,0.91 0.884,2.34 0,1.443 -0.923,2.379 -0.91,0.923 -2.249,0.923 -0.637,0 -1.235,-0.26 -0.585,-0.273 -1.001,-0.767 z m 2.197,-7.826 q -0.949,0 -1.599,0.689 -0.637,0.676 -0.637,1.69 0,1.001 0.624,1.664 0.637,0.663 1.573,0.663 0.962,0 1.56,-0.637 0.611,-0.65 0.611,-1.716 0,-1.053 -0.598,-1.703 -0.585,-0.65 -1.534,-0.65 z"
|
||||
style="font-size:13px"
|
||||
id="path18757" />
|
||||
<path
|
||||
d="m -150.34903,184.255 q 0.624,0 1.144,-0.26 0.533,-0.273 0.975,-0.819 l 0.637,0.585 q -0.533,0.702 -1.248,1.053 -0.715,0.338 -1.599,0.338 -1.417,0 -2.327,-0.923 -0.897,-0.936 -0.897,-2.392 0,-1.417 0.923,-2.366 0.923,-0.949 2.275,-0.949 1.378,0 2.236,0.897 0.858,0.897 0.858,2.34 0,0.104 -0.013,0.247 -0.013,0.13 -0.026,0.195 h -5.161 q 0.104,0.923 0.715,1.495 0.624,0.559 1.508,0.559 z m -0.104,-4.836 q -0.819,0 -1.404,0.533 -0.585,0.533 -0.715,1.43 h 4.134 q -0.117,-0.91 -0.663,-1.43 -0.546,-0.533 -1.352,-0.533 z"
|
||||
style="font-size:13px"
|
||||
id="path18759" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 30 KiB |
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:9859464a5c3ec91e4d6316109f523f48ad8972d2213a6797330e665d45b35c54
|
||||
size 44117
|
||||
913
docs/MO_DG/img/lm_1b.svg
Normal file
@@ -0,0 +1,913 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="992"
|
||||
height="1200"
|
||||
overflow="hidden"
|
||||
version="1.1"
|
||||
id="svg21925"
|
||||
sodipodi:docname="lm_1b.svg"
|
||||
inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview21927"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
showgrid="false"
|
||||
inkscape:zoom="0.74333333"
|
||||
inkscape:cx="495.73991"
|
||||
inkscape:cy="600"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1137"
|
||||
inkscape:window-x="-8"
|
||||
inkscape:window-y="-8"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg21925" />
|
||||
<defs
|
||||
id="defs21805">
|
||||
<clipPath
|
||||
id="clip0">
|
||||
<rect
|
||||
x="0"
|
||||
y="0"
|
||||
width="992"
|
||||
height="1200"
|
||||
id="rect21802" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
<g
|
||||
clip-path="url(#clip0)"
|
||||
id="g21923">
|
||||
<path
|
||||
id="rect21807"
|
||||
style="fill:#ffffff"
|
||||
d="M 0,0 H 992 V 1200 H 0 Z" />
|
||||
<path
|
||||
d="M0.0128494-0.999917 53.5258-0.31225 106.614 1.75123 158.885 5.06511 184.588 7.19136 209.913 9.56753 234.8 12.2563 259.25 15.1328 283.139 18.3222 306.464 21.7616 329.164 25.3885 351.242 29.2036 372.506 33.269 393.023 37.4598 412.728 41.9013 431.497 46.4684 449.332 51.2244 466.229 56.1684 482.004 61.1762 496.72 66.3114 510.318 71.5751 522.67 76.9671 533.843 82.4281 543.648 88.0224 552.154 93.63 559.225 99.3745 562.22 102.243 564.835 105.176 567.072 108.117 568.935 111.071 570.419 114.039 571.528 117.041 572.117 120.048 572.376 123.03 572.124 134.606 571.371 146.085 570.179 157.378 568.486 168.419 566.478 179.022 564.031 189.251 561.331 198.918 558.314 207.97 554.98 216.273 551.453 223.832 547.723 230.469 543.787 236.12 539.692 240.727 535.377 244.192 533.217 245.009 532.509 243.139 534.521 242.377 534.249 242.533 538.374 239.22 538.253 239.336 542.253 234.836 542.179 234.928 546.054 229.366 546.003 229.448 549.691 222.885 549.656 222.952 553.156 215.452 553.134 215.502 556.447 207.252 556.426 207.309 559.426 198.309 559.412 198.356 562.099 188.731 562.09 188.767 564.527 178.58 564.517 178.626 566.517 168.064 566.512 168.098 568.199 157.098 568.193 157.145 569.38 145.895 569.377 145.935 570.127 134.497 570.125 134.541 570.375 123.041 570.379 123.149 570.129 120.274 570.144 120.38 569.581 117.505 569.624 117.659 568.562 114.784 568.606 114.885 567.168 112.01 567.216 112.096 565.404 109.221 565.454 109.293 563.267 106.418 563.316 106.478 560.753 103.603 560.808 103.66 557.871 100.847 557.932 100.901 550.932 95.2136 551.012 95.2724 542.575 89.7099 542.629 89.7436 532.879 84.1811 532.936 84.2109 521.811 78.7734 521.85 78.7915 509.537 73.4165 509.576 73.4326 496.014 68.1826 496.046 68.1942 481.358 63.0692 481.385 63.0781 465.635 58.0781 465.657 58.0848 448.782 53.1473 448.805 53.1537 430.992 48.4037 431.014 48.4091 412.264 43.8466 412.28 43.8505 392.593 39.413 392.612 39.4173 372.112 35.2298 372.125 35.2322 350.875 31.1697 350.892 31.1729 328.83 27.3604 328.842 27.3625 
306.155 23.7375 306.167 23.7393 282.854 20.3018 282.868 20.3037 258.993 17.1162 259.008 17.1182 234.571 14.2432 234.58 14.2442 209.705 11.5567 209.719 11.5581 184.407 9.18313 184.418 9.1841 158.73 7.0591 158.749 7.0605 106.499 3.748 106.524 3.74925 53.4612 1.68675 53.4872 1.68742-0.0128494 0.999917ZM535.329 247.494 526.5 246.062 532.943 239.859Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(-1.83697e-16 -1 -1 1.83697e-16 917.045 640.491)"
|
||||
id="path21809" />
|
||||
<path
|
||||
d="M119.493 24.1627C182.557 5.55553 242.292 81.0831 253.731 193.89L275.12 192.678 238.33 225.845 197.988 197.048 219.4 195.835C210.213 101.671 167.105 40.7267 122.698 59.1199Z"
|
||||
stroke="#EDB200"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#EDB200"
|
||||
fill-rule="evenodd"
|
||||
transform="matrix(-1 0 0 1 389.5 412.5)"
|
||||
id="path21811" />
|
||||
<path
|
||||
d="M434.993 436.663C498.057 418.056 557.792 493.583 569.231 606.39L590.62 605.178 553.83 638.345 513.488 609.548 534.901 608.335C525.713 514.171 482.605 453.227 438.198 471.62Z"
|
||||
stroke="#EDB200"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#EDB200"
|
||||
fill-rule="evenodd"
|
||||
id="path21813" />
|
||||
<path
|
||||
d="M785.441 565.553 804.145 531.169 838.529 549.873 827.224 553.212 853.097 640.814 822.619 649.816 796.745 562.214Z"
|
||||
stroke="#808080"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#808080"
|
||||
fill-rule="evenodd"
|
||||
id="path21815" />
|
||||
<path
|
||||
d="M376.5 1160C376.5 1150.89 392.618 1143.5 412.5 1143.5 432.382 1143.5 448.5 1150.89 448.5 1160 448.5 1169.11 432.382 1176.5 412.5 1176.5 392.618 1176.5 376.5 1169.11 376.5 1160Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21817" />
|
||||
<path
|
||||
d="M610.5 53.5001C610.5 44.1112 626.618 36.5001 646.5 36.5001 666.382 36.5001 682.5 44.1112 682.5 53.5001 682.5 62.8889 666.382 70.5001 646.5 70.5001 626.618 70.5001 610.5 62.8889 610.5 53.5001Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21819" />
|
||||
<path
|
||||
d="M610.5 126C610.5 116.887 626.618 109.5 646.5 109.5 666.382 109.5 682.5 116.887 682.5 126 682.5 135.113 666.382 142.5 646.5 142.5 626.618 142.5 610.5 135.113 610.5 126Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21821" />
|
||||
<g
|
||||
aria-label="softmax_..."
|
||||
transform="translate(603.52 24)"
|
||||
id="text21823"
|
||||
style="font-size:20px;font-family:Calibri, Calibri_MSFontService, sans-serif">
|
||||
<path
|
||||
d="m 7.0117187,-2.6367188 q 0,0.6738282 -0.2539062,1.2011719 Q 6.5136719,-0.90820313 6.0546875,-0.546875 5.5957031,-0.18554687 4.9609375,0 4.3261719,0.18554687 3.5644531,0.18554687 q -0.46875,0 -0.8984375,-0.078125 Q 2.2460938,0.0390625 1.9042969,-0.06835937 1.5722656,-0.18554687 1.3378906,-0.30273438 1.1035156,-0.4296875 0.99609375,-0.52734375 0.88867188,-0.625 0.83984375,-0.80078125 0.79101562,-0.9765625 0.79101562,-1.2792969 q 0,-0.1855468 0.0195313,-0.3125 Q 0.83007813,-1.71875 0.859375,-1.796875 0.88867188,-1.875 0.9375,-1.9042969 q 0.0585938,-0.039063 0.1269531,-0.039063 0.1074219,0 0.3125,0.1367188 0.2148438,0.1269531 0.5175781,0.2832031 0.3125001,0.15625 0.7324219,0.2929688 0.4199219,0.1269531 0.9667969,0.1269531 0.4101562,0 0.7421875,-0.087891 0.3320312,-0.087891 0.5761719,-0.2539063 0.2441406,-0.1757812 0.3710937,-0.4394531 0.1367188,-0.2636719 0.1367188,-0.625 0,-0.3710938 -0.1953125,-0.625 Q 5.0390625,-3.3886719 4.7265625,-3.5839844 4.4140625,-3.7792969 4.0234375,-3.9257813 3.6328125,-4.0820312 3.2128906,-4.2480469 2.8027344,-4.4140625 2.4023438,-4.6191406 2.0117188,-4.8339844 1.6992187,-5.1367187 1.3867187,-5.4394531 1.1914062,-5.859375 1.0058594,-6.2792969 1.0058594,-6.8652344 q 0,-0.5175781 0.1953125,-0.9863281 0.2050781,-0.4785156 0.6054687,-0.8300781 0.4003907,-0.3613282 0.9960938,-0.5761719 0.6054687,-0.2148438 1.40625,-0.2148438 0.3515625,0 0.703125,0.058594 0.3515625,0.058594 0.6347656,0.1464844 0.2832031,0.087891 0.4785156,0.1953125 0.2050781,0.097656 0.3027344,0.1757812 0.1074219,0.078125 0.1367187,0.1367188 0.039063,0.058594 0.048828,0.1367187 0.019531,0.068359 0.029297,0.1757813 0.019531,0.1074218 0.019531,0.2636718 0,0.1660157 -0.019531,0.2929688 -0.00977,0.1171875 -0.048828,0.1953125 -0.029297,0.078125 -0.078125,0.1171875 -0.048828,0.029297 -0.1074219,0.029297 -0.087891,0 -0.2539062,-0.1074219 Q 5.8886719,-7.7636719 5.625,-7.8808594 5.3613281,-8.0078125 5,-8.1152344 4.6484375,-8.2226563 4.1894531,-8.2226563 q -0.4101562,0 
-0.7226562,0.097656 -0.3125,0.087891 -0.5175781,0.2636719 -0.1953125,0.1660156 -0.3027344,0.4003906 -0.097656,0.234375 -0.097656,0.5078125 0,0.3808594 0.1953125,0.6445313 0.1953125,0.2539062 0.5078125,0.4492187 0.3125,0.1953125 0.7128907,0.3515625 0.4003906,0.15625 0.8105468,0.3222656 0.4199219,0.1660157 0.8203125,0.3710938 0.4101563,0.2050781 0.7226563,0.4980469 0.3125,0.2929687 0.4980468,0.7031249 0.1953125,0.4101563 0.1953125,0.9765625 z"
|
||||
id="path22293" />
|
||||
<path
|
||||
d="m 17.490234,-4.7363281 q 0,1.0742187 -0.283203,1.9824218 -0.283203,0.8984376 -0.849609,1.5527344 -0.556641,0.6542969 -1.40625,1.02539065 -0.839844,0.36132812 -1.953125,0.36132812 -1.083984,0 -1.894531,-0.32226562 Q 10.302734,-0.45898438 9.765625,-1.0742187 9.2285156,-1.6894531 8.9648438,-2.5683594 8.7011719,-3.4472656 8.7011719,-4.5605469 q 0,-1.0742187 0.2734375,-1.9726562 0.2832031,-0.9082031 0.8398437,-1.5625 0.5664059,-0.6542969 1.4062499,-1.015625 0.839844,-0.3613282 1.962891,-0.3613282 1.083984,0 1.884765,0.3222657 0.810547,0.3222656 1.347657,0.9375 0.537109,0.6152344 0.800781,1.4941406 0.273437,0.8789063 0.273437,1.9824219 z m -1.699218,0.1074219 q 0,-0.7128907 -0.136719,-1.3476563 Q 15.527344,-6.6113281 15.224609,-7.0898437 14.921875,-7.5683594 14.404297,-7.8417969 13.886719,-8.125 13.115234,-8.125 q -0.71289,0 -1.230468,0.2539063 -0.507813,0.2539062 -0.839844,0.7226562 -0.332031,0.4589844 -0.498047,1.09375 -0.15625,0.6347656 -0.15625,1.3867188 0,0.7226562 0.126953,1.3574218 0.136719,0.6347656 0.439453,1.1132813 0.3125,0.46875 0.830078,0.7519531 0.517579,0.2734375 1.289063,0.2734375 0.703125,0 1.220703,-0.2539062 0.517578,-0.2539063 0.849609,-0.7128907 0.332032,-0.4589844 0.488282,-1.09375 0.15625,-0.6347656 0.15625,-1.3964843 z"
|
||||
id="path22295" />
|
||||
<path
|
||||
d="m 21.904297,-7.9492187 v 7.70507807 q 0,0.078125 -0.03906,0.13671876 -0.03906,0.04882812 -0.136718,0.08789062 -0.08789,0.0390625 -0.244141,0.05859375 -0.15625,0.01953125 -0.390625,0.01953125 -0.234375,0 -0.390625,-0.01953125 -0.15625,-0.01953125 -0.253906,-0.05859375 -0.08789,-0.0390625 -0.126953,-0.08789062 -0.03906,-0.0585938 -0.03906,-0.13671876 V -7.9492187 h -1.240234 q -0.15625,0 -0.234375,-0.1562501 -0.07813,-0.15625 -0.07813,-0.5078125 0,-0.1855468 0.01953,-0.3125 0.01953,-0.1269531 0.05859,-0.2050781 0.03906,-0.087891 0.09766,-0.1171875 0.05859,-0.039063 0.136719,-0.039063 h 1.240234 v -0.9082036 q 0,-0.917968 0.166016,-1.572265 0.175781,-0.664063 0.517578,-1.083985 0.351562,-0.419921 0.878906,-0.615234 0.537109,-0.205078 1.259766,-0.205078 0.361328,0 0.703125,0.07813 0.341797,0.06836 0.488281,0.136719 0.146484,0.05859 0.205078,0.117187 0.05859,0.05859 0.08789,0.15625 0.03906,0.08789 0.04883,0.22461 0.01953,0.126953 0.01953,0.3125 0,0.185546 -0.01953,0.302734 -0.0098,0.107422 -0.03906,0.175781 -0.0293,0.06836 -0.06836,0.09766 -0.03906,0.01953 -0.08789,0.01953 -0.07813,0 -0.175781,-0.03906 -0.09766,-0.04883 -0.244141,-0.09766 -0.146484,-0.05859 -0.351562,-0.09766 -0.195313,-0.04883 -0.458984,-0.04883 -0.361329,0 -0.615235,0.117187 -0.24414,0.117188 -0.40039,0.371094 -0.15625,0.24414 -0.234375,0.644531 -0.06836,0.390625 -0.06836,0.957031 v 0.9570316 h 3.964844 v -2.0898436 q 0,-0.06836 0.03906,-0.126953 0.03906,-0.05859 0.126954,-0.09766 0.08789,-0.04883 0.24414,-0.06836 0.15625,-0.01953 0.400391,-0.01953 0.234375,0 0.390625,0.01953 0.15625,0.01953 0.24414,0.06836 0.08789,0.03906 0.126954,0.09766 0.03906,0.05859 0.03906,0.126953 v 2.0898436 h 2.246094 q 0.07813,0 0.136719,0.039063 0.06836,0.029297 0.107421,0.1171875 0.03906,0.078125 0.05859,0.2050781 0.0293,0.1269532 0.0293,0.3125 0,0.3515625 -0.08789,0.5078125 -0.08789,0.1562501 -0.234375,0.1562501 h -2.255859 v 4.9023437 q 0,0.9082031 0.263672,1.3769531 0.263672,0.4589844 0.966797,0.4589844 0.214844,0 
0.390625,-0.039063 0.185547,-0.048828 0.322265,-0.097656 0.136719,-0.048828 0.234375,-0.087891 0.107422,-0.048828 0.175782,-0.048828 0.05859,0 0.09766,0.029297 0.03906,0.019531 0.05859,0.087891 0.0293,0.068359 0.04883,0.1855469 0.01953,0.1171875 0.01953,0.29296872 0,0.28320313 -0.04883,0.44921875 -0.03906,0.16601563 -0.117188,0.24414063 -0.07813,0.078125 -0.224609,0.14648438 -0.146484,0.06835937 -0.351562,0.10742187 -0.195313,0.04882812 -0.429688,0.078125 -0.224609,0.0292969 -0.439453,0.0292969 -0.693359,0 -1.181641,-0.17578125 Q 26.767578,-0.1953125 26.455078,-0.55664062 26.142578,-0.92773438 25.996094,-1.484375 25.859375,-2.0507813 25.859375,-2.8125 v -5.1367187 z"
|
||||
id="path22297" />
|
||||
<path
|
||||
d="m 45.214844,-0.24414063 q 0,0.078125 -0.03906,0.13671876 -0.03906,0.04882812 -0.126953,0.08789062 -0.08789,0.0390625 -0.244141,0.05859375 -0.15625,0.01953125 -0.390625,0.01953125 -0.24414,0 -0.40039,-0.01953125 -0.15625,-0.01953125 -0.253906,-0.05859375 -0.08789,-0.0390625 -0.126954,-0.08789062 -0.03906,-0.0585938 -0.03906,-0.13671876 V -5.5859375 q 0,-0.5566406 -0.09766,-1.015625 -0.09766,-0.4589844 -0.3125,-0.7910156 -0.214844,-0.3320313 -0.546875,-0.5078125 -0.332032,-0.1757813 -0.78125,-0.1757813 -0.556641,0 -1.123047,0.4296875 -0.556641,0.4296875 -1.230469,1.2597657 v 6.14257807 q 0,0.078125 -0.03906,0.13671876 -0.03906,0.04882812 -0.136719,0.08789062 -0.08789,0.0390625 -0.244141,0.05859375 -0.15625,0.01953125 -0.390625,0.01953125 -0.224609,0 -0.390625,-0.01953125 -0.15625,-0.01953125 -0.253906,-0.05859375 -0.08789,-0.0390625 -0.126953,-0.08789062 -0.0293,-0.0585938 -0.0293,-0.13671876 V -5.5859375 q 0,-0.5566406 -0.107422,-1.015625 -0.107422,-0.4589844 -0.322266,-0.7910156 -0.214843,-0.3320313 -0.546875,-0.5078125 -0.322265,-0.1757813 -0.771484,-0.1757813 -0.556641,0 -1.123047,0.4296875 -0.566406,0.4296875 -1.230469,1.2597657 v 6.14257807 q 0,0.078125 -0.03906,0.13671876 -0.03906,0.04882812 -0.126953,0.08789062 -0.08789,0.0390625 -0.244141,0.05859375 -0.15625,0.01953125 -0.40039,0.01953125 -0.234375,0 -0.390625,-0.01953125 -0.15625,-0.01953125 -0.253907,-0.05859375 -0.08789,-0.0390625 -0.126953,-0.08789062 -0.0293,-0.0585938 -0.0293,-0.13671876 V -9.0332031 q 0,-0.078125 0.0293,-0.1269532 0.0293,-0.058594 0.117188,-0.097656 0.08789,-0.048828 0.224609,-0.058594 0.136719,-0.019531 0.361328,-0.019531 0.214844,0 0.351563,0.019531 0.146484,0.00977 0.224609,0.058594 0.07813,0.039063 0.107422,0.097656 0.03906,0.048828 0.03906,0.1269532 v 1.1621094 q 0.742188,-0.8300782 1.435547,-1.2109376 0.703125,-0.390625 1.416016,-0.390625 0.546875,0 0.976562,0.1269532 0.439454,0.1269531 0.771485,0.3613281 0.332031,0.2246094 0.566406,0.546875 0.234375,0.3125 0.390625,0.703125 
0.439453,-0.4785156 0.830078,-0.8105469 0.400391,-0.3320312 0.761719,-0.5371094 0.371094,-0.2050781 0.712891,-0.2929687 0.351562,-0.097656 0.703125,-0.097656 0.849609,0 1.425781,0.3027344 0.576172,0.2929688 0.927734,0.7910156 0.361328,0.4980469 0.507813,1.1718751 0.15625,0.6640625 0.15625,1.40625 z"
|
||||
id="path22299" />
|
||||
<path
|
||||
d="m 54.775391,-0.234375 q 0,0.1171875 -0.07813,0.17578125 Q 54.619141,0 54.482422,0.02929688 q -0.136719,0.02929687 -0.400391,0.02929687 -0.253906,0 -0.410156,-0.02929687 Q 53.525391,0 53.457031,-0.05859375 53.388672,-0.1171875 53.388672,-0.234375 V -1.1132812 Q 52.8125,-0.49804688 52.099609,-0.15625 51.396484,0.18554687 50.605469,0.18554687 49.912109,0.18554687 49.345703,0 48.789062,-0.17578125 48.388672,-0.51757812 47.998047,-0.859375 47.773437,-1.3574219 q -0.214843,-0.4980468 -0.214843,-1.1328125 0,-0.7421875 0.302734,-1.2890625 0.302734,-0.546875 0.869141,-0.9082031 0.566406,-0.3613281 1.386718,-0.5371094 0.820313,-0.1855468 1.845704,-0.1855468 h 1.210937 V -6.09375 q 0,-0.5078125 -0.107422,-0.8984375 -0.107422,-0.390625 -0.351562,-0.6445312 -0.234375,-0.2636719 -0.615235,-0.3906251 -0.380859,-0.1367187 -0.9375,-0.1367187 -0.595703,0 -1.074218,0.1464844 -0.46875,0.1367187 -0.830079,0.3125 -0.351562,0.1660156 -0.595703,0.3125 -0.234375,0.1367187 -0.351562,0.1367187 -0.07813,0 -0.136719,-0.039063 -0.05859,-0.039063 -0.107422,-0.1171875 -0.03906,-0.078125 -0.05859,-0.1953125 -0.01953,-0.1269531 -0.01953,-0.2734375 0,-0.2441406 0.0293,-0.3808594 0.03906,-0.1464843 0.166016,-0.2734375 0.136718,-0.1269531 0.458984,-0.2929687 0.322266,-0.1757813 0.742188,-0.3125 0.419921,-0.1464844 0.917968,-0.234375 0.498047,-0.097656 1.00586,-0.097656 0.947265,0 1.611328,0.2148438 0.664062,0.2148437 1.074219,0.6347656 0.410156,0.4101563 0.595703,1.0253907 0.185547,0.6152343 0.185547,1.4355468 z m -1.601563,-4.0136719 h -1.376953 q -0.664063,0 -1.152344,0.1171875 -0.488281,0.1074219 -0.810547,0.3320313 -0.322265,0.2148437 -0.478515,0.5273437 -0.146485,0.3027344 -0.146485,0.703125 0,0.6835938 0.429688,1.09375 0.439453,0.4003907 1.220703,0.4003907 0.634766,0 1.171875,-0.3222657 0.546875,-0.3222656 1.142578,-0.9863281 z"
|
||||
id="path22301" />
|
||||
<path
|
||||
d="m 64.208984,-0.4296875 q 0.07813,0.13671875 0.07813,0.234375 0,0.0878906 -0.09766,0.14648438 -0.08789,0.05859375 -0.283203,0.078125 -0.185547,0.02929687 -0.488281,0.02929687 -0.292969,0 -0.46875,-0.01953125 Q 62.783203,0.02929688 62.675781,0 q -0.09766,-0.0390625 -0.15625,-0.08789062 -0.04883,-0.05859375 -0.08789,-0.12695313 l -2.109375,-3.48632815 -2.09961,3.48632815 Q 58.183594,-0.14648437 58.125,-0.08789062 58.076172,-0.0390625 57.96875,0 q -0.09766,0.02929688 -0.263672,0.0390625 -0.166016,0.01953125 -0.439453,0.01953125 -0.283203,0 -0.46875,-0.02929687 -0.175781,-0.01953125 -0.263672,-0.078125 -0.07813,-0.05859375 -0.07813,-0.14648438 0.0098,-0.0976563 0.09766,-0.234375 l 2.734375,-4.2871094 -2.58789,-4.1308594 q -0.07813,-0.1367187 -0.08789,-0.2246093 0,-0.097656 0.08789,-0.15625 0.09766,-0.068359 0.283203,-0.087891 0.195312,-0.019531 0.507812,-0.019531 0.283203,0 0.449219,0.019531 0.175781,0.00977 0.273438,0.039063 0.09766,0.029297 0.146484,0.078125 0.04883,0.048828 0.08789,0.1171875 l 2.001953,3.2714844 2.03125,-3.2714844 q 0.03906,-0.058594 0.08789,-0.1074218 0.04883,-0.048828 0.126953,-0.078125 0.08789,-0.039063 0.234375,-0.048828 0.15625,-0.019531 0.419922,-0.019531 0.283203,0 0.458985,0.019531 0.185547,0.019531 0.273437,0.078125 0.08789,0.048828 0.07813,0.1464844 -0.0098,0.097656 -0.09766,0.2441406 l -2.568359,4.0820313 z"
|
||||
id="path22303" />
|
||||
<path
|
||||
d="m 74.716797,2.9101563 q 0,0.3417968 -0.07813,0.4980468 -0.07813,0.15625 -0.244141,0.15625 h -9.404297 q -0.166015,0 -0.253906,-0.1464843 -0.08789,-0.1464844 -0.08789,-0.4882813 0,-0.3320312 0.08789,-0.4980469 0.08789,-0.1660156 0.253906,-0.1660156 h 9.404297 q 0.15625,0 0.234375,0.15625 0.08789,0.15625 0.08789,0.4882813 z"
|
||||
id="path22305" />
|
||||
<path
|
||||
d="m 78.251953,-1.0644531 q 0,0.69335935 -0.234375,0.92773435 -0.224609,0.22460937 -0.830078,0.22460937 -0.585938,0 -0.810547,-0.22460937 -0.224609,-0.22460938 -0.224609,-0.89843745 0,-0.6933594 0.224609,-0.9179688 0.234375,-0.234375 0.839844,-0.234375 0.585937,0 0.810547,0.2246094 0.224609,0.2246094 0.224609,0.8984375 z"
|
||||
id="path22307" />
|
||||
<path
|
||||
d="m 83.300781,-1.0644531 q 0,0.69335935 -0.234375,0.92773435 -0.224609,0.22460937 -0.830078,0.22460937 -0.585937,0 -0.810547,-0.22460937 -0.224609,-0.22460938 -0.224609,-0.89843745 0,-0.6933594 0.224609,-0.9179688 0.234375,-0.234375 0.839844,-0.234375 0.585937,0 0.810547,0.2246094 0.224609,0.2246094 0.224609,0.8984375 z"
|
||||
id="path22309" />
|
||||
<path
|
||||
d="m 88.349609,-1.0644531 q 0,0.69335935 -0.234375,0.92773435 -0.224609,0.22460937 -0.830078,0.22460937 -0.585937,0 -0.810547,-0.22460937 Q 86.25,-0.36132813 86.25,-1.0351562 q 0,-0.6933594 0.224609,-0.9179688 0.234375,-0.234375 0.839844,-0.234375 0.585938,0 0.810547,0.2246094 0.224609,0.2246094 0.224609,0.8984375 z"
|
||||
id="path22311" />
|
||||
</g>
|
||||
<path
|
||||
d="M645 109.148 645 76.6667 647 76.6667 647 109.148ZM642 78 646 70 650 78Z"
|
||||
fill="#AEAEAE"
|
||||
id="path21825" />
|
||||
<g
|
||||
aria-label="BiasAdd"
|
||||
transform="translate(609.945 101)"
|
||||
id="text21827"
|
||||
style="font-size:20px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 1.3,0 v -14 h 6.06 q 2.34,0 3.62,0.96 1.28,0.96 1.28,2.72 0,1.12 -0.58,1.94 -0.58,0.8 -1.74,1.24 1.3,0.34 1.94,1.18 0.66,0.82 0.66,2.12 0,1.72 -1.34,2.78 Q 9.86,0 7.66,0 Z m 9.24,-10.24 q 0,-1.1 -0.82,-1.66 -0.82,-0.58 -2.4,-0.58 H 3.04 v 4.66 h 4.54 q 1.3,0 2.12,-0.68 0.84,-0.68 0.84,-1.74 z m 0.28,6.38 q 0,-1.12 -0.76,-1.78 Q 9.3,-6.3 8,-6.3 H 3.04 v 4.78 h 4.74 q 1.38,0 2.2,-0.64 0.84,-0.64 0.84,-1.7 z"
|
||||
id="path22314" />
|
||||
<path
|
||||
d="m 14.420002,-12.9 h 1.8 v 1.78 h -1.8 z m 0.08,12.9 v -9.66 h 1.64 V 0 Z"
|
||||
id="path22316" />
|
||||
<path
|
||||
d="m 26.73999,0 h -1.58 v -1.58 q -0.58,0.84 -1.58,1.32 -0.98,0.46 -2.12,0.46 -1.6,0 -2.6,-0.8 -1,-0.82 -1,-2.14 0,-1.46 1.12,-2.2 1.12,-0.76 3.32,-0.76 h 2.84 q -0.02,-1.36 -0.72,-2.1 -0.68,-0.74 -1.96,-0.74 -0.76,0 -1.5,0.32 -0.74,0.32 -1.46,0.98 l -0.86,-1.08 q 0.84,-0.8 1.84,-1.2 1,-0.42 2.14,-0.42 1.94,0 3.02,1.16 1.1,1.14 1.1,3.16 z m -1.6,-4.44 h -2.82 q -1.46,0 -2.16,0.4 -0.7,0.4 -0.7,1.26 0,0.72 0.6,1.2 0.62,0.48 1.58,0.48 1.44,0 2.42,-0.92 1,-0.94 1.08,-2.42 z"
|
||||
id="path22318" />
|
||||
<path
|
||||
d="m 36.519977,-2.82 q 0,1.36 -1.16,2.22 -1.16,0.86 -3.02,0.86 -1.22,0 -2.3,-0.38 -1.06,-0.4 -1.92,-1.18 l 0.84,-1.24 q 0.92,0.76 1.72,1.1 0.8,0.34 1.7,0.34 1.16,0 1.86,-0.44 0.72,-0.46 0.72,-1.18 0,-0.74 -0.6,-1.06 -0.6,-0.34 -1.98,-0.36 -2.02,-0.08 -2.96,-0.74 -0.92,-0.66 -0.92,-2.06 0,-1.28 1.06,-2.12 1.08,-0.86 2.76,-0.86 1.14,0 2.14,0.36 1,0.36 1.82,1.06 l -0.78,1.22 q -0.82,-0.66 -1.58,-0.96 -0.76,-0.3 -1.62,-0.3 -0.98,0 -1.62,0.44 -0.62,0.44 -0.62,1.08 0,0.7 0.56,1.02 0.56,0.32 1.78,0.38 2.18,0.06 3.14,0.72 0.98,0.66 0.98,2.08 z"
|
||||
id="path22320" />
|
||||
<path
|
||||
d="m 40.999991,-4.22 -1.84,4.22 h -1.8 l 6.22,-14 h 1.68 l 6.2,14 h -1.82 l -1.86,-4.22 z m 3.38,-7.72 -2.7,6.2 h 5.44 z"
|
||||
id="path22322" />
|
||||
<path
|
||||
d="m 60.38001,-14 h 1.64 V 0 h -1.62 v -1.56 q -0.66,0.84 -1.6,1.3 -0.92,0.46 -1.98,0.46 -2.02,0 -3.4,-1.4 -1.36,-1.42 -1.36,-3.6 0,-2.2 1.4,-3.64 1.42,-1.46 3.48,-1.46 1,0 1.88,0.42 0.9,0.4 1.56,1.16 z m -3.38,12.78 q 1.46,0 2.44,-1.04 1,-1.06 1,-2.6 0,-1.56 -0.96,-2.58 -0.96,-1.02 -2.4,-1.02 -1.48,0 -2.42,1 -0.94,1 -0.94,2.62 0,1.62 0.92,2.62 0.92,1 2.36,1 z"
|
||||
id="path22324" />
|
||||
<path
|
||||
d="m 71.999998,-14 h 1.64 V 0 h -1.62 v -1.56 q -0.66,0.84 -1.6,1.3 -0.92,0.46 -1.98,0.46 -2.02,0 -3.4,-1.4 -1.36,-1.42 -1.36,-3.6 0,-2.2 1.4,-3.64 1.42,-1.46 3.48,-1.46 1,0 1.88,0.42 0.9,0.4 1.56,1.16 z m -3.38,12.78 q 1.46,0 2.44,-1.04 1,-1.06 1,-2.6 0,-1.56 -0.96,-2.58 -0.96,-1.02 -2.4,-1.02 -1.48,0 -2.42,1 -0.94,1 -0.94,2.62 0,1.62 0.92,2.62 0.92,1 2.36,1 z"
|
||||
id="path22326" />
|
||||
</g>
|
||||
<path
|
||||
d="M503.5 198.5C503.5 189.111 519.394 181.5 539 181.5 558.606 181.5 574.5 189.111 574.5 198.5 574.5 207.889 558.606 215.5 539 215.5 519.394 215.5 503.5 207.889 503.5 198.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21829" />
|
||||
<path
|
||||
d="M538.003 181.75 538.447 176.43 539.666 171.104 541.65 165.921 544.328 160.82 547.635 155.922 551.507 151.226 555.882 146.787 560.823 142.606 566.137 138.81 571.764 135.396 577.774 132.359 584.033 129.83 590.484 127.743 597.139 126.222 603.91 125.378 604.157 127.363 597.436 128.201 597.535 128.183 590.973 129.683 591.058 129.66 584.683 131.722 584.75 131.698 578.562 134.198 578.638 134.163 572.701 137.163 572.769 137.126 567.206 140.501 567.269 140.46 562.019 144.21 562.083 144.159 557.208 148.284 557.275 148.223 552.962 152.598 553.022 152.532 549.209 157.157 549.266 157.081 546.016 161.893 546.073 161.798 543.448 166.798 543.496 166.691 541.559 171.753 541.6 171.619 540.412 176.807 540.434 176.666 539.997 181.916ZM602.456 122.46 610.688 125.958 602.95 130.445Z"
|
||||
fill="#AEAEAE"
|
||||
id="path21831" />
|
||||
<g
|
||||
aria-label="MatMul"
|
||||
transform="translate(505.6 171)"
|
||||
id="text21833"
|
||||
style="font-size:20px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 13.04,-14 h 2.28 V 0 H 13.6 v -11.8 l -4.58,8 h -1.4 l -4.6,-7.98 V 0 H 1.3 v -14 h 2.28 l 4.74,8.18 z"
|
||||
id="path22329" />
|
||||
<path
|
||||
d="m 25.979987,0 h -1.58 v -1.58 q -0.58,0.84 -1.58,1.32 -0.98,0.46 -2.12,0.46 -1.6,0 -2.6,-0.8 -1,-0.82 -1,-2.14 0,-1.46 1.12,-2.2 1.12,-0.76 3.32,-0.76 h 2.84 q -0.02,-1.36 -0.72,-2.1 -0.68,-0.74 -1.96,-0.74 -0.76,0 -1.5,0.32 -0.74,0.32 -1.46,0.98 l -0.86,-1.08 q 0.84,-0.8 1.84,-1.2 1,-0.42 2.14,-0.42 1.94,0 3.02,1.16 1.1,1.14 1.1,3.16 z m -1.6,-4.44 h -2.82 q -1.46,0 -2.16,0.4 -0.7,0.4 -0.7,1.26 0,0.72 0.6,1.2 0.62,0.48 1.58,0.48 1.44,0 2.42,-0.92 1,-0.94 1.08,-2.42 z"
|
||||
id="path22331" />
|
||||
<path
|
||||
d="m 28.419992,-2.58 v -5.64 h -1.32 v -1.44 h 1.32 v -2.96 h 1.62 v 2.96 h 2.24 v 1.44 h -2.24 v 5.5 q 0,0.64 0.28,0.96 0.3,0.3 0.9,0.3 h 1.06 V 0 h -1.3 q -1.34,0 -1.96,-0.62 -0.6,-0.62 -0.6,-1.96 z"
|
||||
id="path22333" />
|
||||
<path
|
||||
d="m 45.899993,-14 h 2.28 V 0 h -1.72 v -11.8 l -4.58,8 h -1.4 l -4.6,-7.98 V 0 h -1.72 v -14 h 2.28 l 4.74,8.18 z"
|
||||
id="path22335" />
|
||||
<path
|
||||
d="m 59.57998,0 h -1.62 v -1.48 q -0.58,0.84 -1.46,1.28 -0.88,0.42 -2,0.42 -1.9,0 -3,-1.24 -1.08,-1.24 -1.08,-3.38 v -5.26 h 1.64 v 5.16 q 0,1.52 0.74,2.4 0.76,0.88 2.1,0.88 1.36,0 2.2,-0.94 0.84,-0.94 0.84,-2.44 v -5.06 h 1.64 z"
|
||||
id="path22337" />
|
||||
<path
|
||||
d="m 61.859965,-14 h 1.64 V 0 h -1.64 z"
|
||||
id="path22339" />
|
||||
</g>
|
||||
<path
|
||||
d="M754.5 282C754.5 272.887 770.618 265.5 790.5 265.5 810.382 265.5 826.5 272.887 826.5 282 826.5 291.113 810.382 298.5 790.5 298.5 770.618 298.5 754.5 291.113 754.5 282Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21835" />
|
||||
<path
|
||||
d="M317.5 272C317.5 262.887 333.618 255.5 353.5 255.5 373.382 255.5 389.5 262.887 389.5 272 389.5 281.113 373.382 288.5 353.5 288.5 333.618 288.5 317.5 281.113 317.5 272Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21837" />
|
||||
<path
|
||||
d="M352.002 254.606 352.196 251.826 352.924 249.048 354.032 246.309 355.523 243.588 357.394 240.878 359.653 238.231 365.239 233.09 372.077 228.088 380.091 223.418 389.275 218.952 399.329 214.742 410.319 210.911 422.053 207.46 434.472 204.449 447.388 201.879 460.742 199.81 474.472 198.242 488.393 197.302 495.75 197.138 495.795 199.137 488.46 199.301 488.505 199.299 474.63 200.236 474.676 200.232 460.988 201.795 461.028 201.789 447.716 203.852 447.758 203.844 434.883 206.407 434.923 206.398 422.548 209.398 422.595 209.385 410.907 212.823 410.954 212.808 400.017 216.62 400.074 216.598 390.074 220.786 390.125 220.763 381 225.2 381.066 225.165 373.128 229.79 373.215 229.733 366.465 234.671 366.552 234.599 361.052 239.662 361.135 239.575 358.948 242.138 359.01 242.057 357.198 244.682 357.252 244.594 355.815 247.219 355.864 247.114 354.802 249.739 354.842 249.617 354.155 252.242 354.185 252.058 353.998 254.746ZM494.35 194.168 502.437 197.989 494.529 202.166Z"
|
||||
fill="#AEAEAE"
|
||||
id="path21839" />
|
||||
<path
|
||||
d="M0.0738717-0.997268 3.53213-0.7411 6.99691 0.108751 10.4925 1.5329 13.9123 3.40408 17.2438 5.83862 20.5527 8.63848 23.8578 11.9436 27.0901 15.6829 30.2528 19.8577 33.2796 24.3979 36.3 29.3061 39.2617 34.5992 44.9138 46.2174 50.1237 59.0225 54.9526 72.9449 59.3411 87.8657 63.1643 103.597 66.4229 120.078 68.9917 137.057 70.9338 154.473 72.1242 172.142 72.3985 183.311 70.3991 183.36 70.1253 172.212 70.1273 172.255 68.9398 154.63 68.9437 154.673 67.0062 137.298 67.0113 137.337 64.4488 120.4 64.4565 120.444 61.2065 104.006 61.2158 104.049 57.4033 88.3612 57.4156 88.4072 53.0406 73.5322 53.0552 73.5777 48.2427 59.7027 48.2612 59.7519 43.0737 47.0019 43.1008 47.0625 37.4758 35.5 37.5023 35.5508 34.5648 30.3008 34.5858 30.3366 31.5858 25.4616 31.6054 25.4922 28.6054 20.9922 28.6404 21.0414 25.5154 16.9164 25.556 16.9665 22.3685 13.279 22.4179 13.3321 19.1679 10.0821 19.2291 10.1384 15.9791 7.38839 16.035 7.43239 12.785 5.05739 12.895 5.12726 9.58249 3.31476 9.6852 3.36359 6.3102 1.98859 6.44928 2.03371 3.13678 1.22121 3.30113 1.24727-0.0738717 0.997268ZM75.3649 181.904 71.5625 190 67.3673 182.101Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(-1.83697e-16 -1 -1 1.83697e-16 763.972 269.555)"
|
||||
id="path21841" />
|
||||
<g
|
||||
aria-label="feature_s..."
|
||||
transform="translate(311.403 245)"
|
||||
id="text21843"
|
||||
style="font-size:20px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 1.7,-8.22 H 0.38 V -9.66 H 1.7 v -1.82 q 0,-1.22 0.64,-1.86 Q 3,-14 4.22,-14 h 1.32 v 1.36 H 4.46 q -0.56,0 -0.84,0.3 -0.28,0.3 -0.28,0.9 v 1.78 h 2.2 v 1.44 H 3.34 V 0 H 1.7 Z"
|
||||
id="path22342" />
|
||||
<path
|
||||
d="m 11.279998,-1.08 q 0.96,0 1.76,-0.4 0.82,-0.42 1.5,-1.26 l 0.98,0.9 q -0.82,1.08 -1.92,1.62 -1.1,0.52 -2.46,0.52 -2.1800004,0 -3.5800004,-1.42 -1.38,-1.44 -1.38,-3.68 0,-2.18 1.42,-3.64 1.42,-1.46 3.5000004,-1.46 2.12,0 3.44,1.38 1.32,1.38 1.32,3.6 0,0.16 -0.02,0.38 -0.02,0.2 -0.04,0.3 H 7.8599976 q 0.16,1.42 1.1,2.3 0.96,0.86 2.3200004,0.86 z m -0.16,-7.44 q -1.2600004,0 -2.1600004,0.82 -0.9,0.82 -1.1,2.2 h 6.3600004 q -0.18,-1.4 -1.02,-2.2 -0.84,-0.82 -2.08,-0.82 z"
|
||||
id="path22344" />
|
||||
<path
|
||||
d="m 25.80001,0 h -1.58 v -1.58 q -0.58,0.84 -1.58,1.32 -0.98,0.46 -2.12,0.46 -1.6,0 -2.6,-0.8 -1,-0.82 -1,-2.14 0,-1.46 1.12,-2.2 1.12,-0.76 3.32,-0.76 h 2.84 q -0.02,-1.36 -0.72,-2.1 -0.68,-0.74 -1.96,-0.74 -0.76,0 -1.5,0.32 -0.74,0.32 -1.46,0.98 l -0.86,-1.08 q 0.84,-0.8 1.84,-1.2 1,-0.42 2.14,-0.42 1.94,0 3.02,1.16 1.1,1.14 1.1,3.16 z m -1.6,-4.44 h -2.82 q -1.46,0 -2.16,0.4 -0.7,0.4 -0.7,1.26 0,0.72 0.6,1.2 0.62,0.48 1.58,0.48 1.44,0 2.42,-0.92 1,-0.94 1.08,-2.42 z"
|
||||
id="path22346" />
|
||||
<path
|
||||
d="m 28.240015,-2.58 v -5.64 h -1.32 v -1.44 h 1.32 v -2.96 h 1.62 v 2.96 h 2.24 v 1.44 h -2.24 v 5.5 q 0,0.64 0.28,0.96 0.3,0.3 0.9,0.3 h 1.06 V 0 h -1.3 q -1.34,0 -1.96,-0.62 -0.6,-0.62 -0.6,-1.96 z"
|
||||
id="path22348" />
|
||||
<path
|
||||
d="m 42.780016,0 h -1.62 v -1.48 q -0.58,0.84 -1.46,1.28 -0.88,0.42 -2,0.42 -1.9,0 -3,-1.24 -1.08,-1.24 -1.08,-3.38 v -5.26 h 1.64 v 5.16 q 0,1.52 0.74,2.4 0.76,0.88 2.1,0.88 1.36,0 2.2,-0.94 0.84,-0.94 0.84,-2.44 v -5.06 h 1.64 z"
|
||||
id="path22350" />
|
||||
<path
|
||||
d="m 44.88,-9.66 h 1.6 v 1.22 q 0.44,-0.64 1.14,-0.96 0.7,-0.32 1.66,-0.32 h 0.88 v 1.54 h -0.9 q -1.38,0 -2.08,0.72 -0.68,0.72 -0.68,2.18 V 0 h -1.62 z"
|
||||
id="path22352" />
|
||||
<path
|
||||
d="m 55.879988,-1.08 q 0.96,0 1.76,-0.4 0.82,-0.42 1.5,-1.26 l 0.98,0.9 q -0.82,1.08 -1.92,1.62 -1.1,0.52 -2.46,0.52 -2.18,0 -3.58,-1.42 -1.38,-1.44 -1.38,-3.68 0,-2.18 1.42,-3.64 1.42,-1.46 3.5,-1.46 2.12,0 3.44,1.38 1.32,1.38 1.32,3.6 0,0.16 -0.02,0.38 -0.02,0.2 -0.04,0.3 h -7.94 q 0.16,1.42 1.1,2.3 0.96,0.86 2.32,0.86 z m -0.16,-7.44 q -1.26,0 -2.16,0.82 -0.9,0.82 -1.1,2.2 h 6.36 q -0.18,-1.4 -1.02,-2.2 -0.84,-0.82 -2.08,-0.82 z"
|
||||
id="path22354" />
|
||||
<path
|
||||
d="M 61.000001,2.58 V 1.12 h 10.16 v 1.46 z"
|
||||
id="path22356" />
|
||||
<path
|
||||
d="m 79.959995,-2.82 q 0,1.36 -1.16,2.22 -1.16,0.86 -3.02,0.86 -1.22,0 -2.3,-0.38 -1.06,-0.4 -1.92,-1.18 l 0.84,-1.24 q 0.92,0.76 1.72,1.1 0.8,0.34 1.7,0.34 1.16,0 1.86,-0.44 0.72,-0.46 0.72,-1.18 0,-0.74 -0.6,-1.06 -0.6,-0.34 -1.98,-0.36 -2.02,-0.08 -2.96,-0.74 -0.92,-0.66 -0.92,-2.06 0,-1.28 1.06,-2.12 1.08,-0.86 2.76,-0.86 1.14,0 2.14,0.36 1,0.36 1.82,1.06 l -0.78,1.22 q -0.82,-0.66 -1.58,-0.96 -0.76,-0.3 -1.62,-0.3 -0.98,0 -1.62,0.44 -0.62,0.44 -0.62,1.08 0,0.7 0.56,1.02 0.56,0.32 1.78,0.38 2.18,0.06 3.14,0.72 0.98,0.66 0.98,2.08 z"
|
||||
id="path22358" />
|
||||
<path
|
||||
d="m 81.500009,-2.02 h 2.02 V 0 h -2.02 z"
|
||||
id="path22360" />
|
||||
<path
|
||||
d="m 85.440019,-2.02 h 2.02 V 0 h -2.02 z"
|
||||
id="path22362" />
|
||||
<path
|
||||
d="m 89.380029,-2.02 h 2.02 V 0 h -2.02 z"
|
||||
id="path22364" />
|
||||
</g>
|
||||
<path
|
||||
d="M759.5 515C759.5 505.887 775.618 498.5 795.5 498.5 815.382 498.5 831.5 505.887 831.5 515 831.5 524.113 815.382 531.5 795.5 531.5 775.618 531.5 759.5 524.113 759.5 515Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21845" />
|
||||
<path
|
||||
d="M794.19 497.589 789.174 304.691 791.173 304.639 796.189 497.537ZM786.209 306.101 790 298 794.207 305.893Z"
|
||||
fill="#AEAEAE"
|
||||
id="path21847" />
|
||||
<path
|
||||
d="M705.5 282C705.5 278.962 707.963 276.5 711 276.5 714.038 276.5 716.5 278.962 716.5 282 716.5 285.038 714.038 287.5 711 287.5 707.963 287.5 705.5 285.038 705.5 282Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21849" />
|
||||
<path
|
||||
d="M0.00498456-0.999988 30.7448-0.846761 30.7348 1.15321-0.00498456 0.999988ZM29.4264-3.85337 37.4064 0.186457 29.3866 4.14653Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 716 281.186)"
|
||||
id="path21851" />
|
||||
<g
|
||||
aria-label="shape"
|
||||
transform="translate(652.441 285)"
|
||||
id="text21853"
|
||||
style="font-size:18px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 7.956,-2.538 q 0,1.224 -1.044,1.998 -1.044,0.774 -2.718,0.774 -1.098,0 -2.07,-0.342 Q 1.17,-0.468 0.396,-1.17 l 0.756,-1.116 q 0.828,0.684 1.548,0.99 0.72,0.306 1.53,0.306 1.044,0 1.674,-0.396 0.648,-0.414 0.648,-1.062 0,-0.666 -0.54,-0.954 Q 5.472,-3.708 4.23,-3.726 2.412,-3.798 1.566,-4.392 0.738,-4.986 0.738,-6.246 q 0,-1.152 0.954,-1.908 0.972,-0.774 2.484,-0.774 1.026,0 1.926,0.324 0.9,0.324 1.638,0.954 L 7.038,-6.552 Q 6.3,-7.146 5.616,-7.416 q -0.684,-0.27 -1.458,-0.27 -0.882,0 -1.458,0.396 -0.558,0.396 -0.558,0.972 0,0.63 0.504,0.918 0.504,0.288 1.602,0.342 1.962,0.054 2.826,0.648 0.882,0.594 0.882,1.872 z"
|
||||
id="path22367" />
|
||||
<path
|
||||
d="m 9.4320121,-12.6 h 1.4579999 v 5.22 q 0.522,-0.738 1.296,-1.134 0.792,-0.396 1.764,-0.396 1.746,0 2.718,1.116 0.99,1.116 0.99,3.06 V 0 h -1.458 v -4.644 q 0,-1.386 -0.684,-2.16 -0.684,-0.792 -1.908,-0.792 -1.224,0 -1.98,0.846 -0.738,0.846 -0.738,2.196 V 0 H 9.4320121 Z"
|
||||
id="path22369" />
|
||||
<path
|
||||
d="m 26.927998,0 h -1.422 v -1.422 q -0.522,0.756 -1.422,1.188 -0.882,0.414 -1.908,0.414 -1.44,0 -2.34,-0.72 -0.9,-0.738 -0.9,-1.926 0,-1.314 1.008,-1.98 1.008,-0.684 2.988,-0.684 h 2.556 q -0.018,-1.224 -0.648,-1.89 -0.612,-0.666 -1.764,-0.666 -0.684,0 -1.35,0.288 -0.666,0.288 -1.314,0.882 l -0.774,-0.972 q 0.756,-0.72 1.656,-1.08 0.9,-0.378 1.926,-0.378 1.746,0 2.718,1.044 0.99,1.026 0.99,2.844 z m -1.44,-3.996 h -2.538 q -1.314,0 -1.944,0.36 -0.63,0.36 -0.63,1.134 0,0.648 0.54,1.08 0.558,0.432 1.422,0.432 1.296,0 2.178,-0.828 0.9,-0.846 0.972,-2.178 z"
|
||||
id="path22371" />
|
||||
<path
|
||||
d="m 30.185987,3.24 h -1.458 V -8.694 h 1.44 v 1.404 q 0.576,-0.756 1.422,-1.17 0.846,-0.432 1.8,-0.432 1.836,0 3.06,1.278 1.224,1.26 1.224,3.24 0,1.998 -1.278,3.294 -1.26,1.278 -3.114,1.278 -0.882,0 -1.71,-0.36 -0.81,-0.378 -1.386,-1.062 z m 3.042,-10.836 q -1.314,0 -2.214,0.954 -0.882,0.936 -0.882,2.34 0,1.386 0.864,2.304 0.882,0.918 2.178,0.918 1.332,0 2.16,-0.882 0.846,-0.9 0.846,-2.376 0,-1.458 -0.828,-2.358 -0.81,-0.9 -2.124,-0.9 z"
|
||||
id="path22373" />
|
||||
<path
|
||||
d="m 43.379976,-0.972 q 0.864,0 1.584,-0.36 0.738,-0.378 1.35,-1.134 l 0.882,0.81 q -0.738,0.972 -1.728,1.458 -0.99,0.468 -2.214,0.468 -1.962,0 -3.222,-1.278 -1.242,-1.296 -1.242,-3.312 0,-1.962 1.278,-3.276 1.278,-1.314 3.15,-1.314 1.908,0 3.096,1.242 1.188,1.242 1.188,3.24 0,0.144 -0.018,0.342 -0.018,0.18 -0.036,0.27 h -7.146 q 0.144,1.278 0.99,2.07 0.864,0.774 2.088,0.774 z m -0.144,-6.696 q -1.134,0 -1.944,0.738 -0.81,0.738 -0.99,1.98 h 5.724 q -0.162,-1.26 -0.918,-1.98 -0.756,-0.738 -1.872,-0.738 z"
|
||||
id="path22375" />
|
||||
</g>
|
||||
<path
|
||||
d="M705.5 514.5C705.5 511.186 707.963 508.5 711 508.5 714.038 508.5 716.5 511.186 716.5 514.5 716.5 517.814 714.038 520.5 711 520.5 707.963 520.5 705.5 517.814 705.5 514.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21855" />
|
||||
<path
|
||||
d="M716 513 751.929 513 751.929 515 716 515ZM750.596 510 758.596 514 750.596 518Z"
|
||||
fill="#AEAEAE"
|
||||
id="path21857" />
|
||||
<g
|
||||
aria-label="concat_dim"
|
||||
transform="translate(609.945 519)"
|
||||
id="text21859"
|
||||
style="font-size:18px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 5.058,0.27 Q 3.06,0.27 1.8,-0.99 0.558,-2.268 0.558,-4.32 q 0,-1.944 1.314,-3.276 1.314,-1.332 3.222,-1.332 1.386,0 2.358,0.648 0.99,0.63 1.494,1.836 L 7.704,-5.85 Q 7.29,-6.75 6.624,-7.2 5.976,-7.65 5.076,-7.65 q -1.278,0 -2.16,0.972 -0.864,0.954 -0.864,2.358 0,1.422 0.864,2.376 0.882,0.936 2.196,0.936 0.846,0 1.566,-0.45 0.738,-0.468 1.188,-1.296 L 9,-2.142 Q 8.478,-1.026 7.434,-0.378 6.39,0.27 5.058,0.27 Z"
|
||||
id="path22378" />
|
||||
<path
|
||||
d="m 19.115998,-4.338 q 0,1.962 -1.314,3.294 -1.314,1.314 -3.24,1.314 -1.926,0 -3.222,-1.314 -1.278,-1.314 -1.278,-3.276 0,-1.962 1.314,-3.276 1.314,-1.332 3.24,-1.332 1.926,0 3.204,1.314 1.296,1.314 1.296,3.276 z m -7.56,0.018 q 0,1.404 0.864,2.358 0.882,0.954 2.178,0.954 1.278,0 2.142,-0.954 0.882,-0.954 0.882,-2.376 0,-1.386 -0.882,-2.34 -0.882,-0.972 -2.16,-0.972 -1.278,0 -2.16,0.954 -0.864,0.954 -0.864,2.376 z"
|
||||
id="path22380" />
|
||||
<path
|
||||
d="m 20.628007,-8.694 h 1.44 v 1.35 q 0.522,-0.756 1.314,-1.152 0.81,-0.414 1.8,-0.414 1.728,0 2.7,1.116 0.972,1.116 0.972,3.06 V 0 h -1.458 v -4.644 q 0,-1.368 -0.684,-2.16 -0.666,-0.792 -1.872,-0.792 -1.242,0 -1.998,0.846 -0.756,0.846 -0.756,2.196 V 0 h -1.458 z"
|
||||
id="path22382" />
|
||||
<path
|
||||
d="m 34.757993,0.27 q -1.998,0 -3.258,-1.26 -1.242,-1.278 -1.242,-3.33 0,-1.944 1.314,-3.276 1.314,-1.332 3.222,-1.332 1.386,0 2.358,0.648 0.99,0.63 1.494,1.836 l -1.242,0.594 q -0.414,-0.9 -1.08,-1.35 -0.648,-0.45 -1.548,-0.45 -1.278,0 -2.16,0.972 -0.864,0.954 -0.864,2.358 0,1.422 0.864,2.376 0.882,0.936 2.196,0.936 0.846,0 1.566,-0.45 0.738,-0.468 1.188,-1.296 l 1.134,0.612 q -0.522,1.116 -1.566,1.764 -1.044,0.648 -2.376,0.648 z"
|
||||
id="path22384" />
|
||||
<path
|
||||
d="m 47.627991,0 h -1.422 v -1.422 q -0.522,0.756 -1.422,1.188 -0.882,0.414 -1.908,0.414 -1.44,0 -2.34,-0.72 -0.9,-0.738 -0.9,-1.926 0,-1.314 1.008,-1.98 1.008,-0.684 2.988,-0.684 h 2.556 q -0.018,-1.224 -0.648,-1.89 -0.612,-0.666 -1.764,-0.666 -0.684,0 -1.35,0.288 -0.666,0.288 -1.314,0.882 l -0.774,-0.972 q 0.756,-0.72 1.656,-1.08 0.9,-0.378 1.926,-0.378 1.746,0 2.718,1.044 0.99,1.026 0.99,2.844 z m -1.44,-3.996 h -2.538 q -1.314,0 -1.944,0.36 -0.63,0.36 -0.63,1.134 0,0.648 0.54,1.08 0.558,0.432 1.422,0.432 1.296,0 2.178,-0.828 0.9,-0.846 0.972,-2.178 z"
|
||||
id="path22386" />
|
||||
<path
|
||||
d="m 49.823995,-2.322 v -5.076 h -1.188 v -1.296 h 1.188 v -2.664 h 1.458 v 2.664 h 2.016 v 1.296 h -2.016 v 4.95 q 0,0.576 0.252,0.864 0.27,0.27 0.81,0.27 h 0.954 V 0 h -1.17 q -1.206,0 -1.764,-0.558 -0.54,-0.558 -0.54,-1.764 z"
|
||||
id="path22388" />
|
||||
<path
|
||||
d="M 53.783996,2.322 V 1.008 h 9.144 v 1.314 z"
|
||||
id="path22390" />
|
||||
<path
|
||||
d="m 70.937991,-12.6 h 1.476 V 0 h -1.458 v -1.404 q -0.594,0.756 -1.44,1.17 -0.828,0.414 -1.782,0.414 -1.818,0 -3.06,-1.26 -1.224,-1.278 -1.224,-3.24 0,-1.98 1.26,-3.276 1.278,-1.314 3.132,-1.314 0.9,0 1.692,0.378 0.81,0.36 1.404,1.044 z m -3.042,11.502 q 1.314,0 2.196,-0.936 0.9,-0.954 0.9,-2.34 0,-1.404 -0.864,-2.322 -0.864,-0.918 -2.16,-0.918 -1.332,0 -2.178,0.9 -0.846,0.9 -0.846,2.358 0,1.458 0.828,2.358 0.828,0.9 2.124,0.9 z"
|
||||
id="path22392" />
|
||||
<path
|
||||
d="m 74.393976,-11.61 h 1.62 v 1.602 h -1.62 z m 0.072,11.61 v -8.694 h 1.476 V 0 Z"
|
||||
id="path22394" />
|
||||
<path
|
||||
d="m 78.011968,-8.694 h 1.44 v 1.044 q 0.486,-0.63 1.098,-0.936 0.63,-0.324 1.404,-0.324 0.9,0 1.602,0.378 0.702,0.378 1.116,1.098 0.486,-0.72 1.224,-1.098 0.756,-0.378 1.692,-0.378 1.53,0 2.43,0.99 0.918,0.99 0.918,2.574 V 0 h -1.476 v -5.256 q 0,-1.098 -0.558,-1.71 -0.54,-0.63 -1.53,-0.63 -0.972,0 -1.584,0.684 -0.594,0.666 -0.594,1.746 V 0 h -1.458 v -5.256 q 0,-1.098 -0.558,-1.71 -0.54,-0.63 -1.53,-0.63 -0.972,0 -1.584,0.684 -0.594,0.666 -0.594,1.746 V 0 h -1.458 z"
|
||||
id="path22396" />
|
||||
</g>
|
||||
<path
|
||||
d="M268.5 422.667C268.5 409.32 279.32 398.5 292.667 398.5L412.333 398.5C425.68 398.5 436.5 409.32 436.5 422.667L436.5 519.333C436.5 532.68 425.68 543.5 412.333 543.5L292.667 543.5C279.32 543.5 268.5 532.68 268.5 519.333Z"
|
||||
stroke="#808080"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#E9E9E9"
|
||||
fill-rule="evenodd"
|
||||
id="path21861" />
|
||||
<g
|
||||
aria-label="Istm"
|
||||
transform="translate(315.751 485)"
|
||||
id="text21863"
|
||||
style="font-size:40px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 2.88,0 V -28 H 6.32 V 0 Z"
|
||||
id="path22399" />
|
||||
<path
|
||||
d="m 26.879982,-5.64 q 0,2.72 -2.32,4.44 -2.32,1.72 -6.04,1.72 -2.44,0 -4.6,-0.76 -2.12,-0.8 -3.84,-2.36 l 1.68,-2.48 q 1.84,1.52 3.44,2.2 1.6,0.68 3.4,0.68 2.32,0 3.72,-0.88 1.44,-0.92 1.44,-2.36 0,-1.48 -1.2,-2.12 -1.2,-0.68 -3.96,-0.72 -4.04,-0.16 -5.92,-1.48 -1.84,-1.32 -1.84,-4.12 0,-2.56 2.12,-4.24 2.16,-1.72 5.52,-1.72 2.28,0 4.28,0.72 2,0.72 3.64,2.12 l -1.56,2.44 q -1.64,-1.32 -3.16,-1.92 -1.52,-0.6 -3.24,-0.6 -1.96,0 -3.24,0.88 -1.24,0.88 -1.24,2.16 0,1.4 1.12,2.04 1.12,0.64 3.56,0.76 4.36,0.12 6.28,1.44 1.96,1.32 1.96,4.16 z"
|
||||
id="path22401" />
|
||||
<path
|
||||
d="m 31.040042,-5.16 v -11.28 h -2.64 v -2.88 h 2.64 v -5.92 h 3.24 v 5.92 h 4.48 v 2.88 h -4.48 v 11 q 0,1.28 0.56,1.92 0.6,0.6 1.8,0.6 h 2.12 V 0 h -2.6 q -2.68,0 -3.92,-1.24 -1.2,-1.24 -1.2,-3.92 z"
|
||||
id="path22403" />
|
||||
<path
|
||||
d="m 42.040044,-19.32 h 3.2 V -17 q 1.08,-1.4 2.44,-2.08 1.4,-0.72 3.12,-0.72 2,0 3.56,0.84 1.56,0.84 2.48,2.44 1.08,-1.6 2.72,-2.44 1.68,-0.84 3.76,-0.84 3.4,0 5.4,2.2 2.04,2.2 2.04,5.72 V 0 h -3.28 v -11.68 q 0,-2.44 -1.24,-3.8 -1.2,-1.4 -3.4,-1.4 -2.16,0 -3.52,1.52 -1.32,1.48 -1.32,3.88 V 0 h -3.24 v -11.68 q 0,-2.44 -1.24,-3.8 -1.2,-1.4 -3.4,-1.4 -2.16,0 -3.52,1.52 -1.32,1.48 -1.32,3.88 V 0 h -3.24 z"
|
||||
id="path22405" />
|
||||
</g>
|
||||
<path
|
||||
d="M328.5 920.667C328.5 907.32 339.32 896.5 352.667 896.5L472.333 896.5C485.68 896.5 496.5 907.32 496.5 920.667L496.5 1017.33C496.5 1030.68 485.68 1041.5 472.333 1041.5L352.667 1041.5C339.32 1041.5 328.5 1030.68 328.5 1017.33Z"
|
||||
stroke="#808080"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#E9E9E9"
|
||||
fill-rule="evenodd"
|
||||
id="path21865" />
|
||||
<g
|
||||
aria-label="char_embed..."
|
||||
transform="translate(350.917 975)"
|
||||
id="text21867"
|
||||
style="font-size:20px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 5.62,0.3 Q 3.4,0.3 2,-1.1 0.62,-2.52 0.62,-4.8 q 0,-2.16 1.46,-3.64 1.46,-1.48 3.58,-1.48 1.54,0 2.62,0.72 1.1,0.7 1.66,2.04 L 8.56,-6.5 Q 8.1,-7.5 7.36,-8 6.64,-8.5 5.64,-8.5 q -1.42,0 -2.4,1.08 -0.96,1.06 -0.96,2.62 0,1.58 0.96,2.64 0.98,1.04 2.44,1.04 0.94,0 1.74,-0.5 0.82,-0.52 1.32,-1.44 L 10,-2.38 Q 9.42,-1.14 8.26,-0.42 7.1,0.3 5.62,0.3 Z"
|
||||
id="path22408" />
|
||||
<path
|
||||
d="m 11.619998,-14 h 1.62 v 5.8 q 0.58,-0.82 1.44,-1.26 0.88,-0.44 1.96,-0.44 1.94,0 3.02,1.24 1.1,1.24 1.1,3.4 V 0 h -1.62 v -5.16 q 0,-1.54 -0.76,-2.4 -0.76,-0.88 -2.12,-0.88 -1.36,0 -2.2,0.94 -0.82,0.94 -0.82,2.44 V 0 h -1.62 z"
|
||||
id="path22410" />
|
||||
<path
|
||||
d="m 31.059982,0 h -1.58 v -1.58 q -0.58,0.84 -1.58,1.32 -0.98,0.46 -2.12,0.46 -1.6,0 -2.6,-0.8 -1,-0.82 -1,-2.14 0,-1.46 1.12,-2.2 1.12,-0.76 3.32,-0.76 h 2.84 q -0.02,-1.36 -0.72,-2.1 -0.68,-0.74 -1.96,-0.74 -0.76,0 -1.5,0.32 -0.74,0.32 -1.46,0.98 l -0.86,-1.08 q 0.84,-0.8 1.84,-1.2 1,-0.42 2.14,-0.42 1.94,0 3.02,1.16 1.1,1.14 1.1,3.16 z m -1.6,-4.44 h -2.82 q -1.46,0 -2.16,0.4 -0.7,0.4 -0.7,1.26 0,0.72 0.6,1.2 0.62,0.48 1.58,0.48 1.44,0 2.42,-0.92 1,-0.94 1.08,-2.42 z"
|
||||
id="path22412" />
|
||||
<path
|
||||
d="m 33.059969,-9.66 h 1.6 v 1.22 q 0.44,-0.64 1.14,-0.96 0.7,-0.32 1.66,-0.32 h 0.88 v 1.54 h -0.9 q -1.38,0 -2.08,0.72 -0.68,0.72 -0.68,2.18 V 0 h -1.62 z"
|
||||
id="path22414" />
|
||||
<path
|
||||
d="M 38.559968,2.58 V 1.12 h 10.16 v 1.46 z"
|
||||
id="path22416" />
|
||||
<path
|
||||
d="m 54.399962,-1.08 q 0.96,0 1.76,-0.4 0.82,-0.42 1.5,-1.26 l 0.98,0.9 q -0.82,1.08 -1.92,1.62 -1.1,0.52 -2.46,0.52 -2.18,0 -3.58,-1.42 -1.38,-1.44 -1.38,-3.68 0,-2.18 1.42,-3.64 1.42,-1.46 3.5,-1.46 2.12,0 3.44,1.38 1.32,1.38 1.32,3.6 0,0.16 -0.02,0.38 -0.02,0.2 -0.04,0.3 h -7.94 q 0.16,1.42 1.1,2.3 0.96,0.86 2.32,0.86 z m -0.16,-7.44 q -1.26,0 -2.16,0.82 -0.9,0.82 -1.1,2.2 h 6.36 q -0.18,-1.4 -1.02,-2.2 -0.84,-0.82 -2.08,-0.82 z"
|
||||
id="path22418" />
|
||||
<path
|
||||
d="m 60.619975,-9.66 h 1.6 v 1.16 q 0.54,-0.7 1.22,-1.04 0.7,-0.36 1.56,-0.36 1,0 1.78,0.42 0.78,0.42 1.24,1.22 0.54,-0.8 1.36,-1.22 0.84,-0.42 1.88,-0.42 1.7,0 2.7,1.1 1.02,1.1 1.02,2.86 V 0 h -1.64 v -5.84 q 0,-1.22 -0.62,-1.9 -0.6,-0.7 -1.7,-0.7 -1.08,0 -1.76,0.76 -0.66,0.74 -0.66,1.94 V 0 h -1.62 v -5.84 q 0,-1.22 -0.62,-1.9 -0.6,-0.7 -1.7,-0.7 -1.08,0 -1.76,0.76 -0.66,0.74 -0.66,1.94 V 0 h -1.62 z"
|
||||
id="path22420" />
|
||||
<path
|
||||
d="m 78.579991,0 h -1.6 v -14 h 1.62 v 5.84 q 0.64,-0.82 1.56,-1.26 0.94,-0.46 1.98,-0.46 2.04,0 3.4,1.42 1.38,1.4 1.38,3.6 0,2.22 -1.4,3.66 -1.4,1.42 -3.46,1.42 -1.02,0 -1.94,-0.44 -0.9,-0.44 -1.54,-1.22 z m 3.4,-8.44 q -1.44,0 -2.44,1.06 -1,1.04 -1,2.6 0,1.52 0.98,2.56 0.98,1.02 2.4,1.02 1.48,0 2.4,-0.98 0.94,-1 0.94,-2.64 0,-1.62 -0.92,-2.62 -0.9,-1 -2.36,-1 z"
|
||||
id="path22422" />
|
||||
<path
|
||||
d="m 93.259978,-1.08 q 0.96,0 1.76,-0.4 0.82,-0.42 1.5,-1.26 l 0.98,0.9 q -0.82,1.08 -1.92,1.62 -1.1,0.52 -2.46,0.52 -2.18,0 -3.58,-1.42 -1.38,-1.44 -1.38,-3.68 0,-2.18 1.42,-3.64 1.42,-1.46 3.5,-1.46 2.12,0 3.44,1.38 1.32,1.38 1.32,3.6 0,0.16 -0.02,0.38 -0.02,0.2 -0.04,0.3 h -7.94 q 0.16,1.42 1.1,2.3 0.96,0.86 2.32,0.86 z m -0.16,-7.44 q -1.26,0 -2.16,0.82 -0.9,0.82 -1.1,2.2 h 6.36 q -0.18,-1.4 -1.02,-2.2 -0.84,-0.82 -2.08,-0.82 z"
|
||||
id="path22424" />
|
||||
<path
|
||||
d="m 107.35999,-14 h 1.64 V 0 h -1.62 v -1.56 q -0.66,0.84 -1.6,1.3 -0.92,0.46 -1.98,0.46 -2.02,0 -3.4,-1.4 -1.359999,-1.42 -1.359999,-3.6 0,-2.2 1.399999,-3.64 1.42,-1.46 3.48,-1.46 1,0 1.88,0.42 0.9,0.4 1.56,1.16 z m -3.38,12.78 q 1.46,0 2.44,-1.04 1,-1.06 1,-2.6 0,-1.56 -0.96,-2.58 -0.96,-1.02 -2.4,-1.02 -1.48,0 -2.42,1 -0.94,1 -0.94,2.62 0,1.62 0.92,2.62 0.92,1 2.36,1 z"
|
||||
id="path22426" />
|
||||
<path
|
||||
d="m 110.99998,-2.02 h 2.02 V 0 h -2.02 z"
|
||||
id="path22428" />
|
||||
<path
|
||||
d="m 114.93999,-2.02 h 2.02 V 0 h -2.02 z"
|
||||
id="path22430" />
|
||||
<path
|
||||
d="m 118.88,-2.02 h 2.02 V 0 h -2.02 z"
|
||||
id="path22432" />
|
||||
</g>
|
||||
<path
|
||||
d="M0.999939-0.0110558 2.14104 103.195 0.141159 103.217-0.999939 0.0110558ZM5.12611 101.829 1.2148 109.873-2.8734 101.917Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 352 397.873)"
|
||||
id="path21869" />
|
||||
<path
|
||||
d="M30.5001 651.5C30.5001 645.425 35.425 640.5 41.5003 640.5L204.5 640.5C210.575 640.5 215.5 645.425 215.5 651.5L215.5 695.5C215.5 701.575 210.575 706.5 204.5 706.5L41.5003 706.5C35.425 706.5 30.5001 701.575 30.5001 695.5Z"
|
||||
stroke="#708541"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#B1D272"
|
||||
fill-rule="evenodd"
|
||||
id="path21871" />
|
||||
<g
|
||||
aria-label="Variable"
|
||||
transform="translate(65.9837 684)"
|
||||
id="text21873"
|
||||
style="font-size:31px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 10.013,0 -9.61,-21.7 h 2.914 l 8.091,18.29 7.998,-18.29 h 2.852 L 12.648,0 Z"
|
||||
id="path22435" />
|
||||
<path
|
||||
d="m 35.618992,0 h -2.449 v -2.449 q -0.899,1.302 -2.449,2.046 -1.519,0.713 -3.286,0.713 -2.48,0 -4.03,-1.24 -1.55,-1.271 -1.55,-3.317 0,-2.263 1.736,-3.41 1.736,-1.178 5.146,-1.178 h 4.402 q -0.031,-2.108 -1.116,-3.255 -1.054,-1.147 -3.038,-1.147 -1.178,0 -2.325,0.496 -1.147,0.496 -2.263,1.519 l -1.333,-1.674 q 1.302,-1.24 2.852,-1.86 1.55,-0.651 3.317,-0.651 3.007,0 4.681,1.798 1.705,1.767 1.705,4.898 z m -2.48,-6.882 h -4.371 q -2.263,0 -3.348,0.62 -1.085,0.62 -1.085,1.953 0,1.116 0.93,1.86 0.961,0.744 2.449,0.744 2.232,0 3.751,-1.426 1.55,-1.457 1.674,-3.751 z"
|
||||
id="path22437" />
|
||||
<path
|
||||
d="m 38.718974,-14.973 h 2.48 v 1.891 q 0.682,-0.992 1.767,-1.488 1.085,-0.496 2.573,-0.496 h 1.364 v 2.387 h -1.395 q -2.139,0 -3.224,1.116 -1.054,1.116 -1.054,3.379 V 0 h -2.511 z"
|
||||
id="path22439" />
|
||||
<path
|
||||
d="m 49.103969,-19.995 h 2.79 v 2.759 h -2.79 z M 49.227969,0 v -14.973 h 2.542 V 0 Z"
|
||||
id="path22441" />
|
||||
<path
|
||||
d="m 68.199952,0 h -2.449 v -2.449 q -0.899,1.302 -2.449,2.046 -1.519,0.713 -3.286,0.713 -2.48,0 -4.03,-1.24 -1.55,-1.271 -1.55,-3.317 0,-2.263 1.736,-3.41 1.736,-1.178 5.146,-1.178 h 4.402 q -0.031,-2.108 -1.116,-3.255 -1.054,-1.147 -3.038,-1.147 -1.178,0 -2.325,0.496 -1.147,0.496 -2.263,1.519 l -1.333,-1.674 q 1.302,-1.24 2.852,-1.86 1.55,-0.651 3.317,-0.651 3.007,0 4.681,1.798 1.705,1.767 1.705,4.898 z m -2.48,-6.882 h -4.371 q -2.263,0 -3.348,0.62 -1.085,0.62 -1.085,1.953 0,1.116 0.93,1.86 0.961,0.744 2.449,0.744 2.232,0 3.751,-1.426 1.55,-1.457 1.674,-3.751 z"
|
||||
id="path22443" />
|
||||
<path
|
||||
d="m 73.779937,0 h -2.48 v -21.7 h 2.511 v 9.052 q 0.992,-1.271 2.418,-1.953 1.457,-0.713 3.069,-0.713 3.162,0 5.27,2.201 2.139,2.17 2.139,5.58 0,3.441 -2.17,5.673 -2.17,2.201 -5.363,2.201 -1.581,0 -3.007,-0.682 -1.395,-0.682 -2.387,-1.891 z m 5.27,-13.082 q -2.232,0 -3.782,1.643 -1.55,1.612 -1.55,4.03 0,2.356 1.519,3.968 1.519,1.581 3.72,1.581 2.294,0 3.72,-1.519 1.457,-1.55 1.457,-4.092 0,-2.511 -1.426,-4.061 -1.395,-1.55 -3.658,-1.55 z"
|
||||
id="path22445" />
|
||||
<path
|
||||
d="m 89.589915,-21.7 h 2.542 V 0 h -2.542 z"
|
||||
id="path22447" />
|
||||
<path
|
||||
d="m 102.91989,-1.674 q 1.488,0 2.728,-0.62 1.271,-0.651 2.325,-1.953 l 1.519,1.395 q -1.271,1.674 -2.976,2.511 -1.705,0.806 -3.813,0.806 -3.378996,0 -5.548996,-2.201 -2.139,-2.232 -2.139,-5.704 0,-3.379 2.201,-5.642 2.201,-2.263 5.424996,-2.263 3.286,0 5.332,2.139 2.046,2.139 2.046,5.58 0,0.248 -0.031,0.589 -0.031,0.31 -0.062,0.465 H 97.618894 q 0.248,2.201 1.705,3.565 1.487996,1.333 3.595996,1.333 z m -0.248,-11.532 q -1.953,0 -3.347996,1.271 -1.395,1.271 -1.705,3.41 h 9.857996 q -0.279,-2.17 -1.581,-3.41 -1.302,-1.271 -3.224,-1.271 z"
|
||||
id="path22449" />
|
||||
</g>
|
||||
<path
|
||||
d="M490.5 651.5C490.5 645.425 495.425 640.5 501.5 640.5L665.5 640.5C671.575 640.5 676.5 645.425 676.5 651.5L676.5 695.5C676.5 701.575 671.575 706.5 665.5 706.5L501.5 706.5C495.425 706.5 490.5 701.575 490.5 695.5Z"
|
||||
stroke="#708541"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#B1D272"
|
||||
fill-rule="evenodd"
|
||||
id="path21875" />
|
||||
<g
|
||||
aria-label="Variable_1"
|
||||
transform="translate(514.455 684)"
|
||||
id="text21877"
|
||||
style="font-size:31px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 10.013,0 -9.61,-21.7 h 2.914 l 8.091,18.29 7.998,-18.29 h 2.852 L 12.648,0 Z"
|
||||
id="path22452" />
|
||||
<path
|
||||
d="m 35.618992,0 h -2.449 v -2.449 q -0.899,1.302 -2.449,2.046 -1.519,0.713 -3.286,0.713 -2.48,0 -4.03,-1.24 -1.55,-1.271 -1.55,-3.317 0,-2.263 1.736,-3.41 1.736,-1.178 5.146,-1.178 h 4.402 q -0.031,-2.108 -1.116,-3.255 -1.054,-1.147 -3.038,-1.147 -1.178,0 -2.325,0.496 -1.147,0.496 -2.263,1.519 l -1.333,-1.674 q 1.302,-1.24 2.852,-1.86 1.55,-0.651 3.317,-0.651 3.007,0 4.681,1.798 1.705,1.767 1.705,4.898 z m -2.48,-6.882 h -4.371 q -2.263,0 -3.348,0.62 -1.085,0.62 -1.085,1.953 0,1.116 0.93,1.86 0.961,0.744 2.449,0.744 2.232,0 3.751,-1.426 1.55,-1.457 1.674,-3.751 z"
|
||||
id="path22454" />
|
||||
<path
|
||||
d="m 38.718974,-14.973 h 2.48 v 1.891 q 0.682,-0.992 1.767,-1.488 1.085,-0.496 2.573,-0.496 h 1.364 v 2.387 h -1.395 q -2.139,0 -3.224,1.116 -1.054,1.116 -1.054,3.379 V 0 h -2.511 z"
|
||||
id="path22456" />
|
||||
<path
|
||||
d="m 49.103969,-19.995 h 2.79 v 2.759 h -2.79 z M 49.227969,0 v -14.973 h 2.542 V 0 Z"
|
||||
id="path22458" />
|
||||
<path
|
||||
d="m 68.199952,0 h -2.449 v -2.449 q -0.899,1.302 -2.449,2.046 -1.519,0.713 -3.286,0.713 -2.48,0 -4.03,-1.24 -1.55,-1.271 -1.55,-3.317 0,-2.263 1.736,-3.41 1.736,-1.178 5.146,-1.178 h 4.402 q -0.031,-2.108 -1.116,-3.255 -1.054,-1.147 -3.038,-1.147 -1.178,0 -2.325,0.496 -1.147,0.496 -2.263,1.519 l -1.333,-1.674 q 1.302,-1.24 2.852,-1.86 1.55,-0.651 3.317,-0.651 3.007,0 4.681,1.798 1.705,1.767 1.705,4.898 z m -2.48,-6.882 h -4.371 q -2.263,0 -3.348,0.62 -1.085,0.62 -1.085,1.953 0,1.116 0.93,1.86 0.961,0.744 2.449,0.744 2.232,0 3.751,-1.426 1.55,-1.457 1.674,-3.751 z"
|
||||
id="path22460" />
|
||||
<path
|
||||
d="m 73.779937,0 h -2.48 v -21.7 h 2.511 v 9.052 q 0.992,-1.271 2.418,-1.953 1.457,-0.713 3.069,-0.713 3.162,0 5.27,2.201 2.139,2.17 2.139,5.58 0,3.441 -2.17,5.673 -2.17,2.201 -5.363,2.201 -1.581,0 -3.007,-0.682 -1.395,-0.682 -2.387,-1.891 z m 5.27,-13.082 q -2.232,0 -3.782,1.643 -1.55,1.612 -1.55,4.03 0,2.356 1.519,3.968 1.519,1.581 3.72,1.581 2.294,0 3.72,-1.519 1.457,-1.55 1.457,-4.092 0,-2.511 -1.426,-4.061 -1.395,-1.55 -3.658,-1.55 z"
|
||||
id="path22462" />
|
||||
<path
|
||||
d="m 89.589915,-21.7 h 2.542 V 0 h -2.542 z"
|
||||
id="path22464" />
|
||||
<path
|
||||
d="m 102.91989,-1.674 q 1.488,0 2.728,-0.62 1.271,-0.651 2.325,-1.953 l 1.519,1.395 q -1.271,1.674 -2.976,2.511 -1.705,0.806 -3.813,0.806 -3.378996,0 -5.548996,-2.201 -2.139,-2.232 -2.139,-5.704 0,-3.379 2.201,-5.642 2.201,-2.263 5.424996,-2.263 3.286,0 5.332,2.139 2.046,2.139 2.046,5.58 0,0.248 -0.031,0.589 -0.031,0.31 -0.062,0.465 H 97.618894 q 0.248,2.201 1.705,3.565 1.487996,1.333 3.595996,1.333 z m -0.248,-11.532 q -1.953,0 -3.347996,1.271 -1.395,1.271 -1.705,3.41 h 9.857996 q -0.279,-2.17 -1.581,-3.41 -1.302,-1.271 -3.224,-1.271 z"
|
||||
id="path22466" />
|
||||
<path
|
||||
d="M 110.85592,3.999 V 1.736 h 15.748 v 2.263 z"
|
||||
id="path22468" />
|
||||
<path
|
||||
d="m 130.7269,-19.344 h -3.348 V -21.7 h 5.952 V 0 h -2.604 z"
|
||||
id="path22470" />
|
||||
</g>
|
||||
<path
|
||||
d="M377.5 673.5C377.5 664.111 393.618 656.5 413.5 656.5 433.382 656.5 449.5 664.111 449.5 673.5 449.5 682.889 433.382 690.5 413.5 690.5 393.618 690.5 377.5 682.889 377.5 673.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21879" />
|
||||
<path
|
||||
d="M377.5 804C377.5 794.887 393.618 787.5 413.5 787.5 433.382 787.5 449.5 794.887 449.5 804 449.5 813.113 433.382 820.5 413.5 820.5 393.618 820.5 377.5 813.113 377.5 804Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21881" />
|
||||
<path
|
||||
d="M411.989 656.982 375.215 549.631 377.107 548.983 413.881 656.333ZM372.809 551.865 374 543 380.377 549.272Z"
|
||||
fill="#AEAEAE"
|
||||
id="path21883" />
|
||||
<path
|
||||
d="M324.5 673.5C324.5 670.186 326.962 667.5 330 667.5 333.038 667.5 335.5 670.186 335.5 673.5 335.5 676.814 333.038 679.5 330 679.5 326.962 679.5 324.5 676.814 324.5 673.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21885" />
|
||||
<path
|
||||
d="M335 672 370.074 672 370.074 674 335 674ZM368.741 669 376.741 673 368.741 677Z"
|
||||
fill="#AEAEAE"
|
||||
id="path21887" />
|
||||
<path
|
||||
d="M324.5 803.5C324.5 800.186 326.962 797.5 330 797.5 333.038 797.5 335.5 800.186 335.5 803.5 335.5 806.814 333.038 809.5 330 809.5 326.962 809.5 324.5 806.814 324.5 803.5Z"
|
||||
stroke="#AEAEAE"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path21889" />
|
||||
<path
|
||||
d="M335 802 370.074 802 370.074 804 335 804ZM368.741 799 376.741 803 368.741 807Z"
|
||||
fill="#AEAEAE"
|
||||
id="path21891" />
|
||||
<g
|
||||
aria-label="split_dimshape"
|
||||
transform="translate(250.246 678)"
|
||||
id="text21895"
|
||||
style="font-size:18px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 7.956,-2.538 q 0,1.224 -1.044,1.998 -1.044,0.774 -2.718,0.774 -1.098,0 -2.07,-0.342 Q 1.17,-0.468 0.396,-1.17 l 0.756,-1.116 q 0.828,0.684 1.548,0.99 0.72,0.306 1.53,0.306 1.044,0 1.674,-0.396 0.648,-0.414 0.648,-1.062 0,-0.666 -0.54,-0.954 Q 5.472,-3.708 4.23,-3.726 2.412,-3.798 1.566,-4.392 0.738,-4.986 0.738,-6.246 q 0,-1.152 0.954,-1.908 0.972,-0.774 2.484,-0.774 1.026,0 1.926,0.324 0.9,0.324 1.638,0.954 L 7.038,-6.552 Q 6.3,-7.146 5.616,-7.416 q -0.684,-0.27 -1.458,-0.27 -0.882,0 -1.458,0.396 -0.558,0.396 -0.558,0.972 0,0.63 0.504,0.918 0.504,0.288 1.602,0.342 1.962,0.054 2.826,0.648 0.882,0.594 0.882,1.872 z"
|
||||
id="path22473" />
|
||||
<path
|
||||
d="M 10.890012,3.24 H 9.4320121 V -8.694 h 1.4399999 v 1.404 q 0.576,-0.756 1.422,-1.17 0.846,-0.432 1.8,-0.432 1.836,0 3.06,1.278 1.224,1.26 1.224,3.24 0,1.998 -1.278,3.294 -1.26,1.278 -3.114,1.278 -0.882,0 -1.71,-0.36 -0.81,-0.378 -1.386,-1.062 z m 3.042,-10.836 q -1.314,0 -2.214,0.954 -0.882,0.936 -0.882,2.34 0,1.386 0.864,2.304 0.882,0.918 2.178,0.918 1.332,0 2.16,-0.882 0.846,-0.9 0.846,-2.376 0,-1.458 -0.828,-2.358 -0.81,-0.9 -2.124,-0.9 z"
|
||||
id="path22475" />
|
||||
<path
|
||||
d="m 20.052001,-12.6 h 1.476 V 0 h -1.476 z"
|
||||
id="path22477" />
|
||||
<path
|
||||
d="m 23.68799,-11.61 h 1.62 v 1.602 h -1.62 z M 23.75999,0 v -8.694 h 1.476 V 0 Z"
|
||||
id="path22479" />
|
||||
<path
|
||||
d="m 27.791978,-2.322 v -5.076 h -1.188 v -1.296 h 1.188 v -2.664 h 1.458 v 2.664 h 2.016 v 1.296 h -2.016 v 4.95 q 0,0.576 0.252,0.864 0.27,0.27 0.81,0.27 h 0.954 V 0 h -1.17 q -1.206,0 -1.764,-0.558 -0.54,-0.558 -0.54,-1.764 z"
|
||||
id="path22481" />
|
||||
<path
|
||||
d="M 31.751979,2.322 V 1.008 h 9.144 v 1.314 z"
|
||||
id="path22483" />
|
||||
<path
|
||||
d="m 48.905974,-12.6 h 1.476 V 0 h -1.458 v -1.404 q -0.594,0.756 -1.44,1.17 -0.828,0.414 -1.782,0.414 -1.818,0 -3.06,-1.26 -1.224,-1.278 -1.224,-3.24 0,-1.98 1.26,-3.276 1.278,-1.314 3.132,-1.314 0.9,0 1.692,0.378 0.81,0.36 1.404,1.044 z m -3.042,11.502 q 1.314,0 2.196,-0.936 0.9,-0.954 0.9,-2.34 0,-1.404 -0.864,-2.322 -0.864,-0.918 -2.16,-0.918 -1.332,0 -2.178,0.9 -0.846,0.9 -0.846,2.358 0,1.458 0.828,2.358 0.828,0.9 2.124,0.9 z"
|
||||
id="path22485" />
|
||||
<path
|
||||
d="m 52.361963,-11.61 h 1.62 v 1.602 h -1.62 z m 0.072,11.61 v -8.694 h 1.476 V 0 Z"
|
||||
id="path22487" />
|
||||
<path
|
||||
d="m 55.979951,-8.694 h 1.44 v 1.044 q 0.486,-0.63 1.098,-0.936 0.63,-0.324 1.404,-0.324 0.9,0 1.602,0.378 0.702,0.378 1.116,1.098 0.486,-0.72 1.224,-1.098 0.756,-0.378 1.692,-0.378 1.53,0 2.43,0.99 0.918,0.99 0.918,2.574 V 0 h -1.476 v -5.256 q 0,-1.098 -0.558,-1.71 -0.54,-0.63 -1.53,-0.63 -0.972,0 -1.584,0.684 -0.594,0.666 -0.594,1.746 V 0 h -1.458 v -5.256 q 0,-1.098 -0.558,-1.71 -0.54,-0.63 -1.53,-0.63 -0.972,0 -1.584,0.684 -0.594,0.666 -0.594,1.746 V 0 h -1.458 z"
|
||||
id="path22489" />
|
||||
<path
|
||||
d="m 28.950699,127.462 q 0,1.224 -1.044,1.998 -1.044,0.774 -2.718,0.774 -1.098,0 -2.07,-0.342 -0.954,-0.36 -1.728,-1.062 l 0.756,-1.116 q 0.828,0.684 1.548,0.99 0.72,0.306 1.53,0.306 1.044,0 1.674,-0.396 0.648,-0.414 0.648,-1.062 0,-0.666 -0.54,-0.954 -0.54,-0.306 -1.782,-0.324 -1.818,-0.072 -2.664,-0.666 -0.828,-0.594 -0.828,-1.854 0,-1.152 0.954,-1.908 0.972,-0.774 2.484,-0.774 1.026,0 1.926,0.324 0.9,0.324 1.638,0.954 l -0.702,1.098 q -0.738,-0.594 -1.422,-0.864 -0.684,-0.27 -1.458,-0.27 -0.882,0 -1.458,0.396 -0.558,0.396 -0.558,0.972 0,0.63 0.504,0.918 0.504,0.288 1.602,0.342 1.962,0.054 2.826,0.648 0.882,0.594 0.882,1.872 z"
|
||||
id="path22491" />
|
||||
<path
|
||||
d="m 30.426712,117.4 h 1.458 v 5.22 q 0.522,-0.738 1.296,-1.134 0.792,-0.396 1.764,-0.396 1.746,0 2.718,1.116 0.99,1.116 0.99,3.06 V 130 h -1.458 v -4.644 q 0,-1.386 -0.684,-2.16 -0.684,-0.792 -1.908,-0.792 -1.224,0 -1.98,0.846 -0.738,0.846 -0.738,2.196 V 130 h -1.458 z"
|
||||
id="path22493" />
|
||||
<path
|
||||
d="m 47.922697,130 h -1.422 v -1.422 q -0.522,0.756 -1.422,1.188 -0.882,0.414 -1.908,0.414 -1.44,0 -2.34,-0.72 -0.9,-0.738 -0.9,-1.926 0,-1.314 1.008,-1.98 1.008,-0.684 2.988,-0.684 h 2.556 q -0.018,-1.224 -0.648,-1.89 -0.612,-0.666 -1.764,-0.666 -0.684,0 -1.35,0.288 -0.666,0.288 -1.314,0.882 l -0.774,-0.972 q 0.756,-0.72 1.656,-1.08 0.9,-0.378 1.926,-0.378 1.746,0 2.718,1.044 0.99,1.026 0.99,2.844 z m -1.44,-3.996 h -2.538 q -1.314,0 -1.944,0.36 -0.63,0.36 -0.63,1.134 0,0.648 0.54,1.08 0.558,0.432 1.422,0.432 1.296,0 2.178,-0.828 0.9,-0.846 0.972,-2.178 z"
|
||||
id="path22495" />
|
||||
<path
|
||||
d="m 51.180686,133.24 h -1.458 v -11.934 h 1.44 v 1.404 q 0.576,-0.756 1.422,-1.17 0.846,-0.432 1.8,-0.432 1.836,0 3.06,1.278 1.224,1.26 1.224,3.24 0,1.998 -1.278,3.294 -1.26,1.278 -3.114,1.278 -0.882,0 -1.71,-0.36 -0.81,-0.378 -1.386,-1.062 z m 3.042,-10.836 q -1.314,0 -2.214,0.954 -0.882,0.936 -0.882,2.34 0,1.386 0.864,2.304 0.882,0.918 2.178,0.918 1.332,0 2.16,-0.882 0.846,-0.9 0.846,-2.376 0,-1.458 -0.828,-2.358 -0.81,-0.9 -2.124,-0.9 z"
|
||||
id="path22497" />
|
||||
<path
|
||||
d="m 64.374675,129.028 q 0.864,0 1.584,-0.36 0.738,-0.378 1.35,-1.134 l 0.882,0.81 q -0.738,0.972 -1.728,1.458 -0.99,0.468 -2.214,0.468 -1.962,0 -3.222,-1.278 -1.242,-1.296 -1.242,-3.312 0,-1.962 1.278,-3.276 1.278,-1.314 3.15,-1.314 1.908,0 3.096,1.242 1.188,1.242 1.188,3.24 0,0.144 -0.018,0.342 -0.018,0.18 -0.036,0.27 h -7.146 q 0.144,1.278 0.99,2.07 0.864,0.774 2.088,0.774 z m -0.144,-6.696 q -1.134,0 -1.944,0.738 -0.81,0.738 -0.99,1.98 h 5.724 q -0.162,-1.26 -0.918,-1.98 -0.756,-0.738 -1.872,-0.738 z"
|
||||
id="path22499" />
|
||||
</g>
|
||||
<path
|
||||
d="M1-1.08244e-06 1.0001 90.3242-0.999902 90.3242-1 1.08244e-06ZM4.0001 88.9909 0.000104987 96.9909-3.9999 88.9909Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 413 786.991)"
|
||||
id="path21897" />
|
||||
<path
|
||||
d="M0.999941-0.0108767 1.74906 68.8582-0.250825 68.8799-0.999941 0.0108767ZM4.73438 67.4923 0.821627 75.5353-3.26515 67.5793Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 412 895.535)"
|
||||
id="path21899" />
|
||||
<path
|
||||
d="M1-1.02461e-06 1.0001 95.7988-0.999902 95.7988-1 1.02461e-06ZM4.0001 94.4654 0.000104987 102.465-3.9999 94.4654Z"
|
||||
fill="#AEAEAE"
|
||||
transform="matrix(1 0 0 -1 412 1143.47)"
|
||||
id="path21901" />
|
||||
<g
|
||||
aria-label="all_embs...Reshapechar_inp..."
|
||||
transform="translate(371.015 646)"
|
||||
id="text21907"
|
||||
style="font-size:20px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 9.36,0 H 7.78 V -1.58 Q 7.2,-0.74 6.2,-0.26 5.22,0.2 4.08,0.2 q -1.6,0 -2.6,-0.8 -1,-0.82 -1,-2.14 0,-1.46 1.12,-2.2 Q 2.72,-5.7 4.92,-5.7 H 7.76 Q 7.74,-7.06 7.04,-7.8 6.36,-8.54 5.08,-8.54 q -0.76,0 -1.5,0.32 -0.74,0.32 -1.46,0.98 L 1.26,-8.32 q 0.84,-0.8 1.84,-1.2 1,-0.42 2.14,-0.42 1.94,0 3.02,1.16 1.1,1.14 1.1,3.16 z M 7.76,-4.44 H 4.94 q -1.46,0 -2.16,0.4 -0.7,0.4 -0.7,1.26 0,0.72 0.6,1.2 0.62,0.48 1.58,0.48 1.44,0 2.42,-0.92 1,-0.94 1.08,-2.42 z"
|
||||
id="path22502" />
|
||||
<path
|
||||
d="m 11.539988,-14 h 1.64 V 0 h -1.64 z"
|
||||
id="path22504" />
|
||||
<path
|
||||
d="m 15.659975,-14 h 1.64 V 0 h -1.64 z"
|
||||
id="path22506" />
|
||||
<path
|
||||
d="M 18.499963,2.58 V 1.12 h 10.16 v 1.46 z"
|
||||
id="path22508" />
|
||||
<path
|
||||
d="m 34.339957,-1.08 q 0.96,0 1.76,-0.4 0.82,-0.42 1.5,-1.26 l 0.98,0.9 q -0.82,1.08 -1.92,1.62 -1.1,0.52 -2.46,0.52 -2.18,0 -3.58,-1.42 -1.38,-1.44 -1.38,-3.68 0,-2.18 1.42,-3.64 1.42,-1.46 3.5,-1.46 2.12,0 3.44,1.38 1.32,1.38 1.32,3.6 0,0.16 -0.02,0.38 -0.02,0.2 -0.04,0.3 h -7.94 q 0.16,1.42 1.1,2.3 0.96,0.86 2.32,0.86 z m -0.16,-7.44 q -1.26,0 -2.16,0.82 -0.9,0.82 -1.1,2.2 h 6.36 q -0.18,-1.4 -1.02,-2.2 -0.84,-0.82 -2.08,-0.82 z"
|
||||
id="path22510" />
|
||||
<path
|
||||
d="m 40.559969,-9.66 h 1.6 v 1.16 q 0.54,-0.7 1.22,-1.04 0.7,-0.36 1.56,-0.36 1,0 1.78,0.42 0.78,0.42 1.24,1.22 0.54,-0.8 1.36,-1.22 0.84,-0.42 1.88,-0.42 1.7,0 2.7,1.1 1.02,1.1 1.02,2.86 V 0 h -1.64 v -5.84 q 0,-1.22 -0.62,-1.9 -0.6,-0.7 -1.7,-0.7 -1.08,0 -1.76,0.76 -0.66,0.74 -0.66,1.94 V 0 h -1.62 v -5.84 q 0,-1.22 -0.62,-1.9 -0.6,-0.7 -1.7,-0.7 -1.08,0 -1.76,0.76 -0.66,0.74 -0.66,1.94 V 0 h -1.62 z"
|
||||
id="path22512" />
|
||||
<path
|
||||
d="m 58.519985,0 h -1.6 v -14 h 1.62 v 5.84 q 0.64,-0.82 1.56,-1.26 0.94,-0.46 1.98,-0.46 2.04,0 3.4,1.42 1.38,1.4 1.38,3.6 0,2.22 -1.4,3.66 -1.4,1.42 -3.46,1.42 -1.02,0 -1.94,-0.44 -0.9,-0.44 -1.54,-1.22 z m 3.4,-8.44 q -1.44,0 -2.44,1.06 -1,1.04 -1,2.6 0,1.52 0.98,2.56 0.98,1.02 2.4,1.02 1.48,0 2.4,-0.98 0.94,-1 0.94,-2.64 0,-1.62 -0.92,-2.62 -0.9,-1 -2.36,-1 z"
|
||||
id="path22514" />
|
||||
<path
|
||||
d="m 76.319973,-2.82 q 0,1.36 -1.16,2.22 -1.16,0.86 -3.02,0.86 -1.22,0 -2.3,-0.38 -1.06,-0.4 -1.92,-1.18 l 0.84,-1.24 q 0.92,0.76 1.72,1.1 0.8,0.34 1.7,0.34 1.16,0 1.86,-0.44 0.72,-0.46 0.72,-1.18 0,-0.74 -0.6,-1.06 -0.6,-0.34 -1.98,-0.36 -2.02,-0.08 -2.96,-0.74 -0.92,-0.66 -0.92,-2.06 0,-1.28 1.06,-2.12 1.08,-0.86 2.76,-0.86 1.14,0 2.14,0.36 1,0.36 1.82,1.06 l -0.78,1.22 q -0.82,-0.66 -1.58,-0.96 -0.76,-0.3 -1.62,-0.3 -0.98,0 -1.62,0.44 -0.62,0.44 -0.62,1.08 0,0.7 0.56,1.02 0.56,0.32 1.78,0.38 2.18,0.06 3.14,0.72 0.98,0.66 0.98,2.08 z"
|
||||
id="path22516" />
|
||||
<path
|
||||
d="m 77.859986,-2.02 h 2.02 V 0 h -2.02 z"
|
||||
id="path22518" />
|
||||
<path
|
||||
d="m 81.799996,-2.02 h 2.02 V 0 h -2.02 z"
|
||||
id="path22520" />
|
||||
<path
|
||||
d="m 85.740006,-2.02 h 2.02 V 0 h -2.02 z"
|
||||
id="path22522" />
|
||||
<path
|
||||
d="m 12.13102,125.1 3.98,5.9 h -2.04 l -3.78,-5.7 H 6.3510201 v 5.7 h -1.72 V 117 H 10.79102 q 2.36,0 3.68,1.1 1.32,1.08 1.32,2.98 0,1.56 -0.96,2.62 -0.96,1.06 -2.7,1.4 z m 1.92,-3.98 q 0,-1.22 -0.88,-1.9 -0.86,-0.7 -2.42,-0.7 H 6.3510201 v 5.28 H 10.61102 q 1.58,0 2.5,-0.72 0.94,-0.74 0.94,-1.96 z"
|
||||
id="path22524" />
|
||||
<path
|
||||
d="m 22.111018,129.92 q 0.96,0 1.76,-0.4 0.82,-0.42 1.5,-1.26 l 0.98,0.9 q -0.82,1.08 -1.92,1.62 -1.1,0.52 -2.46,0.52 -2.18,0 -3.58,-1.42 -1.38,-1.44 -1.38,-3.68 0,-2.18 1.42,-3.64 1.42,-1.46 3.5,-1.46 2.12,0 3.44,1.38 1.32,1.38 1.32,3.6 0,0.16 -0.02,0.38 -0.02,0.2 -0.04,0.3 h -7.94 q 0.16,1.42 1.1,2.3 0.96,0.86 2.32,0.86 z m -0.16,-7.44 q -1.26,0 -2.16,0.82 -0.9,0.82 -1.1,2.2 h 6.36 q -0.18,-1.4 -1.02,-2.2 -0.84,-0.82 -2.08,-0.82 z"
|
||||
id="path22526" />
|
||||
<path
|
||||
d="m 36.11103,128.18 q 0,1.36 -1.16,2.22 -1.16,0.86 -3.02,0.86 -1.22,0 -2.3,-0.38 -1.06,-0.4 -1.92,-1.18 l 0.84,-1.24 q 0.92,0.76 1.72,1.1 0.8,0.34 1.7,0.34 1.16,0 1.86,-0.44 0.72,-0.46 0.72,-1.18 0,-0.74 -0.6,-1.06 -0.6,-0.34 -1.98,-0.36 -2.02,-0.08 -2.96,-0.74 -0.92,-0.66 -0.92,-2.06 0,-1.28 1.06,-2.12 1.08,-0.86 2.76,-0.86 1.14,0 2.14,0.36 1,0.36 1.82,1.06 l -0.78,1.22 q -0.82,-0.66 -1.58,-0.96 -0.76,-0.3 -1.62,-0.3 -0.98,0 -1.62,0.44 -0.62,0.44 -0.62,1.08 0,0.7 0.56,1.02 0.56,0.32 1.78,0.38 2.18,0.06 3.14,0.72 0.98,0.66 0.98,2.08 z"
|
||||
id="path22528" />
|
||||
<path
|
||||
d="m 37.751044,117 h 1.62 v 5.8 q 0.58,-0.82 1.44,-1.26 0.88,-0.44 1.96,-0.44 1.94,0 3.02,1.24 1.1,1.24 1.1,3.4 V 131 h -1.62 v -5.16 q 0,-1.54 -0.76,-2.4 -0.76,-0.88 -2.12,-0.88 -1.36,0 -2.2,0.94 -0.82,0.94 -0.82,2.44 V 131 h -1.62 z"
|
||||
id="path22530" />
|
||||
<path
|
||||
d="m 57.191028,131 h -1.58 v -1.58 q -0.58,0.84 -1.58,1.32 -0.98,0.46 -2.12,0.46 -1.6,0 -2.6,-0.8 -1,-0.82 -1,-2.14 0,-1.46 1.12,-2.2 1.12,-0.76 3.32,-0.76 h 2.84 q -0.02,-1.36 -0.72,-2.1 -0.68,-0.74 -1.96,-0.74 -0.76,0 -1.5,0.32 -0.74,0.32 -1.46,0.98 l -0.86,-1.08 q 0.84,-0.8 1.84,-1.2 1,-0.42 2.14,-0.42 1.94,0 3.02,1.16 1.1,1.14 1.1,3.16 z m -1.6,-4.44 h -2.82 q -1.46,0 -2.16,0.4 -0.7,0.4 -0.7,1.26 0,0.72 0.6,1.2 0.62,0.48 1.58,0.48 1.44,0 2.42,-0.92 1,-0.94 1.08,-2.42 z"
|
||||
id="path22532" />
|
||||
<path
|
||||
d="m 60.811016,134.6 h -1.62 v -13.26 h 1.6 v 1.56 q 0.64,-0.84 1.58,-1.3 0.94,-0.48 2,-0.48 2.04,0 3.4,1.42 1.36,1.4 1.36,3.6 0,2.22 -1.42,3.66 -1.4,1.42 -3.46,1.42 -0.98,0 -1.9,-0.4 -0.9,-0.42 -1.54,-1.18 z m 3.38,-12.04 q -1.46,0 -2.46,1.06 -0.98,1.04 -0.98,2.6 0,1.54 0.96,2.56 0.98,1.02 2.42,1.02 1.48,0 2.4,-0.98 0.94,-1 0.94,-2.64 0,-1.62 -0.92,-2.62 -0.9,-1 -2.36,-1 z"
|
||||
id="path22534" />
|
||||
<path
|
||||
d="m 75.471003,129.92 q 0.96,0 1.76,-0.4 0.82,-0.42 1.5,-1.26 l 0.98,0.9 q -0.82,1.08 -1.92,1.62 -1.1,0.52 -2.46,0.52 -2.18,0 -3.58,-1.42 -1.38,-1.44 -1.38,-3.68 0,-2.18 1.42,-3.64 1.42,-1.46 3.5,-1.46 2.12,0 3.44,1.38 1.32,1.38 1.32,3.6 0,0.16 -0.02,0.38 -0.02,0.2 -0.04,0.3 h -7.94 q 0.16,1.42 1.1,2.3 0.96,0.86 2.32,0.86 z m -0.16,-7.44 q -1.26,0 -2.16,0.82 -0.9,0.82 -1.1,2.2 h 6.36 q -0.18,-1.4 -1.02,-2.2 -0.84,-0.82 -2.08,-0.82 z"
|
||||
id="path22536" />
|
||||
<path
|
||||
d="m 4.820195,484.3 q -2.22,0 -3.62,-1.4 -1.37999999,-1.42 -1.37999999,-3.7 0,-2.16 1.45999999,-3.64 1.46,-1.48 3.58,-1.48 1.54,0 2.62,0.72 1.1,0.7 1.66,2.04 l -1.38,0.66 q -0.46,-1 -1.2,-1.5 -0.72,-0.5 -1.72,-0.5 -1.42,0 -2.4,1.08 -0.96,1.06 -0.96,2.62 0,1.58 0.96,2.64 0.98,1.04 2.44,1.04 0.94,0 1.74,-0.5 0.82,-0.52 1.32,-1.44 l 1.26,0.68 q -0.58,1.24 -1.74,1.96 -1.16,0.72 -2.64,0.72 z"
|
||||
id="path22538" />
|
||||
<path
|
||||
d="m 10.820193,470 h 1.62 v 5.8 q 0.58,-0.82 1.44,-1.26 0.88,-0.44 1.96,-0.44 1.94,0 3.02,1.24 1.1,1.24 1.1,3.4 V 484 h -1.62 v -5.16 q 0,-1.54 -0.76,-2.4 -0.76,-0.88 -2.12,-0.88 -1.36,0 -2.2,0.94 -0.82,0.94 -0.82,2.44 V 484 h -1.62 z"
|
||||
id="path22540" />
|
||||
<path
|
||||
d="m 30.260177,484 h -1.58 v -1.58 q -0.58,0.84 -1.58,1.32 -0.98,0.46 -2.12,0.46 -1.6,0 -2.6,-0.8 -1,-0.82 -1,-2.14 0,-1.46 1.12,-2.2 1.12,-0.76 3.32,-0.76 h 2.84 q -0.02,-1.36 -0.72,-2.1 -0.68,-0.74 -1.96,-0.74 -0.76,0 -1.5,0.32 -0.74,0.32 -1.46,0.98 l -0.86,-1.08 q 0.84,-0.8 1.84,-1.2 1,-0.42 2.14,-0.42 1.94,0 3.02,1.16 1.1,1.14 1.1,3.16 z m -1.6,-4.44 h -2.82 q -1.46,0 -2.16,0.4 -0.7,0.4 -0.7,1.26 0,0.72 0.6,1.2 0.62,0.48 1.58,0.48 1.44,0 2.42,-0.92 1,-0.94 1.08,-2.42 z"
|
||||
id="path22542" />
|
||||
<path
|
||||
d="m 32.260164,474.34 h 1.6 v 1.22 q 0.44,-0.64 1.14,-0.96 0.7,-0.32 1.66,-0.32 h 0.88 v 1.54 h -0.9 q -1.38,0 -2.08,0.72 -0.68,0.72 -0.68,2.18 V 484 h -1.62 z"
|
||||
id="path22544" />
|
||||
<path
|
||||
d="m 37.760163,486.58 v -1.46 h 10.16 v 1.46 z"
|
||||
id="path22546" />
|
||||
<path
|
||||
d="m 49.040157,471.1 h 1.8 v 1.78 h -1.8 z m 0.08,12.9 v -9.66 h 1.64 V 484 Z"
|
||||
id="path22548" />
|
||||
<path
|
||||
d="m 53.060145,474.34 h 1.6 v 1.5 q 0.58,-0.84 1.46,-1.28 0.9,-0.46 2,-0.46 1.92,0 3,1.24 1.08,1.24 1.08,3.4 V 484 h -1.62 v -5.16 q 0,-1.52 -0.76,-2.4 -0.74,-0.88 -2.08,-0.88 -1.38,0 -2.22,0.94 -0.84,0.94 -0.84,2.44 V 484 h -1.62 z"
|
||||
id="path22550" />
|
||||
<path
|
||||
d="m 65.820129,487.6 h -1.62 v -13.26 h 1.6 v 1.56 q 0.64,-0.84 1.58,-1.3 0.94,-0.48 2,-0.48 2.04,0 3.4,1.42 1.36,1.4 1.36,3.6 0,2.22 -1.42,3.66 -1.4,1.42 -3.46,1.42 -0.98,0 -1.9,-0.4 -0.9,-0.42 -1.54,-1.18 z m 3.38,-12.04 q -1.46,0 -2.46,1.06 -0.98,1.04 -0.98,2.6 0,1.54 0.96,2.56 0.98,1.02 2.42,1.02 1.48,0 2.4,-0.98 0.94,-1 0.94,-2.64 0,-1.62 -0.92,-2.62 -0.9,-1 -2.36,-1 z"
|
||||
id="path22552" />
|
||||
<path
|
||||
d="m 75.580117,481.98 h 2.02 V 484 h -2.02 z"
|
||||
id="path22554" />
|
||||
<path
|
||||
d="m 79.520127,481.98 h 2.02 V 484 h -2.02 z"
|
||||
id="path22556" />
|
||||
<path
|
||||
d="m 83.460137,481.98 h 2.02 V 484 h -2.02 z"
|
||||
id="path22558" />
|
||||
</g>
|
||||
<path
|
||||
d="M786.5 651.5C786.5 645.425 791.425 640.5 797.5 640.5L961.5 640.5C967.575 640.5 972.5 645.425 972.5 651.5L972.5 695.5C972.5 701.575 967.575 706.5 961.5 706.5L797.5 706.5C791.425 706.5 786.5 701.575 786.5 695.5Z"
|
||||
stroke="#808080"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#E9E9E9"
|
||||
fill-rule="evenodd"
|
||||
id="path21909" />
|
||||
<g
|
||||
aria-label="softmax"
|
||||
transform="translate(825.683 684)"
|
||||
id="text21911"
|
||||
style="font-size:31px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 13.702,-4.371 q 0,2.108 -1.798,3.441 -1.798,1.333 -4.681,1.333 -1.891,0 -3.565,-0.589 -1.643,-0.62 -2.976,-1.829 l 1.302,-1.922 q 1.426,1.178 2.666,1.705 1.24,0.527 2.635,0.527 1.798,0 2.883,-0.682 1.116,-0.713 1.116,-1.829 0,-1.147 -0.93,-1.643 -0.93,-0.527 -3.069,-0.558 -3.131,-0.124 -4.588,-1.147 -1.426,-1.023 -1.426,-3.193 0,-1.984 1.643,-3.286 1.674,-1.333 4.278,-1.333 1.767,0 3.317,0.558 1.55,0.558 2.821,1.643 l -1.209,1.891 q -1.271,-1.023 -2.449,-1.488 -1.178,-0.465 -2.511,-0.465 -1.519,0 -2.511,0.682 -0.961,0.682 -0.961,1.674 0,1.085 0.868,1.581 0.868,0.496 2.759,0.589 3.379,0.093 4.867,1.116 1.519,1.023 1.519,3.224 z"
|
||||
id="path22561" />
|
||||
<path
|
||||
d="m 31.155021,-7.471 q 0,3.379 -2.263,5.673 -2.263,2.263 -5.58,2.263 -3.317,0 -5.549,-2.263 -2.201,-2.263 -2.201,-5.642 0,-3.379 2.263,-5.642 2.263,-2.294 5.58,-2.294 3.317,0 5.518,2.263 2.232,2.263 2.232,5.642 z m -13.02,0.031 q 0,2.418 1.488,4.061 1.519,1.643 3.751,1.643 2.201,0 3.689,-1.643 1.519,-1.643 1.519,-4.092 0,-2.387 -1.519,-4.03 -1.519,-1.674 -3.72,-1.674 -2.201,0 -3.72,1.643 -1.488,1.643 -1.488,4.092 z"
|
||||
id="path22563" />
|
||||
<path
|
||||
d="m 34.44103,-12.741 h -2.046 v -2.232 h 2.046 v -2.821 q 0,-1.891 0.992,-2.883 1.023,-1.023 2.914,-1.023 h 2.046 v 2.108 h -1.674 q -0.868,0 -1.302,0.465 -0.434,0.465 -0.434,1.395 v 2.759 h 3.41 v 2.232 h -3.41 V 0 h -2.542 z"
|
||||
id="path22565" />
|
||||
<path
|
||||
d="m 43.214032,-3.999 v -8.742 h -2.046 v -2.232 h 2.046 v -4.588 h 2.511 v 4.588 h 3.472 v 2.232 h -3.472 v 8.525 q 0,0.992 0.434,1.488 0.465,0.465 1.395,0.465 h 1.643 V 0 h -2.015 q -2.077,0 -3.038,-0.961 -0.93,-0.961 -0.93,-3.038 z"
|
||||
id="path22567" />
|
||||
<path
|
||||
d="m 51.739035,-14.973 h 2.48 v 1.798 q 0.837,-1.085 1.891,-1.612 1.085,-0.558 2.418,-0.558 1.55,0 2.759,0.651 1.209,0.651 1.922,1.891 0.837,-1.24 2.108,-1.891 1.302,-0.651 2.914,-0.651 2.635,0 4.185,1.705 1.581,1.705 1.581,4.433 V 0 h -2.542 v -9.052 q 0,-1.891 -0.961,-2.945 -0.93,-1.085 -2.635,-1.085 -1.674,0 -2.728,1.178 -1.023,1.147 -1.023,3.007 V 0 h -2.511 v -9.052 q 0,-1.891 -0.961,-2.945 -0.93,-1.085 -2.635,-1.085 -1.674,0 -2.728,1.178 -1.023,1.147 -1.023,3.007 V 0 h -2.511 z"
|
||||
id="path22569" />
|
||||
<path
|
||||
d="m 89.962056,0 h -2.449 v -2.449 q -0.899,1.302 -2.449,2.046 -1.519,0.713 -3.286,0.713 -2.48,0 -4.03,-1.24 -1.55,-1.271 -1.55,-3.317 0,-2.263 1.736,-3.41 1.736,-1.178 5.146,-1.178 h 4.402 q -0.031,-2.108 -1.116,-3.255 -1.054,-1.147 -3.038,-1.147 -1.178,0 -2.325,0.496 -1.147,0.496 -2.263,1.519 l -1.333,-1.674 q 1.302,-1.24 2.852,-1.86 1.55,-0.651 3.317,-0.651 3.007,0 4.681,1.798 1.705,1.767 1.705,4.898 z m -2.48,-6.882 h -4.371 q -2.263,0 -3.348,0.62 -1.085,0.62 -1.085,1.953 0,1.116 0.93,1.86 0.961,0.744 2.449,0.744 2.232,0 3.751,-1.426 1.55,-1.457 1.674,-3.751 z"
|
||||
id="path22571" />
|
||||
<path
|
||||
d="m 94.519037,0 h -2.821 l 5.828,-7.719 -5.425,-7.254 h 2.976 l 3.937,5.487 3.999003,-5.487 h 2.79 l -5.425,7.223 5.921,7.75 h -2.945 l -4.402003,-6.045 z"
|
||||
id="path22573" />
|
||||
</g>
|
||||
<g
|
||||
aria-label="8 tensors"
|
||||
transform="matrix(0.284936 0.958547 -0.958547 0.284936 809.068 560)"
|
||||
id="text21913"
|
||||
style="font-size:13px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif;fill:#ffffff">
|
||||
<path
|
||||
d="m 7.345,-2.444 q 0,1.17 -0.975,1.911 -0.962,0.728 -2.483,0.728 -1.508,0 -2.47,-0.728 -0.949,-0.728 -0.949,-1.885 0,-0.78 0.455,-1.378 0.468,-0.598 1.3,-0.923 -0.728,-0.299 -1.131,-0.845 -0.39,-0.559 -0.39,-1.248 0,-1.118 0.897,-1.807 0.897,-0.689 2.327,-0.689 1.417,0 2.301,0.689 0.884,0.676 0.884,1.781 0,0.689 -0.403,1.248 -0.39,0.546 -1.118,0.858 0.832,0.312 1.287,0.91 0.468,0.598 0.468,1.378 z M 1.807,-6.773 q 0,0.715 0.585,1.157 0.585,0.442 1.521,0.442 0.936,0 1.508,-0.442 0.585,-0.442 0.585,-1.183 0,-0.689 -0.598,-1.118 -0.585,-0.429 -1.508,-0.429 -0.923,0 -1.508,0.442 -0.585,0.429 -0.585,1.131 z m -0.234,4.316 q 0,0.741 0.65,1.222 0.663,0.468 1.69,0.468 1.027,0 1.677,-0.468 0.65,-0.481 0.65,-1.248 0,-0.78 -0.65,-1.248 -0.65,-0.481 -1.69,-0.481 -1.04,0 -1.69,0.481 -0.637,0.481 -0.637,1.274 z"
|
||||
id="path22576" />
|
||||
<path
|
||||
d="m 11.244998,-1.677 v -3.666 h -0.858 v -0.936 h 0.858 v -1.924 h 1.053 v 1.924 h 1.456 v 0.936 h -1.456 v 3.575 q 0,0.416 0.182,0.624 0.195,0.195 0.585,0.195 h 0.689 V 0 h -0.845 q -0.871,0 -1.274,-0.403 -0.39,-0.403 -0.39,-1.274 z"
|
||||
id="path22578" />
|
||||
<path
|
||||
d="m 17.745006,-0.702 q 0.624,0 1.144,-0.26 0.533,-0.273 0.975,-0.819 l 0.637,0.585 q -0.533,0.702 -1.248,1.053 -0.715,0.338 -1.599,0.338 -1.417,0 -2.327,-0.923 -0.897,-0.936 -0.897,-2.392 0,-1.417 0.923,-2.366 0.923,-0.949 2.275,-0.949 1.378,0 2.236,0.897 0.858,0.897 0.858,2.34 0,0.104 -0.013,0.247 -0.013,0.13 -0.026,0.195 h -5.161 q 0.104,0.923 0.715,1.495 0.624,0.559 1.508,0.559 z m -0.104,-4.836 q -0.819,0 -1.404,0.533 -0.585,0.533 -0.715,1.43 h 4.134 q -0.117,-0.91 -0.663,-1.43 -0.546,-0.533 -1.352,-0.533 z"
|
||||
id="path22580" />
|
||||
<path
|
||||
d="m 21.788014,-6.279 h 1.04 v 0.975 q 0.377,-0.546 0.949,-0.832 0.585,-0.299 1.3,-0.299 1.248,0 1.95,0.806 0.702,0.806 0.702,2.21 V 0 h -1.053 v -3.354 q 0,-0.988 -0.494,-1.56 -0.481,-0.572 -1.352,-0.572 -0.897,0 -1.443,0.611 -0.546,0.611 -0.546,1.586 V 0 h -1.053 z"
|
||||
id="path22582" />
|
||||
<path
|
||||
d="m 34.086004,-1.833 q 0,0.884 -0.754,1.443 -0.754,0.559 -1.963,0.559 -0.793,0 -1.495,-0.247 -0.689,-0.26 -1.248,-0.767 l 0.546,-0.806 q 0.598,0.494 1.118,0.715 0.52,0.221 1.105,0.221 0.754,0 1.209,-0.286 0.468,-0.299 0.468,-0.767 0,-0.481 -0.39,-0.689 -0.39,-0.221 -1.287,-0.234 -1.313,-0.052 -1.924,-0.481 -0.598,-0.429 -0.598,-1.339 0,-0.832 0.689,-1.378 0.702,-0.559 1.794,-0.559 0.741,0 1.391,0.234 0.65,0.234 1.183,0.689 l -0.507,0.793 q -0.533,-0.429 -1.027,-0.624 -0.494,-0.195 -1.053,-0.195 -0.637,0 -1.053,0.286 -0.403,0.286 -0.403,0.702 0,0.455 0.364,0.663 0.364,0.208 1.157,0.247 1.417,0.039 2.041,0.468 0.637,0.429 0.637,1.352 z"
|
||||
id="path22584" />
|
||||
<path
|
||||
d="m 41.405013,-3.133 q 0,1.417 -0.949,2.379 -0.949,0.949 -2.34,0.949 -1.391,0 -2.327,-0.949 -0.923,-0.949 -0.923,-2.366 0,-1.417 0.949,-2.366 0.949,-0.962 2.34,-0.962 1.391,0 2.314,0.949 0.936,0.949 0.936,2.366 z m -5.46,0.013 q 0,1.014 0.624,1.703 0.637,0.689 1.573,0.689 0.923,0 1.547,-0.689 0.637,-0.689 0.637,-1.716 0,-1.001 -0.637,-1.69 -0.637,-0.702 -1.56,-0.702 -0.923,0 -1.56,0.689 -0.624,0.689 -0.624,1.716 z"
|
||||
id="path22586" />
|
||||
<path
|
||||
d="m 42.497022,-6.279 h 1.04 v 0.793 q 0.286,-0.416 0.741,-0.624 0.455,-0.208 1.079,-0.208 h 0.572 v 1.001 h -0.585 q -0.897,0 -1.352,0.468 -0.442,0.468 -0.442,1.417 V 0 h -1.053 z"
|
||||
id="path22588" />
|
||||
<path
|
||||
d="m 51.844019,-1.833 q 0,0.884 -0.754,1.443 -0.754,0.559 -1.963,0.559 -0.793,0 -1.495,-0.247 -0.689,-0.26 -1.248,-0.767 l 0.546,-0.806 q 0.598,0.494 1.118,0.715 0.52,0.221 1.105,0.221 0.754,0 1.209,-0.286 0.468,-0.299 0.468,-0.767 0,-0.481 -0.39,-0.689 -0.39,-0.221 -1.287,-0.234 -1.313,-0.052 -1.924,-0.481 -0.598,-0.429 -0.598,-1.339 0,-0.832 0.689,-1.378 0.702,-0.559 1.794,-0.559 0.741,0 1.391,0.234 0.65,0.234 1.183,0.689 l -0.507,0.793 q -0.533,-0.429 -1.027,-0.624 -0.494,-0.195 -1.053,-0.195 -0.637,0 -1.053,0.286 -0.403,0.286 -0.403,0.702 0,0.455 0.364,0.663 0.364,0.208 1.157,0.247 1.417,0.039 2.041,0.468 0.637,0.429 0.637,1.352 z"
|
||||
id="path22590" />
|
||||
</g>
|
||||
<g
|
||||
aria-label="3 tensors"
|
||||
transform="matrix(0.652178 0.758066 -0.758066 0.652178 485.038 462)"
|
||||
id="text21915"
|
||||
style="font-size:13px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 3.783,-0.78 q 0.923,0 1.534,-0.507 0.624,-0.52 0.624,-1.287 0,-0.806 -0.637,-1.274 Q 4.68,-4.316 3.562,-4.316 H 2.99 v -0.923 h 0.546 q 1.027,0 1.625,-0.442 0.598,-0.455 0.598,-1.157 0,-0.65 -0.546,-1.066 -0.546,-0.416 -1.43,-0.416 -0.936,0 -1.495,0.507 -0.546,0.494 -0.611,1.365 H 0.611 q 0.039,-1.222 0.923,-2.041 0.897,-0.819 2.301,-0.819 1.339,0 2.184,0.689 0.858,0.676 0.858,1.69 0,0.754 -0.442,1.313 -0.429,0.559 -1.248,0.845 0.936,0.26 1.404,0.819 0.468,0.559 0.468,1.43 0,1.157 -0.936,1.95 -0.936,0.78 -2.366,0.78 -1.521,0 -2.418,-0.819 -0.884,-0.832 -0.897,-2.275 h 1.053 q 0.052,1.014 0.637,1.56 0.598,0.546 1.651,0.546 z"
|
||||
id="path22593" />
|
||||
<path
|
||||
d="m 10.971999,-1.677 v -3.666 h -0.858 v -0.936 h 0.858 v -1.924 h 1.053 v 1.924 h 1.456 v 0.936 h -1.456 v 3.575 q 0,0.416 0.182,0.624 0.195,0.195 0.585,0.195 h 0.689 V 0 h -0.845 q -0.871,0 -1.274,-0.403 -0.39,-0.403 -0.39,-1.274 z"
|
||||
id="path22595" />
|
||||
<path
|
||||
d="m 17.472007,-0.702 q 0.624,0 1.144,-0.26 0.533,-0.273 0.975,-0.819 l 0.637,0.585 q -0.533,0.702 -1.248,1.053 -0.715,0.338 -1.599,0.338 -1.417,0 -2.327,-0.923 -0.897,-0.936 -0.897,-2.392 0,-1.417 0.923,-2.366 0.923,-0.949 2.275,-0.949 1.378,0 2.236,0.897 0.858,0.897 0.858,2.34 0,0.104 -0.013,0.247 -0.013,0.13 -0.026,0.195 h -5.161 q 0.104,0.923 0.715,1.495 0.624,0.559 1.508,0.559 z m -0.104,-4.836 q -0.819,0 -1.404,0.533 -0.585,0.533 -0.715,1.43 h 4.134 q -0.117,-0.91 -0.663,-1.43 -0.546,-0.533 -1.352,-0.533 z"
|
||||
id="path22597" />
|
||||
<path
|
||||
d="m 21.515015,-6.279 h 1.04 v 0.975 q 0.377,-0.546 0.949,-0.832 0.585,-0.299 1.3,-0.299 1.248,0 1.95,0.806 0.702,0.806 0.702,2.21 V 0 h -1.053 v -3.354 q 0,-0.988 -0.494,-1.56 -0.481,-0.572 -1.352,-0.572 -0.897,0 -1.443,0.611 -0.546,0.611 -0.546,1.586 V 0 h -1.053 z"
|
||||
id="path22599" />
|
||||
<path
|
||||
d="m 33.813005,-1.833 q 0,0.884 -0.754,1.443 -0.754,0.559 -1.963,0.559 -0.793,0 -1.495,-0.247 -0.689,-0.26 -1.248,-0.767 l 0.546,-0.806 q 0.598,0.494 1.118,0.715 0.52,0.221 1.105,0.221 0.754,0 1.209,-0.286 0.468,-0.299 0.468,-0.767 0,-0.481 -0.39,-0.689 -0.39,-0.221 -1.287,-0.234 -1.313,-0.052 -1.924,-0.481 -0.598,-0.429 -0.598,-1.339 0,-0.832 0.689,-1.378 0.702,-0.559 1.794,-0.559 0.741,0 1.391,0.234 0.65,0.234 1.183,0.689 l -0.507,0.793 q -0.533,-0.429 -1.027,-0.624 -0.494,-0.195 -1.053,-0.195 -0.637,0 -1.053,0.286 -0.403,0.286 -0.403,0.702 0,0.455 0.364,0.663 0.364,0.208 1.157,0.247 1.417,0.039 2.041,0.468 0.637,0.429 0.637,1.352 z"
|
||||
id="path22601" />
|
||||
<path
|
||||
d="m 41.132014,-3.133 q 0,1.417 -0.949,2.379 -0.949,0.949 -2.34,0.949 -1.391,0 -2.327,-0.949 -0.923,-0.949 -0.923,-2.366 0,-1.417 0.949,-2.366 0.949,-0.962 2.34,-0.962 1.391,0 2.314,0.949 0.936,0.949 0.936,2.366 z m -5.46,0.013 q 0,1.014 0.624,1.703 0.637,0.689 1.573,0.689 0.923,0 1.547,-0.689 0.637,-0.689 0.637,-1.716 0,-1.001 -0.637,-1.69 -0.637,-0.702 -1.56,-0.702 -0.923,0 -1.56,0.689 -0.624,0.689 -0.624,1.716 z"
|
||||
id="path22603" />
|
||||
<path
|
||||
d="m 42.224019,-6.279 h 1.04 v 0.793 q 0.286,-0.416 0.741,-0.624 0.455,-0.208 1.079,-0.208 h 0.572 v 1.001 h -0.585 q -0.897,0 -1.352,0.468 -0.442,0.468 -0.442,1.417 V 0 h -1.053 z"
|
||||
id="path22605" />
|
||||
<path
|
||||
d="m 51.57102,-1.833 q 0,0.884 -0.754,1.443 -0.754,0.559 -1.963,0.559 -0.793,0 -1.495,-0.247 -0.689,-0.26 -1.248,-0.767 l 0.546,-0.806 q 0.598,0.494 1.118,0.715 0.52,0.221 1.105,0.221 0.754,0 1.209,-0.286 0.468,-0.299 0.468,-0.767 0,-0.481 -0.39,-0.689 -0.39,-0.221 -1.287,-0.234 -1.313,-0.052 -1.924,-0.481 -0.598,-0.429 -0.598,-1.339 0,-0.832 0.689,-1.378 0.702,-0.559 1.794,-0.559 0.741,0 1.391,0.234 0.65,0.234 1.183,0.689 l -0.507,0.793 q -0.533,-0.429 -1.027,-0.624 -0.494,-0.195 -1.053,-0.195 -0.637,0 -1.053,0.286 -0.403,0.286 -0.403,0.702 0,0.455 0.364,0.663 0.364,0.208 1.157,0.247 1.417,0.039 2.041,0.468 0.637,0.429 0.637,1.352 z"
|
||||
id="path22607" />
|
||||
</g>
|
||||
<g
|
||||
aria-label="3 tensors"
|
||||
transform="matrix(0.775552 -0.631284 0.631284 0.775552 178.391 506)"
|
||||
id="text21917"
|
||||
style="font-size:13px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 3.783,-0.78 q 0.923,0 1.534,-0.507 0.624,-0.52 0.624,-1.287 0,-0.806 -0.637,-1.274 Q 4.68,-4.316 3.562,-4.316 H 2.99 v -0.923 h 0.546 q 1.027,0 1.625,-0.442 0.598,-0.455 0.598,-1.157 0,-0.65 -0.546,-1.066 -0.546,-0.416 -1.43,-0.416 -0.936,0 -1.495,0.507 -0.546,0.494 -0.611,1.365 H 0.611 q 0.039,-1.222 0.923,-2.041 0.897,-0.819 2.301,-0.819 1.339,0 2.184,0.689 0.858,0.676 0.858,1.69 0,0.754 -0.442,1.313 -0.429,0.559 -1.248,0.845 0.936,0.26 1.404,0.819 0.468,0.559 0.468,1.43 0,1.157 -0.936,1.95 -0.936,0.78 -2.366,0.78 -1.521,0 -2.418,-0.819 -0.884,-0.832 -0.897,-2.275 h 1.053 q 0.052,1.014 0.637,1.56 0.598,0.546 1.651,0.546 z"
|
||||
id="path22610" />
|
||||
<path
|
||||
d="m 10.971999,-1.677 v -3.666 h -0.858 v -0.936 h 0.858 v -1.924 h 1.053 v 1.924 h 1.456 v 0.936 h -1.456 v 3.575 q 0,0.416 0.182,0.624 0.195,0.195 0.585,0.195 h 0.689 V 0 h -0.845 q -0.871,0 -1.274,-0.403 -0.39,-0.403 -0.39,-1.274 z"
|
||||
id="path22612" />
|
||||
<path
|
||||
d="m 17.472007,-0.702 q 0.624,0 1.144,-0.26 0.533,-0.273 0.975,-0.819 l 0.637,0.585 q -0.533,0.702 -1.248,1.053 -0.715,0.338 -1.599,0.338 -1.417,0 -2.327,-0.923 -0.897,-0.936 -0.897,-2.392 0,-1.417 0.923,-2.366 0.923,-0.949 2.275,-0.949 1.378,0 2.236,0.897 0.858,0.897 0.858,2.34 0,0.104 -0.013,0.247 -0.013,0.13 -0.026,0.195 h -5.161 q 0.104,0.923 0.715,1.495 0.624,0.559 1.508,0.559 z m -0.104,-4.836 q -0.819,0 -1.404,0.533 -0.585,0.533 -0.715,1.43 h 4.134 q -0.117,-0.91 -0.663,-1.43 -0.546,-0.533 -1.352,-0.533 z"
|
||||
id="path22614" />
|
||||
<path
|
||||
d="m 21.515015,-6.279 h 1.04 v 0.975 q 0.377,-0.546 0.949,-0.832 0.585,-0.299 1.3,-0.299 1.248,0 1.95,0.806 0.702,0.806 0.702,2.21 V 0 h -1.053 v -3.354 q 0,-0.988 -0.494,-1.56 -0.481,-0.572 -1.352,-0.572 -0.897,0 -1.443,0.611 -0.546,0.611 -0.546,1.586 V 0 h -1.053 z"
|
||||
id="path22616" />
|
||||
<path
|
||||
d="m 33.813005,-1.833 q 0,0.884 -0.754,1.443 -0.754,0.559 -1.963,0.559 -0.793,0 -1.495,-0.247 -0.689,-0.26 -1.248,-0.767 l 0.546,-0.806 q 0.598,0.494 1.118,0.715 0.52,0.221 1.105,0.221 0.754,0 1.209,-0.286 0.468,-0.299 0.468,-0.767 0,-0.481 -0.39,-0.689 -0.39,-0.221 -1.287,-0.234 -1.313,-0.052 -1.924,-0.481 -0.598,-0.429 -0.598,-1.339 0,-0.832 0.689,-1.378 0.702,-0.559 1.794,-0.559 0.741,0 1.391,0.234 0.65,0.234 1.183,0.689 l -0.507,0.793 q -0.533,-0.429 -1.027,-0.624 -0.494,-0.195 -1.053,-0.195 -0.637,0 -1.053,0.286 -0.403,0.286 -0.403,0.702 0,0.455 0.364,0.663 0.364,0.208 1.157,0.247 1.417,0.039 2.041,0.468 0.637,0.429 0.637,1.352 z"
|
||||
id="path22618" />
|
||||
<path
|
||||
d="m 41.132014,-3.133 q 0,1.417 -0.949,2.379 -0.949,0.949 -2.34,0.949 -1.391,0 -2.327,-0.949 -0.923,-0.949 -0.923,-2.366 0,-1.417 0.949,-2.366 0.949,-0.962 2.34,-0.962 1.391,0 2.314,0.949 0.936,0.949 0.936,2.366 z m -5.46,0.013 q 0,1.014 0.624,1.703 0.637,0.689 1.573,0.689 0.923,0 1.547,-0.689 0.637,-0.689 0.637,-1.716 0,-1.001 -0.637,-1.69 -0.637,-0.702 -1.56,-0.702 -0.923,0 -1.56,0.689 -0.624,0.689 -0.624,1.716 z"
|
||||
id="path22620" />
|
||||
<path
|
||||
d="m 42.224019,-6.279 h 1.04 v 0.793 q 0.286,-0.416 0.741,-0.624 0.455,-0.208 1.079,-0.208 h 0.572 v 1.001 h -0.585 q -0.897,0 -1.352,0.468 -0.442,0.468 -0.442,1.417 V 0 h -1.053 z"
|
||||
id="path22622" />
|
||||
<path
|
||||
d="m 51.57102,-1.833 q 0,0.884 -0.754,1.443 -0.754,0.559 -1.963,0.559 -0.793,0 -1.495,-0.247 -0.689,-0.26 -1.248,-0.767 l 0.546,-0.806 q 0.598,0.494 1.118,0.715 0.52,0.221 1.105,0.221 0.754,0 1.209,-0.286 0.468,-0.299 0.468,-0.767 0,-0.481 -0.39,-0.689 -0.39,-0.221 -1.287,-0.234 -1.313,-0.052 -1.924,-0.481 -0.598,-0.429 -0.598,-1.339 0,-0.832 0.689,-1.378 0.702,-0.559 1.794,-0.559 0.741,0 1.391,0.234 0.65,0.234 1.183,0.689 l -0.507,0.793 q -0.533,-0.429 -1.027,-0.624 -0.494,-0.195 -1.053,-0.195 -0.637,0 -1.053,0.286 -0.403,0.286 -0.403,0.702 0,0.455 0.364,0.663 0.364,0.208 1.157,0.247 1.417,0.039 2.041,0.468 0.637,0.429 0.637,1.352 z"
|
||||
id="path22624" />
|
||||
</g>
|
||||
<g
|
||||
aria-label="Reshape_3concat"
|
||||
transform="translate(742.754 252)"
|
||||
id="text21921"
|
||||
style="font-size:20px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 8.8,-5.9 12.78,0 H 10.74 L 6.96,-5.7 H 3.02 V 0 H 1.3 v -14 h 6.16 q 2.36,0 3.68,1.1 1.32,1.08 1.32,2.98 0,1.56 -0.96,2.62 -0.96,1.06 -2.7,1.4 z m 1.92,-3.98 q 0,-1.22 -0.88,-1.9 -0.86,-0.7 -2.42,-0.7 h -4.4 v 5.28 h 4.26 q 1.58,0 2.5,-0.72 0.94,-0.74 0.94,-1.96 z"
|
||||
id="path22627" />
|
||||
<path
|
||||
d="m 18.779998,-1.08 q 0.96,0 1.76,-0.4 0.82,-0.42 1.5,-1.26 l 0.98,0.9 q -0.82,1.08 -1.92,1.62 -1.1,0.52 -2.46,0.52 -2.18,0 -3.58,-1.42 -1.38,-1.44 -1.38,-3.68 0,-2.18 1.42,-3.64 1.42,-1.46 3.5,-1.46 2.12,0 3.44,1.38 1.32,1.38 1.32,3.6 0,0.16 -0.02,0.38 -0.02,0.2 -0.04,0.3 h -7.94 q 0.16,1.42 1.1,2.3 0.96,0.86 2.32,0.86 z m -0.16,-7.44 q -1.26,0 -2.16,0.82 -0.9,0.82 -1.1,2.2 h 6.36 q -0.18,-1.4 -1.02,-2.2 -0.84,-0.82 -2.08,-0.82 z"
|
||||
id="path22629" />
|
||||
<path
|
||||
d="m 32.78001,-2.82 q 0,1.36 -1.16,2.22 -1.16,0.86 -3.02,0.86 -1.22,0 -2.3,-0.38 -1.06,-0.4 -1.92,-1.18 l 0.84,-1.24 q 0.92,0.76 1.72,1.1 0.8,0.34 1.7,0.34 1.16,0 1.86,-0.44 0.72,-0.46 0.72,-1.18 0,-0.74 -0.6,-1.06 -0.6,-0.34 -1.98,-0.36 -2.02,-0.08 -2.96,-0.74 -0.92,-0.66 -0.92,-2.06 0,-1.28 1.06,-2.12 1.08,-0.86 2.76,-0.86 1.14,0 2.14,0.36 1,0.36 1.82,1.06 l -0.78,1.22 q -0.82,-0.66 -1.58,-0.96 -0.76,-0.3 -1.62,-0.3 -0.98,0 -1.62,0.44 -0.62,0.44 -0.62,1.08 0,0.7 0.56,1.02 0.56,0.32 1.78,0.38 2.18,0.06 3.14,0.72 0.98,0.66 0.98,2.08 z"
|
||||
id="path22631" />
|
||||
<path
|
||||
d="m 34.420023,-14 h 1.62 v 5.8 q 0.58,-0.82 1.44,-1.26 0.88,-0.44 1.96,-0.44 1.94,0 3.02,1.24 1.1,1.24 1.1,3.4 V 0 h -1.62 v -5.16 q 0,-1.54 -0.76,-2.4 -0.76,-0.88 -2.12,-0.88 -1.36,0 -2.2,0.94 -0.82,0.94 -0.82,2.44 V 0 h -1.62 z"
|
||||
id="path22633" />
|
||||
<path
|
||||
d="m 53.860008,0 h -1.58 v -1.58 q -0.58,0.84 -1.58,1.32 -0.98,0.46 -2.12,0.46 -1.6,0 -2.6,-0.8 -1,-0.82 -1,-2.14 0,-1.46 1.12,-2.2 1.12,-0.76 3.32,-0.76 h 2.84 q -0.02,-1.36 -0.72,-2.1 -0.68,-0.74 -1.96,-0.74 -0.76,0 -1.5,0.32 -0.74,0.32 -1.46,0.98 l -0.86,-1.08 q 0.84,-0.8 1.84,-1.2 1,-0.42 2.14,-0.42 1.94,0 3.02,1.16 1.1,1.14 1.1,3.16 z m -1.6,-4.44 h -2.82 q -1.46,0 -2.16,0.4 -0.7,0.4 -0.7,1.26 0,0.72 0.6,1.2 0.62,0.48 1.58,0.48 1.44,0 2.42,-0.92 1,-0.94 1.08,-2.42 z"
|
||||
id="path22635" />
|
||||
<path
|
||||
d="m 57.479995,3.6 h -1.62 V -9.66 h 1.6 v 1.56 q 0.64,-0.84 1.58,-1.3 0.94,-0.48 2,-0.48 2.04,0 3.4,1.42 1.36,1.4 1.36,3.6 0,2.22 -1.42,3.66 -1.4,1.42 -3.46,1.42 -0.98,0 -1.9,-0.4 -0.9,-0.42 -1.54,-1.18 z m 3.38,-12.04 q -1.46,0 -2.46,1.06 -0.98,1.04 -0.98,2.6 0,1.54 0.96,2.56 0.98,1.02 2.42,1.02 1.48,0 2.4,-0.98 0.94,-1 0.94,-2.64 0,-1.62 -0.92,-2.62 -0.9,-1 -2.36,-1 z"
|
||||
id="path22637" />
|
||||
<path
|
||||
d="m 72.139983,-1.08 q 0.96,0 1.76,-0.4 0.82,-0.42 1.5,-1.26 l 0.98,0.9 q -0.82,1.08 -1.92,1.62 -1.1,0.52 -2.46,0.52 -2.18,0 -3.58,-1.42 -1.38,-1.44 -1.38,-3.68 0,-2.18 1.42,-3.64 1.42,-1.46 3.5,-1.46 2.12,0 3.44,1.38 1.32,1.38 1.32,3.6 0,0.16 -0.02,0.38 -0.02,0.2 -0.04,0.3 h -7.94 q 0.16,1.42 1.1,2.3 0.96,0.86 2.32,0.86 z m -0.16,-7.44 q -1.26,0 -2.16,0.82 -0.9,0.82 -1.1,2.2 h 6.36 q -0.18,-1.4 -1.02,-2.2 -0.84,-0.82 -2.08,-0.82 z"
|
||||
id="path22639" />
|
||||
<path
|
||||
d="M 77.259995,2.58 V 1.12 h 10.16 v 1.46 z"
|
||||
id="path22641" />
|
||||
<path
|
||||
d="m 93.19999,-1.2 q 1.42,0 2.36,-0.78 0.96,-0.8 0.96,-1.98 0,-1.24 -0.98,-1.96 -0.96,-0.72 -2.68,-0.72 h -0.88 v -1.42 h 0.84 q 1.58,0 2.5,-0.68 0.92,-0.7 0.92,-1.78 0,-1 -0.84,-1.64 -0.84,-0.64 -2.2,-0.64 -1.44,0 -2.3,0.78 -0.84,0.76 -0.94,2.1 h -1.64 q 0.06,-1.88 1.42,-3.14 1.38,-1.26 3.54,-1.26 2.06,0 3.36,1.06 1.32,1.04 1.32,2.6 0,1.16 -0.68,2.02 -0.66,0.86 -1.92,1.3 1.44,0.4 2.16,1.26 0.72,0.86 0.72,2.2 0,1.78 -1.44,3 -1.44,1.2 -3.64,1.2 -2.34,0 -3.72,-1.26 -1.36,-1.28 -1.38,-3.5 h 1.62 q 0.08,1.56 0.98,2.4 0.92,0.84 2.54,0.84 z"
|
||||
id="path22643" />
|
||||
<path
|
||||
d="m 27.1359,233.3 q -2.22,0 -3.62,-1.4 -1.38,-1.42 -1.38,-3.7 0,-2.16 1.46,-3.64 1.46,-1.48 3.58,-1.48 1.54,0 2.62,0.72 1.1,0.7 1.66,2.04 l -1.38,0.66 q -0.46,-1 -1.2,-1.5 -0.72,-0.5 -1.72,-0.5 -1.42,0 -2.4,1.08 -0.96,1.06 -0.96,2.62 0,1.58 0.96,2.64 0.98,1.04 2.44,1.04 0.94,0 1.74,-0.5 0.82,-0.52 1.32,-1.44 l 1.26,0.68 q -0.58,1.24 -1.74,1.96 -1.16,0.72 -2.64,0.72 z"
|
||||
id="path22645" />
|
||||
<path
|
||||
d="m 42.755897,228.18 q 0,2.18 -1.46,3.66 -1.46,1.46 -3.6,1.46 -2.14,0 -3.58,-1.46 -1.42,-1.46 -1.42,-3.64 0,-2.18 1.46,-3.64 1.46,-1.48 3.6,-1.48 2.14,0 3.56,1.46 1.44,1.46 1.44,3.64 z m -8.4,0.02 q 0,1.56 0.96,2.62 0.98,1.06 2.42,1.06 1.42,0 2.38,-1.06 0.98,-1.06 0.98,-2.64 0,-1.54 -0.98,-2.6 -0.98,-1.08 -2.4,-1.08 -1.42,0 -2.4,1.06 -0.96,1.06 -0.96,2.64 z"
|
||||
id="path22647" />
|
||||
<path
|
||||
d="m 44.435908,223.34 h 1.6 v 1.5 q 0.58,-0.84 1.46,-1.28 0.9,-0.46 2,-0.46 1.92,0 3,1.24 1.08,1.24 1.08,3.4 V 233 h -1.62 v -5.16 q 0,-1.52 -0.76,-2.4 -0.74,-0.88 -2.08,-0.88 -1.38,0 -2.22,0.94 -0.84,0.94 -0.84,2.44 V 233 h -1.62 z"
|
||||
id="path22649" />
|
||||
<path
|
||||
d="m 60.135892,233.3 q -2.22,0 -3.62,-1.4 -1.38,-1.42 -1.38,-3.7 0,-2.16 1.46,-3.64 1.46,-1.48 3.58,-1.48 1.54,0 2.62,0.72 1.1,0.7 1.66,2.04 l -1.38,0.66 q -0.46,-1 -1.2,-1.5 -0.72,-0.5 -1.72,-0.5 -1.42,0 -2.4,1.08 -0.96,1.06 -0.96,2.62 0,1.58 0.96,2.64 0.98,1.04 2.44,1.04 0.94,0 1.74,-0.5 0.82,-0.52 1.32,-1.44 l 1.26,0.68 q -0.58,1.24 -1.74,1.96 -1.16,0.72 -2.64,0.72 z"
|
||||
id="path22651" />
|
||||
<path
|
||||
d="m 74.43589,233 h -1.58 v -1.58 q -0.58,0.84 -1.58,1.32 -0.98,0.46 -2.12,0.46 -1.6,0 -2.6,-0.8 -1,-0.82 -1,-2.14 0,-1.46 1.12,-2.2 1.12,-0.76 3.32,-0.76 h 2.84 q -0.02,-1.36 -0.72,-2.1 -0.68,-0.74 -1.96,-0.74 -0.76,0 -1.5,0.32 -0.74,0.32 -1.46,0.98 l -0.86,-1.08 q 0.84,-0.8 1.84,-1.2 1,-0.42 2.14,-0.42 1.94,0 3.02,1.16 1.1,1.14 1.1,3.16 z m -1.6,-4.44 h -2.82 q -1.46,0 -2.16,0.4 -0.7,0.4 -0.7,1.26 0,0.72 0.6,1.2 0.62,0.48 1.58,0.48 1.44,0 2.42,-0.92 1,-0.94 1.08,-2.42 z"
|
||||
id="path22653" />
|
||||
<path
|
||||
d="m 76.875894,230.42 v -5.64 h -1.32 v -1.44 h 1.32 v -2.96 h 1.62 v 2.96 h 2.24 v 1.44 h -2.24 v 5.5 q 0,0.64 0.28,0.96 0.3,0.3 0.9,0.3 h 1.06 V 233 h -1.3 q -1.34,0 -1.96,-0.62 -0.6,-0.62 -0.6,-1.96 z"
|
||||
id="path22655" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 93 KiB |
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3812efef32bd7f1bf40b130d5d522bc3df6aebd406bd1186699d214bca856722
|
||||
size 43721
|
||||
1117
docs/MO_DG/img/optimizations/groups.svg
Normal file
|
After Width: | Height: | Size: 139 KiB |
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:0e232c47e8500f42bd0e1f2b93f94f58e2d59caee149c687be3cdc3e8a5be59a
|
||||
size 18417
|
||||
1774
docs/MO_DG/img/optimizations/inception_v4.svg
Normal file
|
After Width: | Height: | Size: 170 KiB |
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:92d36b9527a3e316cd9eb2b6f5054c312466df004e4aa9c3458e165330bc6561
|
||||
size 24157
|
||||
1696
docs/MO_DG/img/optimizations/resnet_269.svg
Normal file
|
After Width: | Height: | Size: 344 KiB |
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:2adeca1e3512b9fe7b088a5412ce21592977a1f352a013735537ec92e895dc94
|
||||
size 15653
|
||||
2538
docs/MO_DG/img/optimizations/resnet_optimization.svg
Normal file
|
After Width: | Height: | Size: 486 KiB |
@@ -1,17 +1,53 @@
|
||||
# Getting Performance Numbers {#openvino_docs_MO_DG_Getting_Performance_Numbers}
|
||||
|
||||
This guide introduces things to notice and how to use the benchmark_app to get performance numbers. It also explains how the performance numbers are reflected through internal inference performance counters and execution graphs. In the last section, it includes information on using ITT and Intel® VTune™ Profiler to get performance insights.
|
||||
This guide explains how to use the benchmark_app to get performance numbers. It also explains how the performance numbers are reflected through internal inference performance counters and execution graphs. It also includes information on using ITT and Intel® VTune™ Profiler to get performance insights.
|
||||
|
||||
## Tip 1: Select Proper Set of Operations to Measure
|
||||
## Test performance with the benchmark_app
|
||||
|
||||
### Prerequisites
|
||||
|
||||
To run benchmarks, you need both OpenVINO developer tools and Runtime installed. Follow the [Installation guide](../../install_guides/installing-model-dev-tools.md) and make sure to install the latest general release package with support for frameworks of the models you want to test.
|
||||
|
||||
To test performance of your model, make sure you [prepare the model for use with OpenVINO](../../Documentation/model_introduction.md). For example, if you use [OpenVINO's automation tools](@ref omz_tools_downloader), these two lines of code will download the resnet-50-tf and convert it to OpenVINO IR.
|
||||
|
||||
```bash
|
||||
omz_downloader --name resnet-50-tf
|
||||
omz_converter --name resnet-50-tf
|
||||
```
|
||||
|
||||
### Running the benchmark application
|
||||
|
||||
For a detailed description, see the dedicated articles: [benchmark_app for C++](../../../samples/cpp/benchmark_app/README.md) and [benchmark_app for Python](../../../tools/benchmark_tool/README.md).
|
||||
|
||||
The benchmark_app includes a lot of device-specific options, but the primary usage is as simple as:
|
||||
|
||||
```bash
|
||||
benchmark_app -m <model> -d <device> -i <input>
|
||||
```
|
||||
|
||||
Each of the [OpenVINO supported devices](../../OV_Runtime_UG/supported_plugins/Supported_Devices.md) offers performance settings that contain command-line equivalents in the Benchmark app.
|
||||
|
||||
While these settings provide really low-level control for the optimal model performance on the _specific_ device, it is recommended to always start performance evaluation with the [OpenVINO High-Level Performance Hints](../../OV_Runtime_UG/performance_hints.md) first, like so:
|
||||
|
||||
```bash
|
||||
# for throughput prioritization
|
||||
benchmark_app -hint tput -m <model> -d <device>
|
||||
# for latency prioritization
|
||||
benchmark_app -hint latency -m <model> -d <device>
|
||||
```
|
||||
|
||||
## Additional benchmarking considerations
|
||||
|
||||
### 1 - Select a Proper Set of Operations to Measure
|
||||
|
||||
When evaluating performance of a model with OpenVINO Runtime, it is required to measure a proper set of operations.
|
||||
|
||||
When evaluating the performance of a model with OpenVINO Runtime, it is required to measure a proper set of operations. Remember the following tips:
|
||||
- Avoid including one-time costs such as model loading.
|
||||
|
||||
- Track operations that occur outside OpenVINO Runtime (such as video decoding) separately.
|
||||
|
||||
> **NOTE**: Some image pre-processing can be baked into OpenVINO IR and accelerated accordingly. For more information, refer to [Embedding the Pre-processing](Additional_Optimizations.md) and [General Runtime Optimizations](../../optimization_guide/dldt_deployment_optimization_common.md).
|
||||
|
||||
## Tip 2: Try to Get Credible Data
|
||||
### 2 - Try to Get Credible Data
|
||||
|
||||
Performance conclusions should be built upon reproducible data. As for the performance measurements, they should be done with a large number of invocations of the same routine. Since the first iteration is almost always significantly slower than the subsequent ones, an aggregated value can be used for the execution time for final projections:
|
||||
|
||||
@@ -19,26 +55,8 @@ Performance conclusions should be build upon reproducible data. As for the perfo
|
||||
- If the time values range too much, consider geomean.
|
||||
- Be aware of the throttling and other power oddities. A device can exist in one of several different power states. When optimizing your model, consider fixing the device frequency for better performance data reproducibility. However, the end-to-end (application) benchmarking should also be performed under real operational conditions.
|
||||
|
||||
## Using benchmark_app to Measure Reference Performance Numbers
|
||||
|
||||
To get performance numbers, use the dedicated [OpenVINO Benchmark app](../../../samples/cpp/benchmark_app/README.md) sample, which is the most-recommended solution to produce performance reference.
|
||||
It includes a lot of device-specific knobs, but the primary usage is as simple as in the following command to measure the performance of the model on GPU:
|
||||
```bash
|
||||
$ ./benchmark_app -d GPU -m <model> -i <input>
|
||||
```
|
||||
to measure the performance of the model on the GPU.
|
||||
Or
|
||||
```bash
|
||||
$ ./benchmark_app -d CPU -m <model> -i <input>
|
||||
```
|
||||
to execute on the CPU instead.
|
||||
|
||||
Each of the [OpenVINO supported devices](../../OV_Runtime_UG/supported_plugins/Supported_Devices.md) offers performance settings that contain command-line equivalents in the [Benchmark app](../../../samples/cpp/benchmark_app/README.md).
|
||||
While these settings provide really low-level control and allow leveraging the optimal model performance on the _specific_ device, it is recommended to always start the performance evaluation with the [OpenVINO High-Level Performance Hints](../../OV_Runtime_UG/performance_hints.md) first:
|
||||
- benchmark_app **-hint tput** -d 'device' -m 'path to your model'
|
||||
- benchmark_app **-hint latency** -d 'device' -m 'path to your model'
|
||||
|
||||
## Notes for Comparing Performance with Native/Framework Code
|
||||
### 3 - Compare Performance with Native/Framework Code
|
||||
|
||||
When comparing the OpenVINO Runtime performance with the framework or another reference code, make sure that both versions are as similar as possible:
|
||||
|
||||
@@ -49,11 +67,12 @@ When comparing the OpenVINO Runtime performance with the framework or another re
|
||||
- When applicable, leverage the [Dynamic Shapes support](../../OV_Runtime_UG/ov_dynamic_shapes.md).
|
||||
- If possible, demand the same accuracy. For example, TensorFlow allows `FP16` execution, so when comparing to that, make sure to test the OpenVINO Runtime with the `FP16` as well.
|
||||
|
||||
## Data from Internal Inference Performance Counters and Execution Graphs <a name="performance-counters"></a>
|
||||
### Internal Inference Performance Counters and Execution Graphs <a name="performance-counters"></a>
|
||||
|
||||
More detailed insights into inference performance breakdown can be achieved with device-specific performance counters and/or execution graphs.
|
||||
Both [C++](../../../samples/cpp/benchmark_app/README.md) and [Python](../../../tools/benchmark_tool/README.md) versions of the `benchmark_app` support a `-pc` command-line parameter that outputs internal execution breakdown.
|
||||
|
||||
For example, the table shown below is the part of performance counters for quantized [TensorFlow implementation of ResNet-50](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/resnet-50-tf) model inference on [CPU Plugin](../../OV_Runtime_UG/supported_plugins/CPU.md).
|
||||
For example, the table shown below is part of performance counters for quantized [TensorFlow implementation of ResNet-50](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/resnet-50-tf) model inference on [CPU Plugin](../../OV_Runtime_UG/supported_plugins/CPU.md).
|
||||
Keep in mind that since the device is CPU, the `realTime` wall clock and the `cpu` time layers are the same. Information about layer precision is also stored in the performance counters.
|
||||
|
||||
| layerName | execStatus | layerType | execType | realTime (ms) | cpuTime (ms) |
|
||||
@@ -85,6 +104,6 @@ Especially when performance-debugging the [latency](../../optimization_guide/dld
|
||||
|
||||
Lastly, the performance statistics with both performance counters and execution graphs are averaged, so such data for the [inputs of dynamic shapes](../../OV_Runtime_UG/ov_dynamic_shapes.md) should be measured carefully, preferably by isolating the specific shape and executing multiple times in a loop, to gather the reliable data.
|
||||
|
||||
## Using ITT to Get Performance Insights
|
||||
### Use ITT to Get Performance Insights
|
||||
|
||||
In general, OpenVINO and its individual plugins are heavily instrumented with Intel® Instrumentation and Tracing Technology (ITT). Therefore, you can also compile OpenVINO from the source code with ITT enabled and use tools like [Intel® VTune™ Profiler](https://software.intel.com/en-us/vtune) to get detailed inference performance breakdown and additional insights in the application-level performance on the timeline view.
|
||||
|
||||
72
docs/MO_DG/prepare_model/MO_Python_API.md
Normal file
@@ -0,0 +1,72 @@
|
||||
## Model Optimizer Python API {#openvino_docs_MO_DG_Python_API}
|
||||
|
||||
Model Optimizer (MO) has a Python API for model conversion, which is represented by the `convert_model()` method in the openvino.tools.mo namespace.
|
||||
`convert_model()` has all the functionality available from the command-line tool.
|
||||
`convert_model()` returns an openvino.runtime.Model object which can be compiled and inferred or serialized to IR.
|
||||
|
||||
```sh
|
||||
from openvino.tools.mo import convert_model
|
||||
|
||||
ov_model = convert_model("resnet.onnx")
|
||||
```
|
||||
|
||||
`convert_model()` accepts all parameters available in the MO command-line tool. Parameters can be specified by Python classes or string analogs, similar to the command-line tool.
|
||||
Example 1:
|
||||
|
||||
```sh
|
||||
from openvino.runtime import PartialShape, Layout
|
||||
|
||||
ov_model = convert_model(model, input_shape=PartialShape([1,3,100,100]), mean_values=[127, 127, 127], layout=Layout("NCHW"))
|
||||
```
|
||||
|
||||
Example 2:
|
||||
|
||||
```sh
|
||||
ov_model = convert_model(model, input_shape="[1,3,100,100]", mean_values="[127,127,127]", layout="NCHW")
|
||||
```
|
||||
|
||||
Command-line flags, like `--compress_to_fp16`, can be set in the Python API by providing a boolean value (`True` or `False`).
|
||||
|
||||
```sh
|
||||
ov_model = convert_model(model, compress_to_fp16=True)
|
||||
```
|
||||
|
||||
The `input` parameter can be set by a `tuple` with a name, shape, and type. The input name of the type string is required in the tuple. The shape and type are optional.
|
||||
The shape can be a `list` or `tuple` of dimensions (`int` or `openvino.runtime.Dimension`), or `openvino.runtime.PartialShape`, or `openvino.runtime.Shape`. The type can be of numpy type or `openvino.runtime.Type`.
|
||||
|
||||
```sh
|
||||
ov_model = convert_model(model, input=("input_name", [3], np.float32))
|
||||
```
|
||||
|
||||
For complex cases, when a value needs to be set in the `input` parameter, the `InputCutInfo` class can be used. `InputCutInfo` accepts four parameters: `name`, `shape`, `type`, and `value`.
|
||||
|
||||
`InputCutInfo("input_name", [3], np.float32, [0.5, 2.1, 3.4])` is equivalent to `InputCutInfo(name="input_name", shape=[3], type=np.float32, value=[0.5, 2.1, 3.4])`.
|
||||
Supported types for `InputCutInfo`:
|
||||
- name: `string`.
|
||||
- shape: `list` or `tuple` of dimensions (`int` or `openvino.runtime.Dimension`), `openvino.runtime.PartialShape`, `openvino.runtime.Shape`.
|
||||
- type: `numpy type`, `openvino.runtime.Type`.
|
||||
- value: `numpy.ndarray`, `list` of numeric values, `bool`.
|
||||
|
||||
```sh
|
||||
from openvino.tools.mo import convert_model, InputCutInfo
|
||||
|
||||
ov_model = convert_model(model, input=InputCutInfo("input_name", [3], np.float32, [0.5, 2.1, 3.4]))
|
||||
```
|
||||
|
||||
`layout`, `source_layout` and `dest_layout` accept an `openvino.runtime.Layout` object or `string`.
|
||||
|
||||
```sh
|
||||
from openvino.runtime import Layout
|
||||
from openvino.tools.mo import convert_model
|
||||
|
||||
ov_model = convert_model(model, source_layout=Layout("NCHW"))
|
||||
```
|
||||
|
||||
To set both source and destination layouts in the `layout` parameter, the `LayoutMap` class can be used. `LayoutMap` accepts two parameters: `source_layout` and `target_layout`.
|
||||
`LayoutMap("NCHW", "NHWC")` is equivalent to `LayoutMap(source_layout="NCHW", target_layout="NHWC")`.
|
||||
|
||||
```sh
|
||||
from openvino.tools.mo import convert_model, LayoutMap
|
||||
|
||||
ov_model = convert_model(model, layout=LayoutMap("NCHW", "NHWC"))
|
||||
```
|
||||
@@ -26,7 +26,7 @@ This optimization method consists of three stages:
|
||||
|
||||
The picture below shows the depicted part of Caffe Resnet269 topology where `BatchNorm` and `ScaleShift` layers will be fused to `Convolution` layers.
|
||||
|
||||

|
||||

|
||||
|
||||
* * *
|
||||
|
||||
@@ -38,7 +38,7 @@ ResNet optimization is a specific optimization that applies to Caffe ResNet topo
|
||||
|
||||
In the picture below, you can see the original and optimized parts of a Caffe ResNet50 model. The main idea of this optimization is to move the stride that is greater than 1 from Convolution layers with the kernel size = 1 to upper Convolution layers. In addition, the Model Optimizer adds a Pooling layer to align the input shape for an Eltwise layer, if it was changed during the optimization.
|
||||
|
||||

|
||||

|
||||
|
||||
In this example, the stride from the `res3a_branch1` and `res3a_branch2a` Convolution layers moves to the `res2c_branch2b` Convolution layer. In addition, to align the input shape for `res2c` Eltwise, the optimization inserts the Pooling layer with kernel size = 1 and stride = 2.
|
||||
|
||||
@@ -48,7 +48,7 @@ In this example, the stride from the `res3a_branch1` and `res3a_branch2a` Convol
|
||||
|
||||
Grouped convolution fusing is a specific optimization that applies for TensorFlow topologies. The main idea of this optimization is to combine convolutions results for the `Split` outputs and then recombine them using `Concat` operation in the same order as they were out from `Split`.
|
||||
|
||||

|
||||

|
||||
|
||||
* * *
|
||||
|
||||
@@ -62,4 +62,4 @@ On the picture below you can see two visualized Intermediate Representations (IR
|
||||
The first one is original IR that will be produced by the Model Optimizer.
|
||||
The second one will be produced by the Model Optimizer with key `--finegrain_fusing InceptionV4/InceptionV4/Conv2d_1a_3x3/Conv2D`, where you can see that `Convolution` was not fused with `Mul1_3752` and `Mul1_4061/Fused_Mul_5096/FusedScaleShift_5987` operations.
|
||||
|
||||

|
||||

|
||||
@@ -670,64 +670,71 @@ paddlepaddle>=2.1
|
||||
|
||||
| Operator Name in PaddlePaddle| Limitations|
|
||||
| :----------| :----------|
|
||||
| adpative_pool2d | The `NHWC` data_layout is not supported. |
|
||||
| arg_max | The `int32` output data_type is not supported. |
|
||||
| adaptive_pool2d | The `NHWC` data_layout is not supported. |
|
||||
| assign | |
|
||||
| assign_value | |
|
||||
| batch_norm | |
|
||||
| bicubic_interp | |
|
||||
| bilinear_interp | `NCW`, `NWC`, `NHWC`, `NCDHW`, `NDHWC` data_layout are not supported. |
|
||||
| bilinear_interp_v2 | `NCW`, `NWC`, `NHWC`, `NCDHW`, `NDHWC` data_layout are not supported. |
|
||||
| bmm | |
|
||||
| box_coder | |
|
||||
| cast | |
|
||||
| ceil | |
|
||||
| clip | |
|
||||
| concat | |
|
||||
| conditional_block | |
|
||||
| conv2d | `NHWC` data_layout is not supported. |
|
||||
| conv2d_transpose | |
|
||||
| cumsum | |
|
||||
| deformable_conv | |
|
||||
| depthwise_conv2d | `NHWC` data_layout is not supported. |
|
||||
| depthwise_conv2d_transpose | |
|
||||
| dropout | |
|
||||
| elementwise_add | |
|
||||
| elementwise_div | |
|
||||
| elementwise_floordiv | |
|
||||
| elementwise_max | |
|
||||
| elementwise_min | |
|
||||
| elementwise_mod | |
|
||||
| elementwise_mul | |
|
||||
| elementwise_not_equal | |
|
||||
| elementwise_pow | |
|
||||
| elementwise_sub | |
|
||||
| equal | |
|
||||
| exp | |
|
||||
| expand | |
|
||||
| expand_v2 | |
|
||||
| fill_any_like | |
|
||||
| fill_constant | |
|
||||
| fill_constant_batch_size_like | |
|
||||
| fill_zeros_like | |
|
||||
| flatten_contiguous_range | |
|
||||
| floor | |
|
||||
| gather | |
|
||||
| gather_tree | |
|
||||
| gather_nd | |
|
||||
| gelu | |
|
||||
| generate_proposals_v2 | |
|
||||
| generate_proposals | |
|
||||
| greater_equal | |
|
||||
| greater_than | |
|
||||
| group_norm | |
|
||||
| hard_sigmoid | |
|
||||
| hard_swish | |
|
||||
| layer_norm | |
|
||||
| leaky_relu | |
|
||||
| less_than | |
|
||||
| linear_interp | |
|
||||
| log | |
|
||||
| logical_and | |
|
||||
| logical_not | |
|
||||
| logical_or | |
|
||||
| logical_xor | |
|
||||
| lookup_table_v2 | |
|
||||
| lookup_table | |
|
||||
| matmul | |
|
||||
| matmul_v2 | |
|
||||
| matrix_nms | Only supports IE CPU plugin with *"number of selected boxes"* static shape(e.g.: `min(min(num_boxes, nms_top_k) * num_classes_output, keep_top_k)`). |
|
||||
| matrix_nms | Only supports IE CPU plugin with "number of selected boxes" static shape (e.g.: `min(min(num_boxes, nms_top_k) * num_classes_output, keep_top_k)`). |
|
||||
| max_pool2d_with_index | |
|
||||
| meshgrid | |
|
||||
| mul | |
|
||||
| multiclass_nms3 | Only supports IE CPU plugin with *"number of selected boxes"* static shape(e.g.: `min(min(num_boxes, nms_top_k) * num_classes_output, keep_top_k)`). |
|
||||
| multiclass_nms | Only supports IE CPU plugin with "number of selected boxes" static shape (e.g.: `min(min(num_boxes, nms_top_k) * num_classes_output, keep_top_k)`). |
|
||||
| nearest_interp | `NCW`, `NWC`, `NHWC`, `NCDHW`, `NDHWC` data_layout are not supported. |
|
||||
| nearest_interp_v2 | `NCW`, `NWC`, `NHWC`, `NCDHW`, `NDHWC` data_layout are not supported. |
|
||||
| not_equal | |
|
||||
| p_norm | |
|
||||
| pad3d | `Circular` mode is not supported. |
|
||||
| pool2d | `NHWC` data_layout is not supported. |
|
||||
| pow | |
|
||||
@@ -739,11 +746,12 @@ paddlepaddle>=2.1
|
||||
| reduce_prod | |
|
||||
| reduce_sum | |
|
||||
| relu | |
|
||||
| relu6 | |
|
||||
| reshape2 | |
|
||||
| reshape | |
|
||||
| reverse | |
|
||||
| rnn | `SimpleRNN` and `GRU` modes are not supported. |
|
||||
| roi_align | |
|
||||
| scale | |
|
||||
| select_input | |
|
||||
| shape | |
|
||||
| sigmoid | |
|
||||
| slice | |
|
||||
@@ -751,14 +759,19 @@ paddlepaddle>=2.1
|
||||
| softplus | |
|
||||
| split | |
|
||||
| sqrt | |
|
||||
| squeeze2 | |
|
||||
| squeeze | |
|
||||
| stack | |
|
||||
| strided_slice | |
|
||||
| sum | |
|
||||
| swish | |
|
||||
| sync_batch_norm | |
|
||||
| tanh | |
|
||||
| tile | |
|
||||
| top_k | |
|
||||
| top_k_v2 | |
|
||||
| transpose2 | |
|
||||
| unsqueeze2 | |
|
||||
| transpose | |
|
||||
| trilinear_interp | |
|
||||
| unsqueeze | |
|
||||
| where | |
|
||||
| where_index | |
|
||||
| while | |
|
||||
| yolo_box | |
|
||||
|
||||
@@ -29,13 +29,13 @@ The input model is converted as a whole if neither `--input` nor `--output` comm
|
||||
|
||||
For Inception_V1, there is one `Placeholder`: input. If the model is viewed in TensorBoard, the input operation is easy to find:
|
||||
|
||||

|
||||

|
||||
|
||||
`Reshape` is the only output operation, which is enclosed in a nested name scope of `InceptionV1/Logits/Predictions`, under the full name of `InceptionV1/Logits/Predictions/Reshape_1`.
|
||||
|
||||
In TensorBoard, along with some of its predecessors, it looks as follows:
|
||||
|
||||

|
||||

|
||||
|
||||
Convert this model and put the results in a writable output directory:
|
||||
```sh
|
||||
@@ -90,7 +90,7 @@ The Intermediate Representations are identical for both conversions. The same is
|
||||
|
||||
Now, consider how to cut some parts of the model off. This chapter describes the first convolution block `InceptionV1/InceptionV1/Conv2d_1a_7x7` of the Inception V1 model to illustrate cutting:
|
||||
|
||||

|
||||

|
||||
|
||||
### Cutting at the End
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ There are two inputs in this network: boolean `phase_train` which manages state
|
||||
`batch_size` which is a part of batch joining pattern.
|
||||
|
||||
|
||||

|
||||

|
||||
|
||||
## Converting a TensorFlow FaceNet Model to the IR
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ where `rating/BiasAdd` is an output node.
|
||||
3. Convert the model to the OpenVINO format. If you look at your frozen model, you can see that
|
||||
it has one input that is split into four `ResourceGather` layers. (Click image to zoom in.)
|
||||
|
||||

|
||||

|
||||
|
||||
However, as the Model Optimizer does not support such data feeding, you should skip it. Cut
|
||||
the edges incoming in `ResourceGather` port 1:
|
||||
|
||||
@@ -62,7 +62,7 @@ lm_1b/
|
||||
```
|
||||
|
||||
|
||||

|
||||

|
||||
|
||||
The frozen model still has two variables: `Variable` and `Variable_1`.
|
||||
It means that the model keeps training those variables at each inference.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Deploying Your Applications with OpenVINO™ {#openvino_deployment_guide}
|
||||
# Deploy via OpenVINO Runtime {#openvino_deployment_guide}
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
@@ -6,12 +6,17 @@
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
openvino_docs_install_guides_deployment_manager_tool
|
||||
openvino_docs_deploy_local_distribution
|
||||
Run Inference <openvino_docs_OV_UG_OV_Runtime_User_Guide>
|
||||
Optimize Inference <openvino_docs_deployment_optimization_guide_dldt_optimization_guide>
|
||||
Deploy Application with Deployment Manager <openvino_docs_install_guides_deployment_manager_tool>
|
||||
Local Distribution Libraries <openvino_docs_deploy_local_distribution>
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
Once the [OpenVINO™ application development](../integrate_with_your_application.md) has been finished, application developers usually need to deploy their applications to end users. There are several ways to achieve that:
|
||||
> **NOTE**: Note that [running inference in OpenVINO Runtime](../openvino_intro.md) is the most basic form of deployment. Before moving forward, make sure you know how to create a proper Inference configuration and [develop your application properly](../integrate_with_your_application.md)
|
||||
|
||||
|
||||
## Local Deployment Options
|
||||
|
||||
- Set a dependency on the existing prebuilt packages, also called "centralized distribution":
|
||||
- using Debian / RPM packages - a recommended way for Linux operating systems;
|
||||
@@ -43,7 +48,8 @@ The granularity of OpenVINO packages may vary for different distribution types.
|
||||
|
||||
- The main library `openvino` is used by users' C++ applications to link against with. The library provides all OpenVINO Runtime public APIs, including both API 2.0 and the previous Inference Engine and nGraph APIs. For C language applications, `openvino_c` is additionally required for distribution.
|
||||
- The "optional" plugin libraries like `openvino_intel_cpu_plugin` (matching the `openvino_.+_plugin` pattern) are used to provide inference capabilities on specific devices or additional capabilities like [Hetero Execution](../hetero_execution.md) and [Multi-Device Execution](../multi_device.md).
|
||||
- The "optional" plugin libraries like `openvino_ir_frontend` (matching `openvino_.+_frontend`) are used to provide capabilities to read models of different file formats such as OpenVINO IR, ONNX, and PaddlePaddle.
|
||||
- The "optional" plugin libraries like `openvino_ir_frontend` (matching `openvino_.+_frontend`) are used to provide capabilities to read models of different file formats such as OpenVINO IR,
|
||||
TensorFlow (check [TensorFlow Frontend Capabilities and Limitations](../../resources/tensorflow_frontend.md)), ONNX, and PaddlePaddle.
|
||||
|
||||
Here the term "optional" means that if the application does not use the capability enabled by the plugin, the plugin library or a package with the plugin is not needed in the final distribution.
|
||||
|
||||
|
||||
@@ -120,12 +120,13 @@ The `HETERO`, `MULTI`, `BATCH` and `AUTO` execution modes can also be used expli
|
||||
|
||||
OpenVINO Runtime uses frontend libraries dynamically to read models in different formats:
|
||||
- `openvino_ir_frontend` is used to read OpenVINO IR.
|
||||
- `openvino_tensorflow_frontend` is used to read TensorFlow file format. Check [TensorFlow Frontend Capabilities and Limitations](../../resources/tensorflow_frontend.md).
|
||||
- `openvino_onnx_frontend` is used to read ONNX file format.
|
||||
- `openvino_paddle_frontend` is used to read Paddle file format.
|
||||
|
||||
Depending on the model format types that are used in the application in `ov::Core::read_model`, pick up the appropriate libraries.
|
||||
|
||||
> **NOTE**: To optimize the size of final distribution package, you are recommended to convert models to OpenVINO IR by using [Model Optimizer](../../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). This way you don't have to keep ONNX, PaddlePaddle, and other frontend libraries in the distribution package.
|
||||
> **NOTE**: To optimize the size of final distribution package, you are recommended to convert models to OpenVINO IR by using [Model Optimizer](../../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). This way you don't have to keep TensorFlow, ONNX, PaddlePaddle, and other frontend libraries in the distribution package.
|
||||
|
||||
### (Legacy) Preprocessing via G-API
|
||||
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:9976341ca931f3ab4e4fbccea26844b738adb27b091149a4c6231eda841ab867
|
||||
size 144541
|
||||
113
docs/OV_Runtime_UG/lowlatency2.md
Normal file
@@ -0,0 +1,113 @@
|
||||
# The LowLatency2 Transformation {#openvino_docs_OV_UG_lowlatency2}
|
||||
|
||||
The LowLatency2 transformation changes the structure of the network containing [TensorIterator](../ops/infrastructure/TensorIterator_1.md) and [Loop](../ops/infrastructure/Loop_5.md) by adding the ability to work with the state, inserting the [Assign](../ops/infrastructure/Assign_3.md)/[ReadValue](../ops/infrastructure/ReadValue_3.md) layers as it is shown in the picture below.
|
||||
|
||||
## The Differences between the LowLatency and the LowLatency2:
|
||||
|
||||
* Unrolling of `TensorIterator`/`Loop` operations became a part of the LowLatency2, not a separate transformation. After invoking the transformation, the network can be serialized and inferred without re-invoking the transformation.
|
||||
* Support for `TensorIterator` and `Loop` operations with multiple iterations inside. The `TensorIterator`/`Loop` will not be unrolled in this case.
|
||||
* The "Parameters connected directly to ReadValues" limitation is resolved. To apply the previous version of the transformation in this case, additional manual manipulations were required. Now, the case is processed automatically.
|
||||
|
||||
## Example of Applying the Transformation:<a name="example-of-applying-lowlatency2-transformation"></a>
|
||||
|
||||

|
||||
|
||||
After applying the transformation, the `ReadValue` operations can receive other operations as an input, as shown in the picture above. These inputs should set the initial value for initialization of the `ReadValue` operations. However, such initialization is not supported in the current State API implementation. Input values are ignored and the initial values for the `ReadValue` operations are set to 0 unless otherwise specified by the user via [State API](@ref openvino-state-api).
|
||||
|
||||
## Steps to Apply LowLatency2
|
||||
|
||||
1. Get CNNNetwork. Either way is acceptable:
|
||||
* [from IR or ONNX model](./integrate_with_your_application.md)
|
||||
* [from ov::Model](../OV_Runtime_UG/model_representation.md)
|
||||
|
||||
|
||||
2. Change the number of iterations inside `TensorIterator`/`Loop` nodes in the network, using the [Reshape](ShapeInference.md) feature.
|
||||
|
||||
For example, when the `sequence_lengths` dimension of input of the network > 1, the `TensorIterator` layer has `number_iterations` > 1. You can reshape the inputs of the network to set `sequence_dimension` to 1.
|
||||
|
||||
```cpp
|
||||
|
||||
// Network before reshape: Parameter (name: X, shape: [2 (sequence_lengths), 1, 16]) -> TensorIterator (num_iteration = 2, axis = 0) -> ...
|
||||
|
||||
cnnNetwork.reshape({"X" : {1, 1, 16}});
|
||||
|
||||
// Network after reshape: Parameter (name: X, shape: [1 (sequence_lengths), 1, 16]) -> TensorIterator (num_iteration = 1, axis = 0) -> ...
|
||||
|
||||
```
|
||||
**Unrolling**: If the LowLatency2 transformation is applied to a network containing `TensorIterator`/`Loop` nodes with exactly one iteration inside, these nodes are unrolled. Otherwise, the nodes remain as they are. For more details, see [the picture](#example-of-applying-lowlatency2-transformation) above.
|
||||
|
||||
3. Apply the LowLatency2 transformation.
|
||||
```cpp
|
||||
#include "ie_transformations.hpp"
|
||||
|
||||
...
|
||||
|
||||
InferenceEngine::lowLatency2(cnnNetwork); // 2nd argument 'use_const_initializer = true' by default
|
||||
```
|
||||
**Use_const_initializer argument**: By default, the LowLatency2 transformation inserts a constant subgraph of the same shape as the previous input node, and with 0 values as the initializing value for `ReadValue` nodes. (See the picture below.) Insertion of this subgraph can be disabled by passing the `false` value for the `use_const_initializer` argument.
|
||||
|
||||
```cpp
|
||||
InferenceEngine::lowLatency2(cnnNetwork, false);
|
||||
```
|
||||

|
||||
|
||||
**State naming rule**: A name of a state is a concatenation of names: original `TensorIterator` operation, parameter of the body, and additional suffix `variable_` + `id` (0-base indexing, new indexing for each `TensorIterator`). Use these rules to predict the name of the inserted state after the transformation is applied. For example:
|
||||
|
||||
```cpp
|
||||
// Precondition in ngraph::function.
|
||||
// Created TensorIterator and Parameter in body of TensorIterator with names
|
||||
std::string tensor_iterator_name = "TI_name";
|
||||
std::string body_parameter_name = "param_name";
|
||||
std::string idx = "0"; // it's the first variable in the network
|
||||
|
||||
// The State will be named "TI_name/param_name/variable_0"
|
||||
auto state_name = tensor_iterator_name + "/" + body_parameter_name + "/" + "variable_" + idx;
|
||||
|
||||
InferenceEngine::CNNNetwork cnnNetwork = InferenceEngine::CNNNetwork{function};
|
||||
InferenceEngine::lowLatency2(cnnNetwork);
|
||||
|
||||
InferenceEngine::ExecutableNetwork executableNetwork = core->LoadNetwork(/*cnnNetwork, targetDevice, configuration*/);
|
||||
|
||||
// Try to find the Variable by name
|
||||
auto states = executableNetwork.QueryState();
|
||||
for (auto& state : states) {
|
||||
auto name = state.GetName();
|
||||
if (name == state_name) {
|
||||
// some actions
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
4. Use state API. See the [OpenVINO state API](@ref openvino-state-api) and the [Example of stateful network inference](@ref example-of-stateful-network-inference) sections.
|
||||
|
||||
## Known Limitations
|
||||
1. Unable to execute the [Reshape](ShapeInference.md) feature to change the number of iterations of `TensorIterator`/`Loop` layers to apply the transformation correctly.
|
||||
|
||||
The only way to change the number of iterations of a `TensorIterator`/`Loop` layer is to use the `Reshape` feature. However, networks can be non-reshapable. The most common reason is that the value of shapes is hardcoded in a constant somewhere in the network.
|
||||
|
||||

|
||||
|
||||
**Current solution:**
|
||||
|
||||
* Trim non-reshapable layers via [ModelOptimizer CLI](../MO_DG/prepare_model/convert_model/Converting_Model.md): the `--input` and `--output` parameters. For example, the parameter and the problematic constant in the picture above can be trimmed using the `--input Reshape_layer_name` command-line option.
|
||||
The problematic constant can also be replaced using ngraph, as shown in the example below.
|
||||
|
||||
```cpp
|
||||
// nGraph example. How to replace a Constant with hardcoded values of shapes in the network with another one with the new values.
|
||||
// Assume we know which Constant (const_with_hardcoded_shape) prevents the reshape from being applied.
|
||||
// Then we can find this Constant by name on the network and replace it with a new one with the correct shape.
|
||||
auto func = cnnNetwork.getFunction();
|
||||
// Creating the new Constant with a correct shape.
|
||||
// For the example shown in the picture above, the new values of the Constant should be 1, 1, 10 instead of 1, 49, 10
|
||||
auto new_const = std::make_shared<ngraph::opset6::Constant>( /*type, shape, value_with_correct_shape*/ );
|
||||
for (const auto& node : func->get_ops()) {
|
||||
// Trying to find the problematic Constant by name.
|
||||
if (node->get_friendly_name() == "name_of_non_reshapable_const") {
|
||||
auto const_with_hardcoded_shape = std::dynamic_pointer_cast<ngraph::opset6::Constant>(node);
|
||||
// Replacing the problematic Constant with a new one. Do this for all the problematic Constants in the network, then
|
||||
// you can apply the reshape feature.
|
||||
ngraph::replace_node(const_with_hardcoded_shape, new_const);
|
||||
}
|
||||
}
|
||||
```
|
||||
125
docs/OV_Runtime_UG/lowlatency_deprecated.md
Normal file
@@ -0,0 +1,125 @@
|
||||
# [DEPRECATED] The LowLatency Transformation {#openvino_docs_OV_UG_lowlatency_deprecated}
|
||||
|
||||
The deprecated LowLatency transformation changes the structure of the network containing [TensorIterator](../ops/infrastructure/TensorIterator_1.md) and [Loop](../ops/infrastructure/Loop_5.md) operations by adding the ability to work with the state, inserting the [Assign](../ops/infrastructure/Assign_3.md)/[ReadValue](../ops/infrastructure/ReadValue_3.md) layers, as shown in the picture below.
|
||||
|
||||

|
||||
|
||||
After applying the transformation, `ReadValue` operations can receive other operations as an input, as shown in the picture above. These inputs should set the initial value for initialization of `ReadValue` operations. However, such initialization is not supported in the current State API implementation. Input values are ignored and the initial values for the `ReadValue` operations are set to 0 unless otherwise specified by the user via [State API](@ref openvino-state-api).
|
||||
|
||||
## Steps to Apply LowLatency
|
||||
|
||||
1. Get CNNNetwork. Either way is acceptable:
|
||||
|
||||
* [from IR or ONNX model](./integrate_with_your_application.md)
|
||||
* [from ov::Model](../OV_Runtime_UG/model_representation.md)
|
||||
|
||||
2. [Reshape](ShapeInference.md) the CNNNetwork network if necessary.
|
||||
|
||||
An example of such a **necessary case** is when the `sequence_lengths` dimension of input > 1, which means that the `TensorIterator` layer will have `number_iterations` > 1. The inputs of the network should be reshaped to set `sequence_dimension` to exactly 1.
|
||||
|
||||
Usually, the following exception, which occurs after applying a transform when trying to infer the network in a plugin, indicates the need to apply the reshape feature:
|
||||
`C++ exception with description "Function is incorrect. The Assign and ReadValue operations must be used in pairs in the network."`
|
||||
This means that there are several pairs of `Assign`/`ReadValue` operations with the same `variable_id` in the network and operations were inserted into each iteration of the `TensorIterator`.
|
||||
|
||||
```cpp
|
||||
|
||||
// Network before reshape: Parameter (name: X, shape: [2 (sequence_lengths), 1, 16]) -> TensorIterator (num_iteration = 2, axis = 0) -> ...
|
||||
|
||||
cnnNetwork.reshape({"X" : {1, 1, 16}});
|
||||
|
||||
// Network after reshape: Parameter (name: X, shape: [1 (sequence_lengths), 1, 16]) -> TensorIterator (num_iteration = 1, axis = 0) -> ...
|
||||
|
||||
```
|
||||
|
||||
3. Apply the LowLatency transformation.
|
||||
```cpp
|
||||
#include "ie_transformations.hpp"
|
||||
|
||||
...
|
||||
|
||||
InferenceEngine::LowLatency(cnnNetwork);
|
||||
```
|
||||
**State naming rule**: A name of a state is a concatenation of names: original `TensorIterator` operation, parameter of the body, and additional suffix `variable_` + `id` (0-base indexing, new indexing for each `TensorIterator`). Use these rules to predict the name of the inserted state after the transformation is applied. For example:
|
||||
|
||||
```cpp
|
||||
// Precondition in ngraph::function.
|
||||
// Created TensorIterator and Parameter in body of TensorIterator with names
|
||||
std::string tensor_iterator_name = "TI_name";
|
||||
std::string body_parameter_name = "param_name";
|
||||
std::string idx = "0"; // it's the first variable in the network
|
||||
|
||||
// The State will be named "TI_name/param_name/variable_0"
|
||||
auto state_name = tensor_iterator_name + "/" + body_parameter_name + "/" + "variable_" + idx;
|
||||
|
||||
InferenceEngine::CNNNetwork cnnNetwork = InferenceEngine::CNNNetwork{function};
|
||||
InferenceEngine::LowLatency(cnnNetwork);
|
||||
|
||||
InferenceEngine::ExecutableNetwork executableNetwork = core->LoadNetwork(/*cnnNetwork, targetDevice, configuration*/);
|
||||
|
||||
// Try to find the Variable by name
|
||||
auto states = executableNetwork.QueryState();
|
||||
for (auto& state : states) {
|
||||
auto name = state.GetName();
|
||||
if (name == state_name) {
|
||||
// some actions
|
||||
}
|
||||
}
|
||||
```
|
||||
4. Use state API. See the [OpenVINO state API](@ref openvino-state-api) and the [Example of stateful network inference](@ref example-of-stateful-network-inference) sections.
|
||||
|
||||
|
||||
## Known Limitations for the LowLatency
|
||||
1. Parameters connected directly to `ReadValues` (states) after the transformation is applied are not allowed.
|
||||
|
||||
Unnecessary parameters may remain on the graph after applying the transformation. The automatic handling of this case inside the transformation is currently not possible. Such parameters should be removed manually from `ngraph::Function` or replaced with a constant.
|
||||
|
||||

|
||||
|
||||
**Current solutions:**
|
||||
* Replace a parameter with a constant (freeze) with the `[0, 0, 0 … 0]` value via [ModelOptimizer CLI](../MO_DG/prepare_model/convert_model/Converting_Model.md): the `--input` or `--freeze_placeholder_with_value` parameters.
|
||||
* Use nGraph API to replace a parameter with a constant, as shown in the example below:
|
||||
|
||||
```cpp
|
||||
// nGraph example. How to replace Parameter with Constant.
|
||||
auto func = cnnNetwork.getFunction();
|
||||
// Creating the new Constant with zero values.
|
||||
auto new_const = std::make_shared<ngraph::opset6::Constant>( /*type, shape, std::vector with zeros*/ );
|
||||
for (const auto& param : func->get_parameters()) {
|
||||
// Trying to find the problematic Constant by name.
|
||||
if (param->get_friendly_name() == "param_name") {
|
||||
// Replacing the problematic Param with a Constant.
|
||||
ngraph::replace_node(param, new_const);
|
||||
// Removing problematic Parameter from ngraph::function
|
||||
func->remove_parameter(param);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
2. Unable to execute reshape precondition to apply the transformation correctly.
|
||||
|
||||
Networks can be non-reshapable. The most common reason is that the value of shapes is hardcoded in the constant somewhere in the network.
|
||||
|
||||

|
||||
|
||||
**Current solutions:**
|
||||
* Trim non-reshapable layers via [ModelOptimizer CLI](../MO_DG/prepare_model/convert_model/Converting_Model.md): the `--input` and `--output` parameters. For example, the parameter and the problematic constant (as shown in the picture above) can be trimmed using the `--input Reshape_layer_name` command-line option.
|
||||
* Use nGraph API to replace the problematic constant, as shown in the example below:
|
||||
|
||||
```cpp
|
||||
// nGraph example. How to replace a Constant with hardcoded values of shapes in the network with another one with the new values.
|
||||
// Assume we know which Constant (const_with_hardcoded_shape) prevents the reshape from being applied.
|
||||
// Then we can find this Constant by name on the network and replace it with a new one with the correct shape.
|
||||
auto func = cnnNetwork.getFunction();
|
||||
// Creating the new Constant with a correct shape.
|
||||
// For the example shown in the picture above, the new values of the Constant should be 1, 1, 10 instead of 1, 49, 10
|
||||
auto new_const = std::make_shared<ngraph::opset6::Constant>( /*type, shape, value_with_correct_shape*/ );
|
||||
for (const auto& node : func->get_ops()) {
|
||||
// Trying to find the problematic Constant by name.
|
||||
if (node->get_friendly_name() == "name_of_non_reshapable_const") {
|
||||
auto const_with_hardcoded_shape = std::dynamic_pointer_cast<ngraph::opset6::Constant>(node);
|
||||
// Replacing the problematic Constant with a new one. Do this for all the problematic Constants in the network, then
|
||||
// you can apply the reshape feature.
|
||||
ngraph::replace_node(const_with_hardcoded_shape, new_const);
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -65,7 +65,7 @@ To understand the differences between Inference Engine API and API 2.0, see the
|
||||
- Inference Engine API does not support compiling of models with dynamic input shapes.
|
||||
- **New behavior** implemented in 2022.1 assumes full model alignment with the framework:
|
||||
- Model Optimizer preserves input element types and order of dimensions (layouts), and stores tensor names from the original models.
|
||||
- OpenVINO Runtime 2022.1 reads models in any format (OpenVINO IR v10, OpenVINO IR v11, ONNX, PaddlePaddle, etc.).
|
||||
- OpenVINO Runtime 2022.1 reads models in any format (OpenVINO IR v10, OpenVINO IR v11, TensorFlow (check [TensorFlow Frontend Capabilities and Limitations](../../resources/tensorflow_frontend.md)), ONNX, PaddlePaddle, etc.).
|
||||
- API 2.0 uses tensor names for addressing, which is the standard approach among the compatible model frameworks.
|
||||
- API 2.0 can also address input and output tensors by the index. Some model formats like ONNX are sensitive to the input and output order, which is preserved by OpenVINO 2022.1.
|
||||
|
||||
|
||||
@@ -1,153 +1,166 @@
|
||||
# Stateful models {#openvino_docs_OV_UG_network_state_intro}
|
||||
|
||||
This article describes how to work with stateful networks in OpenVINO™ toolkit. More specifically, it illustrates how stateful networks are represented in IR and nGraph
|
||||
and how operations with a state can be done. The article additionally provides some examples of stateful networks and code to infer them.
|
||||
@sphinxdirective
|
||||
|
||||
## What is a Stateful Network?
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
Several use cases require processing of data sequences. When length of a sequence is known and small enough,
|
||||
it can be processed with RNN-like networks that contain a cycle inside. However, in some cases, like online speech recognition or time series
|
||||
forecasting, length of data sequence is unknown. Then, data can be divided in small portions and processed step-by-step. The dependency
|
||||
between data portions should be addressed. For that, networks save some data between inferences - a state. When one dependent sequence is over,
|
||||
a state should be reset to initial value and a new sequence can be started.
|
||||
|
||||
Several frameworks have special APIs for states in networks. For example, Keras has special option for RNNs, i.e. `stateful` that turns on saving a state
|
||||
between inferences. Kaldi contains special `Offset` specifier to define time offset in a network.
|
||||
|
||||
OpenVINO also contains a special API to simplify work with networks with states. A state is automatically saved between inferences,
|
||||
and there is a way to reset a state when needed. A state can also be read or set to some new value between inferences.
|
||||
openvino_docs_OV_UG_lowlatency2
|
||||
openvino_docs_OV_UG_lowlatency_deprecated
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
Several use cases require processing of data sequences. When length of a sequence is known and small enough,
|
||||
it can be processed with RNN-like networks that contain a cycle inside. However, in some cases (e.g., online speech recognition or time series
|
||||
forecasting) length of data sequence is unknown. Then, data can be divided in small portions and processed step-by-step. The dependency
|
||||
between data portions should be addressed. For that, networks save some data between inferences - a state. When one dependent sequence is over,
|
||||
a state should be reset to initial value and a new sequence can be started.
|
||||
|
||||
Several frameworks have special APIs for states in networks. For example, Keras has `stateful` - a special option for RNNs, that turns on saving a state between inferences. Kaldi contains special `Offset` specifier to define time offset in a network.
|
||||
|
||||
OpenVINO also contains a special API to simplify work with networks with states. A state is automatically saved between inferences,
|
||||
and there is a way to reset a state when needed. A state can also be read or set to some new value between inferences.
|
||||
|
||||
## OpenVINO State Representation
|
||||
|
||||
OpenVINO contains the `Variable`, a special abstraction to represent a state in a network. There are two operations to work with a state:
|
||||
* `Assign` - to save a value in a state.
|
||||
* `ReadValue` - to read a value saved on previous iteration.
|
||||
OpenVINO contains the `Variable`, a special abstraction to represent a state in a network. There are two operations: [Assign](../ops/infrastructure/Assign_3.md) - to save a value in a state and [ReadValue](../ops/infrastructure/ReadValue_3.md) - to read a value saved on previous iteration.
|
||||
|
||||
To get a model with states ready for inference, convert a model from another framework to OpenVINO IR with Model Optimizer or create an nGraph function.
|
||||
(For more information, refer to the [Build OpenVINO Model section](../OV_Runtime_UG/model_representation.md)).
|
||||
|
||||
For more details on these operations, refer to the [ReadValue specification](../ops/infrastructure/ReadValue_3.md) and
|
||||
[Assign specification](../ops/infrastructure/Assign_3.md) articles.
|
||||
Below is the graph in both forms:
|
||||
|
||||
## Examples of Networks with States
|
||||
@sphinxdirective
|
||||
|
||||
To get a model with states ready for inference, convert a model from another framework to IR with Model Optimizer or create an nGraph function. (For more information,
|
||||
refer to the [Build OpenVINO Model section](../OV_Runtime_UG/model_representation.md)). Below is the graph in both forms:
|
||||
.. image:: _static/images/state_network_example.svg
|
||||
:scale: 80 %
|
||||
|
||||
![state_network_example]
|
||||
@endsphinxdirective
|
||||
|
||||
### Example of IR with State
|
||||
|
||||
The `bin` file for this graph should contain `float 0` in binary form. The content of the `xml` file is as follows.
|
||||
|
||||
```xml
|
||||
<?xml version="1.0" ?>
|
||||
<net name="summator" version="10">
|
||||
<layers>
|
||||
<layer id="0" name="init_value" type="Const" version="opset6">
|
||||
<data element_type="f32" offset="0" shape="1,1" size="4"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="1" name="read" type="ReadValue" version="opset6">
|
||||
<data variable_id="id"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="2" name="input" type="Parameter" version="opset6">
|
||||
<data element_type="f32" shape="1,1"/>
|
||||
<output>
|
||||
<port id="0" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="3" name="add_sum" type="Add" version="opset6">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="4" name="save" type="Assign" version="opset6">
|
||||
<data variable_id="id"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
<layer id="10" name="add" type="Add" version="opset6">
|
||||
<data axis="1"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="5" name="output/sink_port_0" type="Result" version="opset6">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
</layers>
|
||||
<edges>
|
||||
<edge from-layer="0" from-port="1" to-layer="1" to-port="0"/>
|
||||
<edge from-layer="2" from-port="0" to-layer="3" to-port="1"/>
|
||||
<edge from-layer="1" from-port="1" to-layer="3" to-port="0"/>
|
||||
<edge from-layer="3" from-port="2" to-layer="4" to-port="0"/>
|
||||
<edge from-layer="3" from-port="2" to-layer="10" to-port="0"/>
|
||||
<edge from-layer="1" from-port="1" to-layer="10" to-port="1"/>
|
||||
<edge from-layer="10" from-port="2" to-layer="5" to-port="0"/>
|
||||
</edges>
|
||||
<meta_data>
|
||||
<MO_version value="unknown version"/>
|
||||
<cli_parameters>
|
||||
</cli_parameters>
|
||||
</meta_data>
|
||||
</net>
|
||||
```
|
||||
@sphinxdirective
|
||||
|
||||
.. dropdown:: Click to see the XML file.
|
||||
|
||||
.. code-block:: xml
|
||||
|
||||
<?xml version="1.0" ?>
|
||||
<net name="summator" version="10">
|
||||
<layers>
|
||||
<layer id="0" name="init_value" type="Const" version="opset6">
|
||||
<data element_type="f32" offset="0" shape="1,1" size="4"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="1" name="read" type="ReadValue" version="opset6">
|
||||
<data variable_id="id"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="2" name="input" type="Parameter" version="opset6">
|
||||
<data element_type="f32" shape="1,1"/>
|
||||
<output>
|
||||
<port id="0" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="3" name="add_sum" type="Add" version="opset6">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="4" name="save" type="Assign" version="opset6">
|
||||
<data variable_id="id"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
<layer id="10" name="add" type="Add" version="opset6">
|
||||
<data axis="1"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="5" name="output/sink_port_0" type="Result" version="opset6">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
</layers>
|
||||
<edges>
|
||||
<edge from-layer="0" from-port="1" to-layer="1" to-port="0"/>
|
||||
<edge from-layer="2" from-port="0" to-layer="3" to-port="1"/>
|
||||
<edge from-layer="1" from-port="1" to-layer="3" to-port="0"/>
|
||||
<edge from-layer="3" from-port="2" to-layer="4" to-port="0"/>
|
||||
<edge from-layer="3" from-port="2" to-layer="10" to-port="0"/>
|
||||
<edge from-layer="1" from-port="1" to-layer="10" to-port="1"/>
|
||||
<edge from-layer="10" from-port="2" to-layer="5" to-port="0"/>
|
||||
</edges>
|
||||
<meta_data>
|
||||
<MO_version value="unknown version"/>
|
||||
<cli_parameters>
|
||||
</cli_parameters>
|
||||
</meta_data>
|
||||
</net>
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
### Example of Creating Model nGraph API
|
||||
|
||||
In the following example, the `SinkVector` is used to create the `ngraph::Function`. For a network with states, except inputs and outputs, the `Assign` nodes should also point to the `Function` to avoid deleting it during graph transformations. Use the constructor to do it, as shown in the example, or with the special `add_sinks(const SinkVector& sinks)` method. After deleting the node from the graph with the `delete_sink()` method, a sink can be deleted from `ngraph::Function`.
|
||||
|
||||
```cpp
|
||||
#include <ngraph/opsets/opset6.hpp>
|
||||
#include <ngraph/op/util/variable.hpp>
|
||||
@@ -172,21 +185,17 @@ The `bin` file for this graph should contain `float 0` in binary form. The conte
|
||||
auto f = make_shared<Function>(ResultVector({res}), ParameterVector({arg}), SinkVector({assign}));
|
||||
```
|
||||
|
||||
In this example, the `SinkVector` is used to create the `ngraph::Function`. For a network with states, except inputs and outputs, the `Assign` nodes should also point to the `Function` to avoid deleting it during graph transformations. Use the constructor to do it, as shown in the example, or with the special `add_sinks(const SinkVector& sinks)` method. After deleting the node from the graph with the `delete_sink()` method, a sink can be deleted from `ngraph::Function`.
|
||||
|
||||
@anchor openvino-state-api
|
||||
## OpenVINO State API
|
||||
|
||||
Inference Engine has the `InferRequest::QueryState` method to get the list of states from a network and `IVariableState` interface to operate with states. Below is a brief description of methods and the example of how to use this interface.
|
||||
|
||||
* `std::string GetName() const` -
|
||||
returns the name (variable_id) of a corresponding Variable.
|
||||
* `void Reset()` -
|
||||
resets a state to a default value.
|
||||
* `void SetState(Blob::Ptr newState)` -
|
||||
sets a new value for a state.
|
||||
* `Blob::CPtr GetState() const` -
|
||||
returns current value of state.
|
||||
Inference Engine has the `InferRequest::QueryState` method to get the list of states from a network and `IVariableState` interface to operate with states. Below is a brief description of methods and the example of how to use this interface.
|
||||
|
||||
* `std::string GetName() const` - returns the name (variable_id) of a corresponding Variable.
|
||||
* `void Reset()` - resets a state to a default value.
|
||||
* `void SetState(Blob::Ptr newState)` - sets a new value for a state.
|
||||
* `Blob::CPtr GetState() const` - returns current value of state.
|
||||
|
||||
@anchor example-of-stateful-network-inference
|
||||
## Example of Stateful Network Inference
|
||||
|
||||
Based on the IR from the previous section, the example below demonstrates inference of two independent sequences of data. A state should be reset between these sequences.
|
||||
@@ -195,266 +204,27 @@ One infer request and one thread will be used in this example. Using several thr
|
||||
|
||||
@snippet openvino/docs/snippets/InferenceEngine_network_with_state_infer.cpp part1
|
||||
|
||||
More elaborate examples demonstrating how to work with networks with states can be found in a speech sample and a demo.
|
||||
Refer to the [Samples Overview](./Samples_Overview.md).
|
||||
|
||||
[state_network_example]: ./img/state_network_example.png
|
||||
|
||||
For more elaborate examples demonstrating how to work with networks with states, refer to the speech sample and a demo in the [Samples Overview](./Samples_Overview.md).
|
||||
|
||||
## LowLatency Transformations
|
||||
|
||||
If the original framework does not have a special API for working with states, after importing the model, OpenVINO representation will not contain `Assign`/`ReadValue` layers. For example, if the original ONNX model contains RNN operations, IR will contain `TensorIterator` operations and the values will be obtained only after execution of the whole `TensorIterator` primitive. Intermediate values from each iteration will not be available. Working with these intermediate values of each iteration is enabled by special LowLatency and LowLatency2 transformations, which also help receive these values with a low latency after each infer request.
|
||||
If the original framework does not have a special API for working with states, OpenVINO representation will not contain `Assign`/`ReadValue` layers after importing the model. For example, if the original ONNX model contains RNN operations, OpenVINO IR will contain [TensorIterator](../ops/infrastructure/TensorIterator_1.md) operations and the values will be obtained only after execution of the whole `TensorIterator` primitive. Intermediate values from each iteration will not be available. Working with these intermediate values of each iteration is enabled by special [LowLatency](lowlatency_deprecated.md) and [LowLatency2](lowlatency2.md) transformations, which also help receive these values with a low latency after each infer request.
|
||||
|
||||
### How to Get TensorIterator/Loop operations from Different Frameworks via Model Optimizer.
|
||||
> **NOTE**: It is recommended to use LowLatency2, as LowLatency transformation has already been deprecated.
|
||||
|
||||
**ONNX and frameworks supported via ONNX format:** `LSTM`, `RNN`, and `GRU` original layers are converted to the `TensorIterator` operation. The `TensorIterator` body contains `LSTM`/`RNN`/`GRU Cell`. The `Peepholes` and `InputForget` modifications are not supported, while the `sequence_lengths` optional input is.
|
||||
`ONNX Loop` layer is converted to the OpenVINO Loop operation.
|
||||
### TensorIterator/Loop operations
|
||||
|
||||
**Apache MXNet:** `LSTM`, `RNN`, `GRU` original layers are converted to `TensorIterator` operation. The `TensorIterator` body contains `LSTM`/`RNN`/`GRU Cell` operations.
|
||||
You can get the TensorIterator/Loop operations from different frameworks via Model Optimizer.
|
||||
|
||||
**TensorFlow:** The `BlockLSTM` is converted to `TensorIterator` operation. The `TensorIterator` body contains `LSTM Cell` operation, whereas `Peepholes` and `InputForget` modifications are not supported.
|
||||
The `While` layer is converted to `TensorIterator`. The `TensorIterator` body can contain any supported operations. However, when count of iterations cannot be calculated in shape inference (Model Optimizer conversion) time, the dynamic cases are not supported.
|
||||
* **ONNX and frameworks supported via ONNX format** - `LSTM`, `RNN`, and `GRU` original layers are converted to the `TensorIterator` operation. The `TensorIterator`
|
||||
body contains `LSTM`/`RNN`/`GRU Cell`. The `Peepholes` and `InputForget` modifications are not supported, while the `sequence_lengths` optional input is.
|
||||
`ONNX Loop` layer is converted to the OpenVINO [Loop](../ops/infrastructure/Loop_5.md) operation.
|
||||
|
||||
**TensorFlow2:** The `While` layer is converted to `Loop` operation. The `Loop` body can contain any supported operations.
|
||||
* **Apache MXNet** - `LSTM`, `RNN`, `GRU` original layers are converted to `TensorIterator` operation, which body contains `LSTM`/`RNN`/`GRU Cell` operations.
|
||||
|
||||
**Kaldi:** Kaldi models already contain `Assign`/`ReadValue` (Memory) operations after model conversion. The `TensorIterator`/`Loop` operations are not generated.
|
||||
* **TensorFlow** - `BlockLSTM` is converted to `TensorIterator` operation. The `TensorIterator` body contains `LSTM Cell` operation, whereas `Peepholes` and `InputForget` modifications are not supported.
|
||||
The `While` layer is converted to `TensorIterator`, which body can contain any supported operations. However, when count of iterations cannot be calculated in shape inference (Model Optimizer conversion) time, the dynamic cases are not supported.
|
||||
|
||||
## The LowLatency2 Transformation
|
||||
* **TensorFlow2** - `While` layer is converted to `Loop` operation, which body can contain any supported operations.
|
||||
|
||||
The LowLatency2 transformation changes the structure of the network containing [TensorIterator](../ops/infrastructure/TensorIterator_1.md) and [Loop](../ops/infrastructure/Loop_5.md) by adding the ability to work with the state, inserting the `Assign`/`ReadValue` layers as it is shown in the picture below.
|
||||
|
||||
### The Differences between the LowLatency and the LowLatency2:
|
||||
|
||||
* Unrolling of `TensorIterator`/`Loop` operations became a part of the LowLatency2, not a separate transformation. After invoking the transformation, the network can be serialized and inferred without re-invoking the transformation.
|
||||
* Support for `TensorIterator` and `Loop` operations with multiple iterations inside. The `TensorIterator`/`Loop` will not be unrolled in this case.
|
||||
* The "Parameters connected directly to ReadValues" limitation is resolved. To apply the previous version of the transformation in this case, additional manual manipulations were required. Now, the case is processed automatically.
|
||||
|
||||
#### Example of Applying the LowLatency2 Transformation:
|
||||
|
||||
<a name="example-of-applying-lowlatency2-transformation"></a>
|
||||
|
||||

|
||||
|
||||
After applying the transformation, the `ReadValue` operations can receive other operations as an input, as shown in the picture above. These inputs should set the initial value for initialization of the `ReadValue` operations. However, such initialization is not supported in the current State API implementation. Input values are ignored and the initial values for the `ReadValue` operations are set to 0 unless otherwise specified by the user via [State API](#openvino-state-api).
|
||||
|
||||
### Steps to Apply the LowLatency2 Transformation
|
||||
|
||||
1. Get CNNNetwork. Either way is acceptable:
|
||||
|
||||
* [from IR or ONNX model](./integrate_with_your_application.md)
|
||||
* [from ov::Model](../OV_Runtime_UG/model_representation.md)
|
||||
|
||||
2. Change the number of iterations inside `TensorIterator`/`Loop` nodes in the network, using the [Reshape](ShapeInference.md) feature.
|
||||
|
||||
For example, when the `sequence_lengths` dimension of the network input is greater than 1, the `TensorIterator` layer has `number_iterations` > 1. You can reshape the inputs of the network to set `sequence_dimension` to 1.
|
||||
|
||||
```cpp
|
||||
|
||||
// Network before reshape: Parameter (name: X, shape: [2 (sequence_lengths), 1, 16]) -> TensorIterator (num_iteration = 2, axis = 0) -> ...
|
||||
|
||||
cnnNetwork.reshape({{"X", {1, 1, 16}}});
|
||||
|
||||
// Network after reshape: Parameter (name: X, shape: [1 (sequence_lengths), 1, 16]) -> TensorIterator (num_iteration = 1, axis = 0) -> ...
|
||||
|
||||
```
|
||||
**Unrolling**: If the LowLatency2 transformation is applied to a network containing `TensorIterator`/`Loop` nodes with exactly one iteration inside, these nodes are unrolled. Otherwise, the nodes remain as they are. For more details, see [the picture](#example-of-applying-lowlatency2-transformation) above.
|
||||
|
||||
3. Apply the LowLatency2 transformation.
|
||||
```cpp
|
||||
#include "ie_transformations.hpp"
|
||||
|
||||
...
|
||||
|
||||
InferenceEngine::lowLatency2(cnnNetwork); // 2nd argument 'use_const_initializer = true' by default
|
||||
```
|
||||
**Use_const_initializer argument**
|
||||
|
||||
By default, the LowLatency2 transformation inserts a constant subgraph of the same shape as the previous input node, and with 0 values as the initializing value for `ReadValue` nodes. (See the picture below.) Insertion of this subgraph can be disabled by passing the `false` value for the `use_const_initializer` argument.
|
||||
|
||||
```cpp
|
||||
InferenceEngine::lowLatency2(cnnNetwork, false);
|
||||
```
|
||||
|
||||

|
||||
|
||||
**State naming rule:** A name of a state is a concatenation of names: original `TensorIterator` operation, parameter of the body, and additional suffix `variable_` + `id` (0-base indexing, new indexing for each `TensorIterator`). Use these rules to predict the name of the inserted state after the transformation is applied. For example:
|
||||
|
||||
```cpp
|
||||
// Precondition in ngraph::function.
|
||||
// Created TensorIterator and Parameter in body of TensorIterator with names
|
||||
std::string tensor_iterator_name = "TI_name";
|
||||
std::string body_parameter_name = "param_name";
|
||||
std::string idx = "0"; // it's a first variable in the network
|
||||
|
||||
// The State will be named "TI_name/param_name/variable_0"
|
||||
auto state_name = tensor_iterator_name + "/" + body_parameter_name + "/" + "variable_" + idx;
|
||||
|
||||
InferenceEngine::CNNNetwork cnnNetwork = InferenceEngine::CNNNetwork{function};
|
||||
InferenceEngine::lowLatency2(cnnNetwork);
|
||||
|
||||
InferenceEngine::ExecutableNetwork executableNetwork = core->LoadNetwork(/*cnnNetwork, targetDevice, configuration*/);
|
||||
|
||||
// Try to find the Variable by name
|
||||
auto states = executableNetwork.QueryState();
|
||||
for (auto& state : states) {
|
||||
auto name = state.GetName();
|
||||
if (name == state_name) {
|
||||
// some actions
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. Use state API. See the [OpenVINO state API](#openvino-state-api) and the [Example of stateful network inference](#example-of-stateful-network-inference) sections.
|
||||
|
||||
### Known Limitations
|
||||
1. Unable to execute the [Reshape](ShapeInference.md) feature to change the number of iterations of `TensorIterator`/`Loop` layers to apply the transformation correctly.
|
||||
|
||||
The only way to change the number of iterations of a `TensorIterator`/`Loop` layer is to use the `Reshape` feature. However, networks can be non-reshapable. The most common reason is that the value of shapes is hardcoded in a constant somewhere in the network.
|
||||
|
||||

|
||||
|
||||
**Current solution:** Trim non-reshapable layers via [ModelOptimizer CLI](../MO_DG/prepare_model/convert_model/Converting_Model.md): the `--input` and `--output` parameters. For example, the parameter and the problematic constant in the picture above can be trimmed using the `--input Reshape_layer_name` command-line option.
|
||||
The problematic constant can also be replaced using ngraph, as shown in the example below.
|
||||
|
||||
```cpp
|
||||
// nGraph example. How to replace a Constant with hardcoded values of shapes in the network with another one with the new values.
|
||||
// Assume we know which Constant (const_with_hardcoded_shape) prevents the reshape from being applied.
|
||||
// Then we can find this Constant by name on the network and replace it with a new one with the correct shape.
|
||||
auto func = cnnNetwork.getFunction();
|
||||
// Creating the new Constant with a correct shape.
|
||||
// For the example shown in the picture above, the new values of the Constant should be 1, 1, 10 instead of 1, 49, 10
|
||||
auto new_const = std::make_shared<ngraph::opset6::Constant>( /*type, shape, value_with_correct_shape*/ );
|
||||
for (const auto& node : func->get_ops()) {
|
||||
// Trying to find the problematic Constant by name.
|
||||
if (node->get_friendly_name() == "name_of_non_reshapable_const") {
|
||||
auto const_with_hardcoded_shape = std::dynamic_pointer_cast<ngraph::opset6::Constant>(node);
|
||||
// Replacing the problematic Constant with a new one. Do this for all the problematic Constants in the network, then
|
||||
// you can apply the reshape feature.
|
||||
ngraph::replace_node(const_with_hardcoded_shape, new_const);
|
||||
}
|
||||
}
|
||||
```
|
||||
## [DEPRECATED] The LowLatency Transformation
|
||||
|
||||
The LowLatency transformation changes the structure of the network containing [TensorIterator](../ops/infrastructure/TensorIterator_1.md) and [Loop](../ops/infrastructure/Loop_5.md) operations by adding the ability to work with the state, inserting the `Assign`/`ReadValue` layers, as shown in the picture below.
|
||||
|
||||

|
||||
|
||||
After applying the transformation, `ReadValue` operations can receive other operations as an input, as shown in the picture above. These inputs should set the initial value for initialization of `ReadValue` operations. However, such initialization is not supported in the current State API implementation. Input values are ignored and the initial values for the `ReadValue` operations are set to 0 unless otherwise specified by the user via [State API](#openvino-state-api).
|
||||
|
||||
### Steps to Apply LowLatency Transformation
|
||||
|
||||
1. Get CNNNetwork. Either way is acceptable:
|
||||
|
||||
* [from IR or ONNX model](./integrate_with_your_application.md)
|
||||
* [from ov::Model](../OV_Runtime_UG/model_representation.md)
|
||||
|
||||
2. [Reshape](ShapeInference.md) the CNNNetwork network if necessary. An example of such a **necessary case** is when the `sequence_lengths` dimension of input > 1,
|
||||
and it means that `TensorIterator` layer will have `number_iterations` > 1. The inputs of the network should be reshaped to set `sequence_dimension` to exactly 1.
|
||||
|
||||
Usually, the following exception, which occurs after applying a transform when trying to infer the network in a plugin, indicates the need to apply the reshape feature:
|
||||
`C++ exception with description "Function is incorrect. The Assign and ReadValue operations must be used in pairs in the network."`
|
||||
This means that there are several pairs of `Assign`/`ReadValue` operations with the same `variable_id` in the network and operations were inserted into each iteration of the `TensorIterator`.
|
||||
|
||||
```cpp
|
||||
|
||||
// Network before reshape: Parameter (name: X, shape: [2 (sequence_lengths), 1, 16]) -> TensorIterator (num_iteration = 2, axis = 0) -> ...
|
||||
|
||||
cnnNetwork.reshape({{"X", {1, 1, 16}}});
|
||||
|
||||
// Network after reshape: Parameter (name: X, shape: [1 (sequence_lengths), 1, 16]) -> TensorIterator (num_iteration = 1, axis = 0) -> ...
|
||||
|
||||
```
|
||||
|
||||
3. Apply the LowLatency transformation.
|
||||
```cpp
|
||||
#include "ie_transformations.hpp"
|
||||
|
||||
...
|
||||
|
||||
InferenceEngine::LowLatency(cnnNetwork);
|
||||
```
|
||||
**State naming rule:** a name of a state is a concatenation of names: original `TensorIterator` operation, parameter of the body, and additional suffix `variable_` + `id` (0-base indexing, new indexing for each `TensorIterator`). Use these rules to predict the name of the inserted state after the transformation is applied. For example:
|
||||
|
||||
```cpp
|
||||
// Precondition in ngraph::function.
|
||||
// Created TensorIterator and Parameter in body of TensorIterator with names
|
||||
std::string tensor_iterator_name = "TI_name";
|
||||
std::string body_parameter_name = "param_name";
|
||||
std::string idx = "0"; // it's a first variable in the network
|
||||
|
||||
// The State will be named "TI_name/param_name/variable_0"
|
||||
auto state_name = tensor_iterator_name + "/" + body_parameter_name + "/" + "variable_" + idx;
|
||||
|
||||
InferenceEngine::CNNNetwork cnnNetwork = InferenceEngine::CNNNetwork{function};
|
||||
InferenceEngine::LowLatency(cnnNetwork);
|
||||
|
||||
InferenceEngine::ExecutableNetwork executableNetwork = core->LoadNetwork(/*cnnNetwork, targetDevice, configuration*/);
|
||||
|
||||
// Try to find the Variable by name
|
||||
auto states = executableNetwork.QueryState();
|
||||
for (auto& state : states) {
|
||||
auto name = state.GetName();
|
||||
if (name == state_name) {
|
||||
// some actions
|
||||
}
|
||||
}
|
||||
```
|
||||
4. Use state API. See the [OpenVINO state API](#openvino-state-api) and the [Example of stateful network inference](#example-of-stateful-network-inference) sections.
|
||||
|
||||
|
||||
### Known Limitations for the LowLatency [DEPRECATED]
|
||||
1. Parameters connected directly to `ReadValues` (states) after the transformation is applied are not allowed.
|
||||
|
||||
Unnecessary parameters may remain on the graph after applying the transformation. The automatic handling of this case inside the transformation is currently not possible. Such parameters should be removed manually from `ngraph::Function` or replaced with a constant.
|
||||
|
||||

|
||||
|
||||
**Current solutions:**
|
||||
* Replace a parameter with a constant (freeze) with the `[0, 0, 0 … 0]` value via [ModelOptimizer CLI](../MO_DG/prepare_model/convert_model/Converting_Model.md): the `--input` or `--freeze_placeholder_with_value` parameters.
|
||||
* Use nGraph API to replace a parameter with a constant, as shown in the example below:
|
||||
|
||||
```cpp
|
||||
// nGraph example. How to replace Parameter with Constant.
|
||||
auto func = cnnNetwork.getFunction();
|
||||
// Creating the new Constant with zero values.
|
||||
auto new_const = std::make_shared<ngraph::opset6::Constant>( /*type, shape, std::vector with zeros*/ );
|
||||
for (const auto& param : func->get_parameters()) {
|
||||
// Trying to find the problematic Constant by name.
|
||||
if (param->get_friendly_name() == "param_name") {
|
||||
// Replacing the problematic Param with a Constant.
|
||||
ngraph::replace_node(param, new_const);
|
||||
// Removing problematic Parameter from ngraph::function
|
||||
func->remove_parameter(param);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
2. Unable to execute reshape precondition to apply the transformation correctly.
|
||||
|
||||
Networks can be non-reshapable. The most common reason is that the value of shapes is hardcoded in the constant somewhere in the network.
|
||||
|
||||

|
||||
|
||||
**Current solutions:**
|
||||
* Trim non-reshapable layers via [ModelOptimizer CLI](../MO_DG/prepare_model/convert_model/Converting_Model.md): the `--input` and `--output` parameters. For example, the parameter and the problematic constant (as shown in the picture above) can be trimmed using the `--input Reshape_layer_name` command-line option.
|
||||
* Use nGraph API to replace the problematic constant, as shown in the example below:
|
||||
|
||||
```cpp
|
||||
// nGraph example. How to replace a Constant with hardcoded values of shapes in the network with another one with the new values.
|
||||
// Assume we know which Constant (const_with_hardcoded_shape) prevents the reshape from being applied.
|
||||
// Then we can find this Constant by name on the network and replace it with a new one with the correct shape.
|
||||
auto func = cnnNetwork.getFunction();
|
||||
// Creating the new Constant with a correct shape.
|
||||
// For the example shown in the picture above, the new values of the Constant should be 1, 1, 10 instead of 1, 49, 10
|
||||
auto new_const = std::make_shared<ngraph::opset6::Constant>( /*type, shape, value_with_correct_shape*/ );
|
||||
for (const auto& node : func->get_ops()) {
|
||||
// Trying to find the problematic Constant by name.
|
||||
if (node->get_friendly_name() == "name_of_non_reshapable_const") {
|
||||
auto const_with_hardcoded_shape = std::dynamic_pointer_cast<ngraph::opset6::Constant>(node);
|
||||
// Replacing the problematic Constant with a new one. Do this for all the problematic Constants in the network, then
|
||||
// you can apply the reshape feature.
|
||||
ngraph::replace_node(const_with_hardcoded_shape, new_const);
|
||||
}
|
||||
}
|
||||
```
|
||||
* **Kaldi** - Kaldi models already contain `Assign`/`ReadValue` (Memory) operations after model conversion. The `TensorIterator`/`Loop` operations are not generated.
|
||||
|
||||
@@ -12,14 +12,13 @@
|
||||
openvino_docs_Runtime_Inference_Modes_Overview
|
||||
openvino_docs_OV_UG_Working_with_devices
|
||||
openvino_docs_OV_UG_ShapeInference
|
||||
openvino_docs_OV_UG_Preprocessing_Overview
|
||||
openvino_docs_OV_UG_DynamicShapes
|
||||
openvino_docs_OV_UG_Performance_Hints
|
||||
openvino_docs_OV_UG_network_state_intro
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
OpenVINO Runtime is a set of C++ libraries with C and Python bindings providing a common API to deliver inference solutions on the platform of your choice. Use the OpenVINO Runtime API to read an Intermediate Representation (IR), ONNX, or PaddlePaddle model and execute it on preferred devices.
|
||||
OpenVINO Runtime is a set of C++ libraries with C and Python bindings providing a common API to deliver inference solutions on the platform of your choice. Use the OpenVINO Runtime API to read an Intermediate Representation (IR),
|
||||
TensorFlow (check [TensorFlow Frontend Capabilities and Limitations](../resources/tensorflow_frontend.md)), ONNX, or PaddlePaddle model and execute it on preferred devices.
|
||||
|
||||
OpenVINO Runtime uses a plugin architecture. Its plugins are software components that contain complete implementation for inference on a particular Intel® hardware device: CPU, GPU, VPU, etc. Each plugin implements the unified API and provides additional hardware-specific APIs for configuring devices or API interoperability between OpenVINO Runtime and underlying plugin backend.
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ Previously, a certain level of automatic configuration was the result of the *de
|
||||
The hints, in contrast, respect the actual model, so the parameters for optimal throughput are calculated for each model individually (based on its compute versus memory bandwidth requirements and capabilities of the device).
|
||||
|
||||
## Performance Hints: Latency and Throughput
|
||||
As discussed in the [Optimization Guide](../optimization_guide/dldt_optimization_guide.md) there are a few different metrics associated with inference speed.
|
||||
As discussed in the [Optimization Guide](../optimization_guide/dldt_deployment_optimization_guide.md) there are a few different metrics associated with inference speed.
|
||||
Throughput and latency are some of the most widely used metrics that measure the overall performance of an application.
|
||||
|
||||
Therefore, in order to ease the configuration of the device, OpenVINO offers two dedicated hints, namely `ov::hint::PerformanceMode::THROUGHPUT` and `ov::hint::PerformanceMode::LATENCY`.
|
||||
|
||||
@@ -10,7 +10,7 @@ Most available preprocessing steps can also be performed via command-line option
|
||||
## Code example - Saving Model with Preprocessing to OpenVINO IR
|
||||
|
||||
When some preprocessing steps cannot be integrated into the execution graph using Model Optimizer command-line options (for example, `YUV`->`RGB` color space conversion, `Resize`, etc.), it is possible to write a simple code which:
|
||||
- Reads the original model (OpenVINO IR, ONNX, PaddlePaddle).
|
||||
- Reads the original model (OpenVINO IR, TensorFlow (check [TensorFlow Frontend Capabilities and Limitations](../resources/tensorflow_frontend.md)), ONNX, PaddlePaddle).
|
||||
- Adds the preprocessing/postprocessing steps.
|
||||
- Saves resulting model as IR (`.xml` and `.bin`).
|
||||
|
||||
|
||||
@@ -17,7 +17,8 @@ This guide presents how to use OpenVINO securely with protected models.
|
||||
After a model is optimized by the OpenVINO Model Optimizer, it's deployed
|
||||
to target devices in the OpenVINO Intermediate Representation (OpenVINO IR) format. An optimized
|
||||
model is stored on edge device and is executed by the OpenVINO Runtime.
|
||||
ONNX and PDPD models can be read natively by OpenVINO Runtime as well.
|
||||
TensorFlow (check [TensorFlow Frontend Capabilities and Limitations](../resources/tensorflow_frontend.md)), ONNX
|
||||
and PaddlePaddle models can be read natively by OpenVINO Runtime as well.
|
||||
|
||||
Encrypting and optimizing model before deploying it to the edge device can be
|
||||
used to protect deep-learning models. The edge device should keep the stored model
|
||||
|
||||
@@ -6,9 +6,7 @@ The Arm® CPU plugin is developed in order to enable deep neural networks infere
|
||||
|
||||
> **NOTE**: This is a community-level add-on to OpenVINO™. Intel® welcomes community participation in the OpenVINO™ ecosystem, technical questions and code contributions on community forums. However, this component has not undergone full release validation or qualification from Intel®, hence no official support is offered.
|
||||
|
||||
The Arm® CPU plugin is not a part of the Intel® Distribution of OpenVINO™ toolkit and is not distributed in the pre-built form. The plugin should be built from the source code for use. Plugin build procedure is described in [How to build Arm® CPU plugin](https://github.com/openvinotoolkit/openvino_contrib/wiki/How-to-build-ARM-CPU-plugin) guide.
|
||||
|
||||
The set of supported layers is defined on the [Op-set specification page](https://github.com/openvinotoolkit/openvino_contrib/wiki/ARM-plugin-operation-set-specification).
|
||||
The set of supported layers and their limitations are defined on the [Op-set specification page](https://github.com/openvinotoolkit/openvino_contrib/wiki/ARM-plugin-operation-set-specification).
|
||||
|
||||
|
||||
## Supported Inference Data Types
|
||||
@@ -61,28 +59,7 @@ In order to take effect, all parameters must be set before calling `ov::Core::co
|
||||
- ov::device::capabilities
|
||||
|
||||
|
||||
## Known Layers Limitation
|
||||
* `AvgPool` layer is supported via arm_compute library for 4D input tensor and via reference implementation for other cases.
|
||||
* `BatchToSpace` layer is supported for 4D tensors only and constant nodes: `block_shape` with `N` = 1 and `C`= 1, `crops_begin` with zero values and `crops_end` with zero values.
|
||||
* `ConvertLike` layer is supported for configuration like `Convert`.
|
||||
* `DepthToSpace` layer is supported for 4D tensors only and for `BLOCKS_FIRST` of `mode` attribute.
|
||||
* `Equal` does not support `broadcast` for inputs.
|
||||
* `Gather` layer is supported for constant scalar or 1D indices axes only. Layer is supported via arm_compute library for non negative indices and via reference implementation otherwise.
|
||||
* `Less` does not support `broadcast` for inputs.
|
||||
* `LessEqual` does not support `broadcast` for inputs.
|
||||
* `LRN` layer is supported for `axes = {1}` or `axes = {2, 3}` only.
|
||||
* `MaxPool-1` layer is supported via arm_compute library for 4D input tensor and via reference implementation for other cases.
|
||||
* `Mod` layer is supported for f32 only.
|
||||
* `MVN` layer is supported via arm_compute library for 2D inputs and `false` value of `normalize_variance` and `false` value of `across_channels`, for other cases layer is implemented via runtime reference.
|
||||
* `Normalize` layer is supported via arm_compute library with `MAX` value of `eps_mode` and `axes = {2 | 3}`, and for `ADD` value of `eps_mode` layer uses `DecomposeNormalizeL2Add`. For other cases layer is implemented via runtime reference.
|
||||
* `NotEqual` does not support `broadcast` for inputs.
|
||||
* `Pad` layer works with `pad_mode = {REFLECT | CONSTANT | SYMMETRIC}` parameters only.
|
||||
* `Round` layer is supported via arm_compute library with `RoundMode::HALF_AWAY_FROM_ZERO` value of `mode`, for other cases layer is implemented via runtime reference.
|
||||
* `SpaceToBatch` layer is supported for 4D tensors only and constant nodes: `shapes`, `pads_begin` or `pads_end` with zero paddings for batch or channels and one values `shapes` for batch and channels.
|
||||
* `SpaceToDepth` layer is supported for 4D tensors only and for `BLOCKS_FIRST` of `mode` attribute.
|
||||
* `StridedSlice` layer is supported via arm_compute library for tensors with dims < 5 and zero values of `ellipsis_mask` or zero values of `new_axis_mask` and `shrink_axis_mask`. For other cases, layer is implemented via runtime reference.
|
||||
* `FakeQuantize` layer is supported via arm_compute library, in Low Precision evaluation mode for suitable models, and via runtime reference otherwise.
|
||||
|
||||
## Additional Resources
|
||||
* [Arm® plugin developer documentation](https://github.com/openvinotoolkit/openvino_contrib/blob/master/modules/arm_plugin/README.md).
|
||||
* [How to run YOLOv4 model inference using OpenVINO™ and OpenCV on Arm®](https://opencv.org/how-to-run-yolov4-using-openvino-and-opencv-on-arm/).
|
||||
* [Face recognition on Android™ using OpenVINO™ toolkit with Arm® plugin](https://opencv.org/face-recognition-on-android-using-openvino-toolkit-with-arm-plugin/).
|
||||
|
||||
@@ -136,27 +136,6 @@ CPU provides full functional support for models with dynamic shapes in terms of
|
||||
|
||||
> **NOTE**: The CPU plugin does not support tensors with dynamically changing rank. In case of an attempt to infer a model with such tensors, an exception will be thrown.
|
||||
|
||||
Dynamic shapes support introduces additional overhead on memory management and may limit internal runtime optimizations.
|
||||
The more degrees of freedom are used, the more difficult it is to achieve the best performance.
|
||||
The most flexible configuration, and the most convenient approach, is the fully undefined shape, which means that no constraints to the shape dimensions are applied.
|
||||
However, reducing the level of uncertainty results in performance gains.
|
||||
You can reduce memory consumption through memory reuse, achieving better cache locality and increasing inference performance. To do so, set dynamic shapes explicitly, with defined upper bounds.
|
||||
|
||||
@sphinxtabset
|
||||
|
||||
@sphinxtab{C++}
|
||||
@snippet docs/snippets/cpu/dynamic_shape.cpp defined_upper_bound
|
||||
@endsphinxtab
|
||||
|
||||
@sphinxtab{Python}
|
||||
@snippet docs/snippets/cpu/dynamic_shape.py defined_upper_bound
|
||||
@endsphinxtab
|
||||
|
||||
@endsphinxtabset
|
||||
|
||||
> **NOTE**: Using fully undefined shapes may result in significantly higher memory consumption compared to inferring the same model with static shapes.
|
||||
> If memory consumption is unacceptable but dynamic shapes are still required, the model can be reshaped using shapes with defined upper bounds to reduce memory footprint.
|
||||
|
||||
Some runtime optimizations work better if the model shapes are known in advance.
|
||||
Therefore, if the input data shape is not changed between inference calls, it is recommended to use a model with static shapes or reshape the existing model with the static input shape to get the best performance.
|
||||
|
||||
@@ -298,5 +277,5 @@ To enable denormals optimization in the application, the `denormals_optimization
|
||||
|
||||
## Additional Resources
|
||||
* [Supported Devices](Supported_Devices.md)
|
||||
* [Optimization guide](@ref openvino_docs_optimization_guide_dldt_optimization_guide)
|
||||
* [Optimization guide](@ref openvino_docs_deployment_optimization_guide_dldt_optimization_guide)
|
||||
* [CPU plugin developers documentation](https://github.com/openvinotoolkit/openvino/wiki/CPUPluginDevelopersDocs)
|
||||
|
||||
@@ -309,5 +309,5 @@ Since OpenVINO relies on the OpenCL kernels for the GPU implementation, many gen
|
||||
|
||||
## Additional Resources
|
||||
* [Supported Devices](Supported_Devices.md)
|
||||
* [Optimization guide](@ref openvino_docs_optimization_guide_dldt_optimization_guide)
|
||||
* [Optimization guide](@ref openvino_docs_deployment_optimization_guide_dldt_optimization_guide)
|
||||
* [GPU plugin developers documentation](https://github.com/openvinotoolkit/openvino/wiki/GPUPluginDevelopersDocs)
|
||||
|
||||
41
docs/_static/css/custom.css
vendored
@@ -14,6 +14,43 @@ main .searchForm {
|
||||
margin-top: 2rem;
|
||||
}
|
||||
|
||||
/* navigation panels override */
|
||||
/* =================================================== */
|
||||
/* hide home item in the top bar */
|
||||
ul#navbar-main-elements li:first-of-type {
|
||||
display: none;
|
||||
}
|
||||
/* items on hover */
|
||||
#bd-docs-nav div ul a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
ul#navbar-main-elements > li:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
/* first-level items in the side menu */
|
||||
#bd-docs-nav > div > ul > li {
|
||||
padding-bottom: 15px;
|
||||
}
|
||||
#bd-docs-nav > div > ul > li > a {
|
||||
color: #000000;
|
||||
font-weight: bold;
|
||||
}
|
||||
/* second level items */
|
||||
#bd-docs-nav > div > ul > li > ul {
|
||||
padding-left: 0.3rem;
|
||||
}
|
||||
/* overwrite menu chevron directions for open and closed states */
|
||||
.toctree-checkbox~label i {
|
||||
transform: rotate(270deg);
|
||||
}
|
||||
.toctree-checkbox:checked~label i {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/* footer links */
|
||||
/* =================================================== */
|
||||
footer div.container div.footer-item p a {
|
||||
@@ -136,6 +173,8 @@ div.highlight {
|
||||
color: #000;
|
||||
}
|
||||
|
||||
/* responsiveness */
|
||||
/* =================================================== */
|
||||
@media (max-width: 720px) {
|
||||
.transition-banner {
|
||||
margin-top: 2rem;
|
||||
@@ -488,7 +527,7 @@ div.highlight {
|
||||
}
|
||||
|
||||
|
||||
/* content formatting for the pages */
|
||||
/* content formatting for the benchmark pages */
|
||||
.picker-options {
|
||||
margin: 15px 0;
|
||||
}
|
||||
|
||||
177
docs/_static/css/homepage_style.css
vendored
@@ -8,181 +8,94 @@
|
||||
img {
|
||||
cursor: default;
|
||||
}
|
||||
/* === OPENVINO INTRO ================================================= */
|
||||
|
||||
.openvino-intro-text {
|
||||
|
||||
font-size: 1.3em;
|
||||
}
|
||||
/* === OPENVINO DIAGRAM ================================================= */
|
||||
|
||||
@media only screen and (min-width: 1100px) {
|
||||
.openvino-diagram {
|
||||
width: 70%;
|
||||
}
|
||||
}
|
||||
|
||||
@media only screen and (max-width: 1099px) {
|
||||
.openvino-diagram {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* === SEPARATOR HEADERS ================================================= */
|
||||
.HP_separator-header {
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
height: 2em;
|
||||
line-height: 2em;
|
||||
margin: 40px auto;
|
||||
|
||||
/* === PANELS ================================================= */
|
||||
|
||||
.homepage-panels {
|
||||
background: #0068B5;
|
||||
border: none!important;
|
||||
border-radius: 0!important;
|
||||
}
|
||||
.homepage-panels p.card-text {
|
||||
color:white;
|
||||
}
|
||||
.homepage-panels p:first-of-type {
|
||||
border-bottom: 1px solid white;
|
||||
}
|
||||
|
||||
.HP_separator-header p {
|
||||
display: inline-block;
|
||||
line-height: 1em;
|
||||
margin: 0 auto;
|
||||
color: #0068B5;
|
||||
background-color: #fff;
|
||||
padding: 0 15px;
|
||||
}
|
||||
|
||||
.HP_separator-header::before {
|
||||
display: block;
|
||||
content: "";
|
||||
position: relative;
|
||||
top: 1em;
|
||||
margin: 0;
|
||||
border-top: 1px solid #0068B5;
|
||||
width: 100%;
|
||||
z-index: -1;
|
||||
}
|
||||
|
||||
img.HP_img_chart {
|
||||
width: 90%;
|
||||
display: block;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
/* === OV workflow chart ===================================================== */
|
||||
#HP_flow-container {
|
||||
/* === OV workflow chart ===================================================== */
|
||||
#hp-flow-container {
|
||||
margin: 0 auto;
|
||||
width: 90%;
|
||||
}
|
||||
|
||||
#HP_flow-container div {
|
||||
#hp-flow-container div {
|
||||
margin: 0;
|
||||
float: left;
|
||||
}
|
||||
|
||||
div.HP_flow-arrow {
|
||||
div.hp-flow-arrow {
|
||||
width: 5%;
|
||||
padding: 40px 5px;
|
||||
}
|
||||
|
||||
div.HP_flow-btn {
|
||||
div.hp-flow-btn {
|
||||
width: 30%;
|
||||
background-repeat: no-repeat;
|
||||
}
|
||||
|
||||
#HP_flow-container div.HP_flow-btn:nth-of-type(1) {
|
||||
#hp-flow-container div.hp-flow-btn:nth-of-type(1) {
|
||||
background-image: url("../images/OV_flow_model.svg");
|
||||
}
|
||||
#HP_flow-container div.HP_flow-btn:nth-of-type(3) {
|
||||
#hp-flow-container div.hp-flow-btn:nth-of-type(3) {
|
||||
background-image: url("../images/OV_flow_optimization.svg");
|
||||
}
|
||||
#HP_flow-container div.HP_flow-btn:nth-of-type(5) {
|
||||
#hp-flow-container div.hp-flow-btn:nth-of-type(5) {
|
||||
background-image: url("../images/OV_flow_deployment.svg");
|
||||
}
|
||||
|
||||
div.HP_flow-btn a img {
|
||||
div.hp-flow-btn a img {
|
||||
width: 100%;
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
div.HP_flow-btn:hover a img {
|
||||
div.hp-flow-btn:hover a img {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
|
||||
/* === INFORMATION BOXES ================================================= */
|
||||
.HP_infoboxes {
|
||||
width: 100%;
|
||||
height: auto;
|
||||
margin: 0;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
.HP_infoboxes a {
|
||||
background-color: #0068B5;
|
||||
height: 150px;
|
||||
padding: 5px 2%;
|
||||
float: left;
|
||||
margin: 0 1% 10px 1%;
|
||||
width: 32%;
|
||||
}
|
||||
|
||||
.HP_infoboxes a p,
|
||||
.HP_infoboxes a h3 {
|
||||
color: #fff;
|
||||
margin: 0;
|
||||
text-align: left;
|
||||
|
||||
transition: all 0.5s;
|
||||
-webkit-transition: all 0.5s;
|
||||
-moz-transition: all 0.5s;
|
||||
-o-transition: all 0.5s;
|
||||
-ms-transition: all 0.5s;
|
||||
}
|
||||
.HP_infoboxes a h3 {
|
||||
display: table-cell;
|
||||
width: 300px;
|
||||
vertical-align: middle;
|
||||
height: 35px;
|
||||
border-bottom: solid 1px #fff;
|
||||
font-weight: bold;
|
||||
font-size: 1em;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
.HP_infoboxes a p {
|
||||
font-size: 0.9em;
|
||||
line-height: 1.5em;
|
||||
margin-top: 5px;
|
||||
}
|
||||
|
||||
.HP_infoboxes a:hover h3 {
|
||||
font-size: 1.1em;
|
||||
text-decoration:none !important;
|
||||
}
|
||||
/*.HP_infoboxes a:hover p {
|
||||
margin-top: 10px;
|
||||
text-decoration:none !important;
|
||||
}*/
|
||||
|
||||
/* === screen wide =================================================== */
|
||||
/* =================================================================== */
|
||||
@media only screen and (min-width: 901px) {
|
||||
.HP_infoboxes a {
|
||||
width: 32%;
|
||||
}
|
||||
.HP_infoboxes a:nth-of-type(1),
|
||||
.HP_infoboxes a:nth-of-type(4),
|
||||
.HP_infoboxes a:nth-of-type(7) {
|
||||
margin-left: 0;
|
||||
}
|
||||
.HP_infoboxes a:nth-of-type(3),
|
||||
.HP_infoboxes a:nth-of-type(6),
|
||||
.HP_infoboxes a:nth-of-type(9) {
|
||||
margin-right: 0;
|
||||
}
|
||||
}
|
||||
/* === screen narrow ================================================== */
|
||||
/* =================================================================== */
|
||||
@media only screen and (max-width: 900px) {
|
||||
.HP_infoboxes a {
|
||||
width: 48%;
|
||||
margin-left: 0;
|
||||
margin-right: 2%;
|
||||
}
|
||||
.HP_infoboxes a:nth-of-type(2n) {
|
||||
margin-left: 2%;
|
||||
margin-right: 0;
|
||||
}
|
||||
}
|
||||
/* =================================================================== */
|
||||
@media only screen and (max-width: 500px) {
|
||||
#HP_flow-container div {
|
||||
#hp-flow-container div {
|
||||
float: none;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
div.HP_flow-arrow {
|
||||
div.hp-flow-arrow {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.HP_flow-btn {
|
||||
div.hp-flow-btn {
|
||||
width: 50%;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
3
docs/_static/images/openvino_chart.png
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f793cd6cdacdc7a1491ed7be8b9667668d9058baebcbd4e16eca1af7dda61d30
|
||||
size 517122
|
||||
96
docs/_static/images/openvino_diagram.svg
vendored
Normal file
|
After Width: | Height: | Size: 352 KiB |
232
docs/_static/images/state_network_example.svg
vendored
Normal file
@@ -0,0 +1,232 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="512"
|
||||
height="528"
|
||||
overflow="hidden"
|
||||
version="1.1"
|
||||
id="svg325"
|
||||
sodipodi:docname="state_network_example (1).svg"
|
||||
inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview327"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
showgrid="false"
|
||||
inkscape:zoom="1.6742424"
|
||||
inkscape:cx="255.93665"
|
||||
inkscape:cy="264"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1129"
|
||||
inkscape:window-x="-8"
|
||||
inkscape:window-y="-8"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg325" />
|
||||
<defs
|
||||
id="defs283">
|
||||
<clipPath
|
||||
id="clip0">
|
||||
<rect
|
||||
x="0"
|
||||
y="0"
|
||||
width="512"
|
||||
height="528"
|
||||
id="rect280" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
<g
|
||||
clip-path="url(#clip0)"
|
||||
id="g323">
|
||||
<path
|
||||
id="rect285"
|
||||
style="fill:#ffffff"
|
||||
d="M 0,0 H 512 V 528 H 0 Z" />
|
||||
<path
|
||||
d="M52.5001 88.8335C52.5001 84.2311 56.2311 80.5001 60.8335 80.5001L180.167 80.5001C184.769 80.5001 188.5 84.2311 188.5 88.8335L188.5 122.167C188.5 126.769 184.769 130.5 180.167 130.5L60.8335 130.5C56.2311 130.5 52.5001 126.769 52.5001 122.167Z"
|
||||
stroke="#000000"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path287" />
|
||||
<g
|
||||
aria-label="input"
|
||||
transform="translate(93.5833 114)"
|
||||
id="text289"
|
||||
style="font-size:24px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 1.392,-15.48 h 2.16 v 2.136 H 1.392 Z M 1.488,0 V -11.592 H 3.456 V 0 Z"
|
||||
id="path434" />
|
||||
<path
|
||||
d="m 6.215985,-11.592 h 1.92 v 1.8 q 0.696,-1.008 1.752,-1.536 1.08,-0.552 2.4,-0.552 2.304,0 3.6,1.488 1.296,1.488 1.296,4.08 V 0 h -1.944 v -6.192 q 0,-1.824 -0.912,-2.88 -0.888,-1.056 -2.496,-1.056 -1.656,0 -2.664,1.128 -1.008,1.128 -1.008,2.928 V 0 h -1.944 z"
|
||||
id="path436" />
|
||||
<path
|
||||
d="m 21.527966,4.32 h -1.944 v -15.912 h 1.92 v 1.872 q 0.768,-1.008 1.896,-1.56 1.128,-0.576 2.4,-0.576 2.448,0 4.08,1.704 1.632,1.68 1.632,4.32 0,2.664 -1.704,4.392 -1.68,1.704 -4.152,1.704 -1.176,0 -2.28,-0.48 -1.08,-0.504 -1.848,-1.416 z m 4.056,-14.448 q -1.752,0 -2.952,1.272 -1.176,1.248 -1.176,3.12 0,1.848 1.152,3.072 1.176,1.224 2.904,1.224 1.776,0 2.88,-1.176 1.128,-1.2 1.128,-3.168 0,-1.944 -1.104,-3.144 -1.08,-1.2 -2.832,-1.2 z"
|
||||
id="path438" />
|
||||
<path
|
||||
d="m 44.375951,0 h -1.944 v -1.776 q -0.696,1.008 -1.752,1.536 -1.056,0.504 -2.4,0.504 -2.28,0 -3.6,-1.488 -1.296,-1.488 -1.296,-4.056 v -6.312 h 1.968 V -5.4 q 0,1.824 0.888,2.88 0.912,1.056 2.52,1.056 1.632,0 2.64,-1.128 1.008,-1.128 1.008,-2.928 v -6.072 h 1.968 z"
|
||||
id="path440" />
|
||||
<path
|
||||
d="m 47.543932,-3.096 v -6.768 h -1.584 v -1.728 h 1.584 v -3.552 h 1.944 v 3.552 h 2.688 v 1.728 h -2.688 v 6.6 q 0,0.768 0.336,1.152 0.36,0.36 1.08,0.36 h 1.272 V 0 h -1.56 q -1.608,0 -2.352,-0.744 -0.72,-0.744 -0.72,-2.352 z"
|
||||
id="path442" />
|
||||
</g>
|
||||
<path
|
||||
d="M308.5 88.8335C308.5 84.2311 312.231 80.5001 316.833 80.5001L436.167 80.5001C440.769 80.5001 444.5 84.2311 444.5 88.8335L444.5 122.167C444.5 126.769 440.769 130.5 436.167 130.5L316.833 130.5C312.231 130.5 308.5 126.769 308.5 122.167Z"
|
||||
stroke="#000000"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path291" />
|
||||
<g
|
||||
aria-label="read"
|
||||
transform="translate(352.333 114)"
|
||||
id="text293"
|
||||
style="font-size:24px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 1.272,-11.592 h 1.92 v 1.464 q 0.528,-0.768 1.368,-1.152 0.84,-0.384 1.992,-0.384 h 1.056 v 1.848 h -1.08 q -1.656,0 -2.496,0.864 -0.816,0.864 -0.816,2.616 V 0 H 1.272 Z"
|
||||
id="path445" />
|
||||
<path
|
||||
d="m 14.471986,-1.296 q 1.152,0 2.112,-0.48 0.984,-0.504 1.8,-1.512 l 1.176,1.08 q -0.984,1.296 -2.304,1.944 -1.32,0.624 -2.952,0.624 -2.616,0 -4.296,-1.704 -1.6559995,-1.728 -1.6559995,-4.416 0,-2.616 1.7039995,-4.368 1.704,-1.752 4.2,-1.752 2.544,0 4.128,1.656 1.584,1.656 1.584,4.32 0,0.192 -0.024,0.456 -0.024,0.24 -0.048,0.36 h -9.528 q 0.192,1.704 1.32,2.76 1.152,1.032 2.784,1.032 z m -0.192,-8.928 q -1.512,0 -2.592,0.984 -1.08,0.984 -1.32,2.64 h 7.632 q -0.216,-1.68 -1.224,-2.64 -1.008,-0.984 -2.496,-0.984 z"
|
||||
id="path447" />
|
||||
<path
|
||||
d="m 31.896001,0 h -1.896 v -1.896 q -0.696,1.008 -1.896,1.584 -1.176,0.552 -2.544,0.552 -1.92,0 -3.12,-0.96 -1.2,-0.984 -1.2,-2.568 0,-1.752 1.344,-2.64 1.344,-0.912 3.984,-0.912 h 3.408 q -0.024,-1.632 -0.864,-2.52 -0.816,-0.888 -2.352,-0.888 -0.912,0 -1.8,0.384 -0.888,0.384 -1.752,1.176 l -1.032,-1.296 q 1.008,-0.96 2.208,-1.44 1.2,-0.504 2.568,-0.504 2.328,0 3.624,1.392 1.32,1.368 1.32,3.792 z m -1.92,-5.328 h -3.384 q -1.752,0 -2.592,0.48 -0.84,0.48 -0.84,1.512 0,0.864 0.72,1.44 0.744,0.576 1.896,0.576 1.728,0 2.904,-1.104 1.2,-1.128 1.296,-2.904 z"
|
||||
id="path449" />
|
||||
<path
|
||||
d="m 43.751987,-16.8 h 1.968 V 0 h -1.944 v -1.872 q -0.792,1.008 -1.92,1.56 -1.104,0.552 -2.376,0.552 -2.424,0 -4.08,-1.68 -1.632,-1.704 -1.632,-4.32 0,-2.64 1.68,-4.368 1.704,-1.752 4.176,-1.752 1.2,0 2.256,0.504 1.08,0.48 1.872,1.392 z m -4.056,15.336 q 1.752,0 2.928,-1.248 1.2,-1.272 1.2,-3.12 0,-1.872 -1.152,-3.096 -1.152,-1.224 -2.88,-1.224 -1.776,0 -2.904,1.2 -1.128,1.2 -1.128,3.144 0,1.944 1.104,3.144 1.104,1.2 2.832,1.2 z"
|
||||
id="path451" />
|
||||
</g>
|
||||
<path
|
||||
d="M173.5 208.833C173.5 204.231 177.231 200.5 181.833 200.5L300.167 200.5C304.769 200.5 308.5 204.231 308.5 208.833L308.5 242.167C308.5 246.769 304.769 250.5 300.167 250.5L181.833 250.5C177.231 250.5 173.5 246.769 173.5 242.167Z"
|
||||
stroke="#000000"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path295" />
|
||||
<g
|
||||
aria-label="add"
|
||||
transform="translate(220.561 234)"
|
||||
id="text297"
|
||||
style="font-size:24px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 11.232,0 H 9.336 V -1.896 Q 8.64,-0.888 7.44,-0.312 6.264,0.24 4.896,0.24 q -1.92,0 -3.12,-0.96 -1.2,-0.984 -1.2,-2.568 0,-1.752 1.344,-2.64 1.344,-0.912 3.984,-0.912 h 3.408 q -0.024,-1.632 -0.864,-2.52 -0.816,-0.888 -2.352,-0.888 -0.912,0 -1.8,0.384 -0.888,0.384 -1.752,1.176 L 1.512,-9.984 q 1.008,-0.96 2.208,-1.44 1.2,-0.504 2.568,-0.504 2.328,0 3.624,1.392 1.32,1.368 1.32,3.792 z M 9.312,-5.328 H 5.928 q -1.752,0 -2.592,0.48 -0.84,0.48 -0.84,1.512 0,0.864 0.72,1.44 0.744,0.576 1.896,0.576 1.728,0 2.904,-1.104 1.2,-1.128 1.296,-2.904 z"
|
||||
id="path454" />
|
||||
<path
|
||||
d="m 23.087985,-16.8 h 1.968 V 0 h -1.944 v -1.872 q -0.792,1.008 -1.92,1.56 -1.104,0.552 -2.376,0.552 -2.424,0 -4.08,-1.68 -1.632,-1.704 -1.632,-4.32 0,-2.64 1.68,-4.368 1.704,-1.752 4.176,-1.752 1.2,0 2.256,0.504 1.08,0.48 1.872,1.392 z m -4.056,15.336 q 1.752,0 2.928,-1.248 1.2,-1.272 1.2,-3.12 0,-1.872 -1.152,-3.096 -1.152,-1.224 -2.88,-1.224 -1.776,0 -2.904,1.2 -1.128,1.2 -1.128,3.144 0,1.944 1.104,3.144 1.104,1.2 2.832,1.2 z"
|
||||
id="path456" />
|
||||
<path
|
||||
d="m 37.03197,-16.8 h 1.968 V 0 h -1.944 v -1.872 q -0.792,1.008 -1.92,1.56 -1.104,0.552 -2.376,0.552 -2.424,0 -4.08,-1.68 -1.632,-1.704 -1.632,-4.32 0,-2.64 1.68,-4.368 1.704,-1.752 4.176,-1.752 1.2,0 2.256,0.504 1.08,0.48 1.872,1.392 z m -4.056,15.336 q 1.752,0 2.928,-1.248 1.2,-1.272 1.2,-3.12 0,-1.872 -1.152,-3.096 -1.152,-1.224 -2.88,-1.224 -1.776,0 -2.904,1.2 -1.128,1.2 -1.128,3.144 0,1.944 1.104,3.144 1.104,1.2 2.832,1.2 z"
|
||||
id="path458" />
|
||||
</g>
|
||||
<path
|
||||
d="M308.5 317.833C308.5 313.231 312.231 309.5 316.833 309.5L436.167 309.5C440.769 309.5 444.5 313.231 444.5 317.833L444.5 351.167C444.5 355.769 440.769 359.5 436.167 359.5L316.833 359.5C312.231 359.5 308.5 355.769 308.5 351.167Z"
|
||||
stroke="#000000"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path299" />
|
||||
<g
|
||||
aria-label="add"
|
||||
transform="translate(355.833 343)"
|
||||
id="text301"
|
||||
style="font-size:24px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 11.232,0 H 9.336 V -1.896 Q 8.64,-0.888 7.44,-0.312 6.264,0.24 4.896,0.24 q -1.92,0 -3.12,-0.96 -1.2,-0.984 -1.2,-2.568 0,-1.752 1.344,-2.64 1.344,-0.912 3.984,-0.912 h 3.408 q -0.024,-1.632 -0.864,-2.52 -0.816,-0.888 -2.352,-0.888 -0.912,0 -1.8,0.384 -0.888,0.384 -1.752,1.176 L 1.512,-9.984 q 1.008,-0.96 2.208,-1.44 1.2,-0.504 2.568,-0.504 2.328,0 3.624,1.392 1.32,1.368 1.32,3.792 z M 9.312,-5.328 H 5.928 q -1.752,0 -2.592,0.48 -0.84,0.48 -0.84,1.512 0,0.864 0.72,1.44 0.744,0.576 1.896,0.576 1.728,0 2.904,-1.104 1.2,-1.128 1.296,-2.904 z"
|
||||
id="path461" />
|
||||
<path
|
||||
d="m 23.087985,-16.8 h 1.968 V 0 h -1.944 v -1.872 q -0.792,1.008 -1.92,1.56 -1.104,0.552 -2.376,0.552 -2.424,0 -4.08,-1.68 -1.632,-1.704 -1.632,-4.32 0,-2.64 1.68,-4.368 1.704,-1.752 4.176,-1.752 1.2,0 2.256,0.504 1.08,0.48 1.872,1.392 z m -4.056,15.336 q 1.752,0 2.928,-1.248 1.2,-1.272 1.2,-3.12 0,-1.872 -1.152,-3.096 -1.152,-1.224 -2.88,-1.224 -1.776,0 -2.904,1.2 -1.128,1.2 -1.128,3.144 0,1.944 1.104,3.144 1.104,1.2 2.832,1.2 z"
|
||||
id="path463" />
|
||||
<path
|
||||
d="m 37.03197,-16.8 h 1.968 V 0 h -1.944 v -1.872 q -0.792,1.008 -1.92,1.56 -1.104,0.552 -2.376,0.552 -2.424,0 -4.08,-1.68 -1.632,-1.704 -1.632,-4.32 0,-2.64 1.68,-4.368 1.704,-1.752 4.176,-1.752 1.2,0 2.256,0.504 1.08,0.48 1.872,1.392 z m -4.056,15.336 q 1.752,0 2.928,-1.248 1.2,-1.272 1.2,-3.12 0,-1.872 -1.152,-3.096 -1.152,-1.224 -2.88,-1.224 -1.776,0 -2.904,1.2 -1.128,1.2 -1.128,3.144 0,1.944 1.104,3.144 1.104,1.2 2.832,1.2 z"
|
||||
id="path465" />
|
||||
</g>
|
||||
<path
|
||||
d="M52.5001 317.833C52.5001 313.231 56.2311 309.5 60.8335 309.5L180.167 309.5C184.769 309.5 188.5 313.231 188.5 317.833L188.5 351.167C188.5 355.769 184.769 359.5 180.167 359.5L60.8335 359.5C56.2311 359.5 52.5001 355.769 52.5001 351.167Z"
|
||||
stroke="#000000"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path303" />
|
||||
<g
|
||||
aria-label="save"
|
||||
transform="translate(95.5 343)"
|
||||
id="text305"
|
||||
style="font-size:24px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="m 10.608,-3.384 q 0,1.632 -1.392,2.664 -1.392,1.032 -3.624,1.032 -1.464,0 -2.76,-0.456 Q 1.56,-0.624 0.528,-1.56 l 1.008,-1.488 q 1.104,0.912 2.064,1.32 0.96,0.408 2.04,0.408 1.392,0 2.232,-0.528 0.864,-0.552 0.864,-1.416 0,-0.888 -0.72,-1.272 Q 7.296,-4.944 5.64,-4.968 3.216,-5.064 2.088,-5.856 0.984,-6.648 0.984,-8.328 q 0,-1.536 1.272,-2.544 1.296,-1.032 3.312,-1.032 1.368,0 2.568,0.432 1.2,0.432 2.184,1.272 L 9.384,-8.736 Q 8.4,-9.528 7.488,-9.888 q -0.912,-0.36 -1.944,-0.36 -1.176,0 -1.944,0.528 -0.744,0.528 -0.744,1.296 0,0.84 0.672,1.224 0.672,0.384 2.136,0.456 2.616,0.072 3.768,0.864 1.176,0.792 1.176,2.496 z"
|
||||
id="path468" />
|
||||
<path
|
||||
d="m 22.536016,0 h -1.896 v -1.896 q -0.696,1.008 -1.896,1.584 -1.176,0.552 -2.544,0.552 -1.92,0 -3.12,-0.96 -1.2,-0.984 -1.2,-2.568 0,-1.752 1.344,-2.64 1.344,-0.912 3.984,-0.912 h 3.408 q -0.024,-1.632 -0.864,-2.52 -0.816,-0.888 -2.352,-0.888 -0.912,0 -1.8,0.384 -0.888,0.384 -1.752,1.176 l -1.032,-1.296 q 1.008,-0.96 2.208,-1.44 1.2,-0.504 2.568,-0.504 2.328,0 3.624,1.392 1.32,1.368 1.32,3.792 z m -1.92,-5.328 h -3.384 q -1.752,0 -2.592,0.48 -0.84,0.48 -0.84,1.512 0,0.864 0.72,1.44 0.744,0.576 1.896,0.576 1.728,0 2.904,-1.104 1.2,-1.128 1.296,-2.904 z"
|
||||
id="path470" />
|
||||
<path
|
||||
d="m 28.344011,0 -4.968,-11.592 h 2.136 l 3.768,9.264 3.744,-9.264 h 2.016 L 30.120011,0 Z"
|
||||
id="path472" />
|
||||
<path
|
||||
d="m 41.495973,-1.296 q 1.152,0 2.112,-0.48 0.984,-0.504 1.8,-1.512 l 1.176,1.08 q -0.984,1.296 -2.304,1.944 -1.32,0.624 -2.952,0.624 -2.616,0 -4.296,-1.704 -1.656,-1.728 -1.656,-4.416 0,-2.616 1.704,-4.368 1.704,-1.752 4.2,-1.752 2.544,0 4.128,1.656 1.584,1.656 1.584,4.32 0,0.192 -0.024,0.456 -0.024,0.24 -0.048,0.36 h -9.528 q 0.192,1.704 1.32,2.76 1.152,1.032 2.784,1.032 z m -0.192,-8.928 q -1.512,0 -2.592,0.984 -1.08,0.984 -1.32,2.64 h 7.632 q -0.216,-1.68 -1.224,-2.64 -1.008,-0.984 -2.496,-0.984 z"
|
||||
id="path474" />
|
||||
</g>
|
||||
<path
|
||||
d="M367.5 431.833C367.5 427.231 371.231 423.5 375.833 423.5L494.167 423.5C498.769 423.5 502.5 427.231 502.5 431.833L502.5 465.167C502.5 469.769 498.769 473.5 494.167 473.5L375.833 473.5C371.231 473.5 367.5 469.769 367.5 465.167Z"
|
||||
stroke="#000000"
|
||||
stroke-width="1.33333"
|
||||
stroke-miterlimit="8"
|
||||
fill="#FFFFFF"
|
||||
fill-rule="evenodd"
|
||||
id="path307" />
|
||||
<g
|
||||
aria-label="Result"
|
||||
transform="translate(401.265 457)"
|
||||
id="text309"
|
||||
style="font-size:24px;font-family:'IntelOne Display Regular', 'IntelOne Display Regular_MSFontService', sans-serif">
|
||||
<path
|
||||
d="M 10.56,-7.08 15.336,0 H 12.888 L 8.352,-6.84 H 3.624 V 0 H 1.56 v -16.8 h 7.392 q 2.832,0 4.416,1.32 1.584,1.296 1.584,3.576 0,1.872 -1.152,3.144 -1.152,1.272 -3.24,1.68 z m 2.304,-4.776 q 0,-1.464 -1.056,-2.28 -1.032,-0.84 -2.904,-0.84 h -5.28 v 6.336 h 5.112 q 1.896,0 3,-0.864 1.128,-0.888 1.128,-2.352 z"
|
||||
id="path477" />
|
||||
<path
|
||||
d="m 22.535997,-1.296 q 1.152,0 2.112,-0.48 0.984,-0.504 1.8,-1.512 l 1.176,1.08 q -0.984,1.296 -2.304,1.944 -1.32,0.624 -2.952,0.624 -2.616,0 -4.296,-1.704 -1.656,-1.728 -1.656,-4.416 0,-2.616 1.704,-4.368 1.704,-1.752 4.2,-1.752 2.544,0 4.128,1.656 1.584,1.656 1.584,4.32 0,0.192 -0.024,0.456 -0.024,0.24 -0.048,0.36 h -9.528 q 0.192,1.704 1.32,2.76 1.152,1.032 2.784,1.032 z m -0.192,-8.928 q -1.512,0 -2.592,0.984 -1.08,0.984 -1.32,2.64 h 7.632 q -0.216,-1.68 -1.224,-2.64 -1.008,-0.984 -2.496,-0.984 z"
|
||||
id="path479" />
|
||||
<path
|
||||
d="m 39.336012,-3.384 q 0,1.632 -1.392,2.664 -1.392,1.032 -3.624,1.032 -1.464,0 -2.76,-0.456 -1.272,-0.48 -2.304,-1.416 l 1.008,-1.488 q 1.104,0.912 2.064,1.32 0.96,0.408 2.04,0.408 1.392,0 2.232,-0.528 0.864,-0.552 0.864,-1.416 0,-0.888 -0.72,-1.272 -0.72,-0.408 -2.376,-0.432 -2.424,-0.096 -3.552,-0.888 -1.104,-0.792 -1.104,-2.472 0,-1.536 1.272,-2.544 1.296,-1.032 3.312,-1.032 1.368,0 2.568,0.432 1.2,0.432 2.184,1.272 l -0.936,1.464 q -0.984,-0.792 -1.896,-1.152 -0.912,-0.36 -1.944,-0.36 -1.176,0 -1.944,0.528 -0.744,0.528 -0.744,1.296 0,0.84 0.672,1.224 0.672,0.384 2.136,0.456 2.616,0.072 3.768,0.864 1.176,0.792 1.176,2.496 z"
|
||||
id="path481" />
|
||||
<path
|
||||
d="m 52.152028,0 h -1.944 v -1.776 q -0.696,1.008 -1.752,1.536 -1.056,0.504 -2.4,0.504 -2.28,0 -3.6,-1.488 -1.296,-1.488 -1.296,-4.056 v -6.312 h 1.968 V -5.4 q 0,1.824 0.888,2.88 0.912,1.056 2.52,1.056 1.632,0 2.64,-1.128 1.008,-1.128 1.008,-2.928 v -6.072 h 1.968 z"
|
||||
id="path483" />
|
||||
<path
|
||||
d="m 54.888009,-16.8 h 1.968 V 0 h -1.968 z"
|
||||
id="path485" />
|
||||
<path
|
||||
d="m 60.263994,-3.096 v -6.768 h -1.584 v -1.728 h 1.584 v -3.552 h 1.944 v 3.552 h 2.688 v 1.728 h -2.688 v 6.6 q 0,0.768 0.336,1.152 0.36,0.36 1.08,0.36 h 1.272 V 0 h -1.56 q -1.608,0 -2.352,-0.744 -0.72,-0.744 -0.72,-2.352 z"
|
||||
id="path487" />
|
||||
</g>
|
||||
<path
|
||||
d="M120.753 129.341 177.454 194.143 175.948 195.46 119.247 130.659ZM178.833 191.164 181.091 199.818 172.813 196.432Z"
|
||||
id="path311" />
|
||||
<path
|
||||
d="M0.674769-0.738029 72.1183 64.5817 70.7687 66.0577-0.674769 0.738029ZM73.1585 61.4679 76.3637 69.8182 67.7604 67.3721Z"
|
||||
transform="matrix(-1 0 0 1 376.364 130)"
|
||||
id="path313" />
|
||||
<path
|
||||
d="M414 130 414 302.243 412 302.243 412 130ZM417 300.909 413 308.909 409 300.909Z"
|
||||
id="path315" />
|
||||
<path
|
||||
d="M0.694135-0.719844 56.9861 53.5617 55.5978 55.0014-0.694135 0.719844ZM58.1087 50.4766 61.0909 58.9091 52.5556 56.2354Z"
|
||||
transform="matrix(-1 0 0 1 181.091 250)"
|
||||
id="path317" />
|
||||
<path
|
||||
d="M300.611 249.208 371.696 304.045 370.474 305.629 299.389 250.792ZM372.473 300.856 376.364 308.909 367.586 307.19Z"
|
||||
id="path319" />
|
||||
<path
|
||||
d="M376.738 358.325 430.421 417.041 428.945 418.391 375.262 359.675ZM431.736 414.033 434.182 422.636 425.832 419.431Z"
|
||||
id="path321" />
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 17 KiB |
3
docs/_static/images/supported_devices.png
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:566aab6ef86a50dad4fba5483a9b0abffc85778dccee7a0c7e98d4b09447f9b1
|
||||
size 130586
|
||||
7
docs/_static/js/open_sidebar.js
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
$(document).ready(function() {
|
||||
const elems = $( "ul.bd-sidenav > li > input" )
|
||||
console.log(elems)
|
||||
for(let i = 0; i < elems.length; i++){
|
||||
elems[i].setAttribute("checked", "checked");
|
||||
}
|
||||
})
|
||||
@@ -8,6 +8,7 @@
|
||||
|
||||
openvino_docs_performance_benchmarks_openvino
|
||||
openvino_docs_performance_benchmarks_ovms
|
||||
openvino_docs_MO_DG_Getting_Performance_Numbers
|
||||
|
||||
|
||||
@endsphinxdirective
|
||||
@@ -16,9 +17,12 @@ The [Intel® Distribution of OpenVINO™ toolkit](https://software.intel.com/c
|
||||
|
||||
The benchmark results below demonstrate high performance gains on several public neural networks on multiple Intel® CPUs, GPUs and VPUs covering a broad performance range. The results may be helpful when deciding which hardware is best for your applications and solutions or to plan AI workload on the Intel computing already included in your solutions.
|
||||
|
||||
The following benchmarks are available:
|
||||
Benchmarks are available for:
|
||||
|
||||
* [Intel® Distribution of OpenVINO™ toolkit Benchmark Results](performance_benchmarks_openvino.md).
|
||||
* [Intel® Distribution of OpenVINO™ toolkit](performance_benchmarks_openvino.md).
|
||||
* [OpenVINO™ Model Server](performance_benchmarks_ovms.md).
|
||||
|
||||
|
||||
You can also test performance for your system yourself, following the guide on [getting performance numbers](../MO_DG/prepare_model/Getting_performance_numbers.md).
|
||||
Performance of a particular application can also be evaluated virtually using [Intel® DevCloud for the Edge](https://devcloud.intel.com/edge/). It is a remote development environment with access to Intel® hardware and the latest versions of the Intel® Distribution of the OpenVINO™ Toolkit. To learn more about it, visit [the website](https://www.intel.com/content/www/us/en/developer/tools/devcloud/edge/overview.html) or [create an account](https://www.intel.com/content/www/us/en/forms/idz/devcloud-registration.html?tgt=https://www.intel.com/content/www/us/en/secure/forms/devcloud-enrollment/account-provisioning.html).
|
||||
|
||||
|
||||
@@ -1,71 +1,158 @@
|
||||
# Performance Information Frequently Asked Questions {#openvino_docs_performance_benchmarks_faq}
|
||||
# Performance Information F.A.Q. {#openvino_docs_performance_benchmarks_faq}
|
||||
|
||||
The following questions (Q#) and answers (A) are related to published [performance benchmarks](./performance_benchmarks.md).
|
||||
|
||||
#### Q1: How often do performance benchmarks get updated?
|
||||
**A**: New performance benchmarks are typically published on every `major.minor` release of the Intel® Distribution of OpenVINO™ toolkit.
|
||||
@sphinxdirective
|
||||
|
||||
#### Q2: Where can I find the models used in the performance benchmarks?
|
||||
**A**: All models used are included in the GitHub repository of [Open Model Zoo](https://github.com/openvinotoolkit/open_model_zoo).
|
||||
.. dropdown:: How often do performance benchmarks get updated?
|
||||
|
||||
#### Q3: Will there be any new models added to the list used for benchmarking?
|
||||
**A**: The models used in the performance benchmarks were chosen based on general adoption and usage in deployment scenarios. New models that support a diverse set of workloads and usage are added periodically.
|
||||
New performance benchmarks are typically published on every
|
||||
`major.minor` release of the Intel® Distribution of OpenVINO™ toolkit.
|
||||
|
||||
#### Q4: What does "CF" or "TF" in the graphs stand for?
|
||||
**A**: The "CF" means "Caffe", and "TF" means "TensorFlow".
|
||||
.. dropdown:: Where can I find the models used in the performance benchmarks?
|
||||
|
||||
#### Q5: How can I run the benchmark results on my own?
|
||||
**A**: All of the performance benchmarks were generated using the open-source tool within the Intel® Distribution of OpenVINO™ toolkit called `benchmark_app`. This tool is available in both [C++](../../samples/cpp/benchmark_app/README.md) and [Python](../../tools/benchmark_tool/README.md).
|
||||
All models used are included in the GitHub repository of `Open Model Zoo <https://github.com/openvinotoolkit/open_model_zoo>`_.
|
||||
|
||||
#### Q6: What image sizes are used for the classification network models?
|
||||
**A**: The image size used in inference depends on the benchmarked network. The table below presents the list of input sizes for each network model:
|
||||
.. dropdown:: Will there be any new models added to the list used for benchmarking?
|
||||
|
||||
| **Model** | **Public Network** | **Task** | **Input Size** (Height x Width) |
|
||||
|------------------------------------------------------------------------------------------------------------------------------------|------------------------------------|-----------------------------|-----------------------------------|
|
||||
| [bert-base-cased](https://github.com/PaddlePaddle/PaddleNLP/tree/v2.1.1) | BERT | question / answer | 124 |
|
||||
| [bert-large-uncased-whole-word-masking-squad-int8-0001](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/intel/bert-large-uncased-whole-word-masking-squad-int8-0001) | BERT-large | question / answer | 384 |
|
||||
| [bert-small-uncased-whole-word-masking-squad-0002](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/intel/bert-small-uncased-whole-word-masking-squad-0002) | BERT-small | question / answer | 384 |
|
||||
| [brain-tumor-segmentation-0001-MXNET](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/brain-tumor-segmentation-0001) | brain-tumor-segmentation-0001 | semantic segmentation | 128x128x128 |
|
||||
| [brain-tumor-segmentation-0002-CF2](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/brain-tumor-segmentation-0002) | brain-tumor-segmentation-0002 | semantic segmentation | 128x128x128 |
|
||||
| [deeplabv3-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/deeplabv3) | DeepLab v3 Tf | semantic segmentation | 513x513 |
|
||||
| [densenet-121-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/densenet-121-tf) | Densenet-121 Tf | classification | 224x224 |
|
||||
| [efficientdet-d0](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/efficientdet-d0-tf) | Efficientdet | classification | 512x512 |
|
||||
| [facenet-20180408-102900-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/facenet-20180408-102900) | FaceNet TF | face recognition | 160x160 |
|
||||
| [Facedetection0200](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/intel/face-detection-0200) | FaceDetection0200 | detection | 256x256 |
|
||||
| [faster_rcnn_resnet50_coco-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/faster_rcnn_resnet50_coco) | Faster RCNN Tf | object detection | 600x1024 |
|
||||
| [forward-tacotron-duration-prediction](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/forward-tacotron) | ForwardTacotron | text to speech | 241 |
|
||||
| [inception-v4-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/googlenet-v4-tf) | Inception v4 Tf (aka GoogleNet-V4) | classification | 299x299 |
|
||||
| [inception-v3-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/googlenet-v3) | Inception v3 Tf | classification | 299x299 |
|
||||
| [mask_rcnn_resnet50_atrous_coco](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/mask_rcnn_resnet50_atrous_coco) | Mask R-CNN ResNet50 Atrous | instance segmentation | 800x1365 |
|
||||
| [mobilenet-ssd-CF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/mobilenet-ssd) | SSD (MobileNet)_COCO-2017_Caffe | object detection | 300x300 |
|
||||
| [mobilenet-v2-1.0-224-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/mobilenet-v2-1.0-224) | MobileNet v2 Tf | classification | 224x224 |
|
||||
| [mobilenet-v2-pytorch](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/mobilenet-v2-pytorch ) | Mobilenet V2 PyTorch | classification | 224x224 |
|
||||
| [Mobilenet-V3-small](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/mobilenet-v3-small-1.0-224-tf) | Mobilenet-V3-1.0-224 | classifier | 224x224 |
|
||||
| [Mobilenet-V3-large](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/mobilenet-v3-large-1.0-224-tf) | Mobilenet-V3-1.0-224 | classifier | 224x224 |
|
||||
| [pp-ocr-rec](https://github.com/PaddlePaddle/PaddleOCR/tree/release/2.1/) | PP-OCR | optical character recognition | 32x640 |
|
||||
| [pp-yolo](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.1) | PP-YOLO | detection | 640x640 |
|
||||
| [resnet-18-pytorch](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/resnet-18-pytorch) | ResNet-18 PyTorch | classification | 224x224 |
|
||||
| [resnet-50-pytorch](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/resnet-50-pytorch) | ResNet-50 v1 PyTorch | classification | 224x224 |
|
||||
| [resnet-50-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/resnet-50-tf) | ResNet-50_v1_ILSVRC-2012 | classification | 224x224 |
|
||||
| [yolo_v4-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolo-v4-tf) | Yolo-V4 TF | object detection | 608x608 |
|
||||
| [ssd_mobilenet_v1_coco-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/ssd_mobilenet_v1_coco) | ssd_mobilenet_v1_coco | object detection | 300x300 |
|
||||
| [ssdlite_mobilenet_v2-TF](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/ssdlite_mobilenet_v2) | ssdlite_mobilenet_v2 | object detection | 300x300 |
|
||||
| [unet-camvid-onnx-0001](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/intel/unet-camvid-onnx-0001) | U-Net | semantic segmentation | 368x480 |
|
||||
| [yolo-v3-tiny-tf](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolo-v3-tiny-tf) | YOLO v3 Tiny | object detection | 416x416 |
|
||||
| [yolo-v3](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolo-v3-tf) | YOLO v3 | object detection | 416x416 |
|
||||
| [ssd-resnet34-1200-onnx](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/ssd-resnet34-1200-onnx) | ssd-resnet34 onnx model | object detection | 1200x1200 |
|
||||
The models used in the performance benchmarks were chosen based
|
||||
on general adoption and usage in deployment scenarios. New models that
|
||||
support a diverse set of workloads and usage are added periodically.
|
||||
|
||||
#### Q7: Where can I purchase the specific hardware used in the benchmarking?
|
||||
**A**: Intel partners with vendors all over the world. For a list of Hardware Manufacturers, see the [Intel® AI: In Production Partners & Solutions Catalog](https://www.intel.com/content/www/us/en/internet-of-things/ai-in-production/partners-solutions-catalog.html) . For more details, see the [Supported Devices](../OV_Runtime_UG/supported_plugins/Supported_Devices.md) documentation. Before purchasing any hardware, you can test and run models remotely, using [Intel® DevCloud for the Edge](http://devcloud.intel.com/edge/).
|
||||
.. dropdown:: How can I run the benchmark results on my own?
|
||||
|
||||
#### Q8: How can I optimize my models for better performance or accuracy?
|
||||
**A**: A set of guidelines and recommendations for optimizing models is available in the [optimization guide](../optimization_guide/dldt_optimization_guide.md). Join the conversation in the [Community Forum](https://software.intel.com/en-us/forums/intel-distribution-of-openvino-toolkit) for further support.
|
||||
All of the performance benchmarks are generated using the
|
||||
open-source tool within the Intel® Distribution of OpenVINO™ toolkit
|
||||
called `benchmark_app`. This tool is available
|
||||
`for C++ apps <https://docs.openvino.ai/2022.3/openvino_inference_engine_samples_benchmark_app_README.html>`_
|
||||
as well as
|
||||
`for Python apps <https://docs.openvino.ai/2022.3/openvino_inference_engine_tools_benchmark_tool_README.html>`_.
|
||||
|
||||
#### Q9: Why are INT8 optimized models used for benchmarking on CPUs with no VNNI support?
|
||||
**A**: The benefit of low-precision optimization using the OpenVINO™ toolkit model optimizer extends beyond processors supporting VNNI through Intel® DL Boost. The reduced bit width of INT8 compared to FP32 allows Intel® CPU to process the data faster. Therefore, it offers better throughput on any converted model, regardless of the intrinsically supported low-precision optimizations within Intel® hardware. For comparison on boost factors for different network models and a selection of Intel® CPU architectures, including AVX-2 with Intel® Core™ i7-8700T, and AVX-512 (VNNI) with Intel® Xeon® 5218T and Intel® Xeon® 8270, refer to the [Model Accuracy for INT8 and FP32 Precision](performance_int8_vs_fp32.md) article.
|
||||
For simple instructions on testing performance, see the `Getting Performance Numbers Guide <https://docs.openvino.ai/2022.3/openvino_docs_MO_DG_Getting_Performance_Numbers.html>`_.
|
||||
|
||||
#### Q10: Where can I search for OpenVINO™ performance results based on HW-platforms?
|
||||
**A**: The website format has changed in order to support a more common approach of searching for the performance results of a given neural network model on different HW-platforms, as opposed to reviewing the performance of a given HW-platform when working with different neural network models.
|
||||
.. dropdown:: What image sizes are used for the classification network models?
|
||||
|
||||
#### Q11: How is Latency measured?
|
||||
**A**: Latency is measured by running the OpenVINO™ Runtime in synchronous mode. In this mode, each frame or image is processed through the entire set of stages (pre-processing, inference, post-processing) before the next frame or image is processed. This KPI is relevant for applications where the inference on a single image is required. For example, the analysis of an ultrasound image in a medical application or the analysis of a seismic image in the oil & gas industry. Other use cases include real or near real-time applications, e.g. the response of an industrial robot to changes in its environment and obstacle avoidance for autonomous vehicles, where a quick response to the result of the inference is required.
|
||||
The image size used in inference depends on the benchmarked
|
||||
network. The table below presents the list of input sizes for each
|
||||
network model:
|
||||
|
||||
.. list-table::
|
||||
:header-rows: 1
|
||||
|
||||
* - Model
|
||||
- Public Network
|
||||
- Task
|
||||
- Input Size
|
||||
* - `bert-base-cased <https://github.com/PaddlePaddle/PaddleNLP/tree/v2.1.1>`_
|
||||
- BERT
|
||||
- question / answer
|
||||
- 124
|
||||
* - `bert-large-uncased-whole-word-masking-squad-int8-0001 <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/intel/bert-large-uncased-whole-word-masking-squad-int8-0001>`_
|
||||
- BERT-large
|
||||
- question / answer
|
||||
- 384
|
||||
* - `deeplabv3-TF <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/deeplabv3>`_
|
||||
- DeepLab v3 Tf
|
||||
- semantic segmentation
|
||||
- 513x513
|
||||
* - `densenet-121-TF <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/densenet-121-tf>`_
|
||||
- Densenet-121 Tf
|
||||
- classification
|
||||
- 224x224
|
||||
* - `efficientdet-d0 <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/efficientdet-d0-tf>`_
|
||||
- Efficientdet
|
||||
- classification
|
||||
- 512x512
|
||||
* - `faster_rcnn_resnet50_coco-TF <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/faster_rcnn_resnet50_coco>`_
|
||||
- Faster RCNN Tf
|
||||
- object detection
|
||||
- 600x1024
|
||||
* - `inception-v4-TF <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/googlenet-v4-tf>`_
|
||||
- Inception v4 Tf (aka GoogleNet-V4)
|
||||
- classification
|
||||
- 299x299
|
||||
* - `mobilenet-ssd-CF <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/mobilenet-ssd>`_
|
||||
- SSD (MobileNet)_COCO-2017_Caffe
|
||||
- object detection
|
||||
- 300x300
|
||||
* - `mobilenet-v2-pytorch <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/mobilenet-v2-pytorch>`_
|
||||
- Mobilenet V2 PyTorch
|
||||
- classification
|
||||
- 224x224
|
||||
* - `resnet-18-pytorch <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/resnet-18-pytorch>`_
|
||||
- ResNet-18 PyTorch
|
||||
- classification
|
||||
- 224x224
|
||||
* - `resnet-50-TF <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/resnet-50-tf>`_
|
||||
- ResNet-50_v1_ILSVRC-2012
|
||||
- classification
|
||||
- 224x224
|
||||
* - `ssd-resnet34-1200-onnx <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/ssd-resnet34-1200-onnx>`_
|
||||
- ssd-resnet34 onnx model
|
||||
- object detection
|
||||
- 1200x1200
|
||||
* - `unet-camvid-onnx-0001 <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/intel/unet-camvid-onnx-0001>`_
|
||||
- U-Net
|
||||
- semantic segmentation
|
||||
- 368x480
|
||||
* - `yolo-v3-tiny-tf <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolo-v3-tiny-tf>`_
|
||||
- YOLO v3 Tiny
|
||||
- object detection
|
||||
- 416x416
|
||||
* - `yolo_v4-TF <https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolo-v4-tf>`_
|
||||
- Yolo-V4 TF
|
||||
- object detection
|
||||
- 608x608
|
||||
|
||||
|
||||
.. dropdown:: Where can I purchase the specific hardware used in the benchmarking?
|
||||
|
||||
Intel partners with vendors all over the world. For a list of Hardware Manufacturers, see the
|
||||
`Intel® AI: In Production Partners & Solutions Catalog <https://www.intel.com/content/www/us/en/internet-of-things/ai-in-production/partners-solutions-catalog.html>`_.
|
||||
For more details, see the [Supported Devices](../OV_Runtime_UG/supported_plugins/Supported_Devices.md)
|
||||
documentation. Before purchasing any hardware, you can test and run
|
||||
models remotely, using `Intel® DevCloud for the Edge <http://devcloud.intel.com/edge/>`_.
|
||||
|
||||
.. dropdown:: How can I optimize my models for better performance or accuracy?
|
||||
|
||||
A set of guidelines and recommendations for optimizing models is available in the
|
||||
[optimization guide](../optimization_guide/dldt_deployment_optimization_guide.md).
|
||||
Join the conversation in the `Community Forum <https://software.intel.com/en-us/forums/intel-distribution-of-openvino-toolkit>`_
|
||||
for further support.
|
||||
|
||||
.. dropdown:: Why are INT8 optimized models used for benchmarking on CPUs with no VNNI support?
|
||||
|
||||
The benefit of low-precision optimization using the OpenVINO™
|
||||
toolkit model optimizer extends beyond processors supporting VNNI
|
||||
through Intel® DL Boost. The reduced bit width of INT8 compared to FP32
|
||||
allows Intel® CPU to process the data faster. Therefore, it offers
|
||||
better throughput on any converted model, regardless of the
|
||||
intrinsically supported low-precision optimizations within Intel®
|
||||
hardware. For comparison on boost factors for different network models
|
||||
and a selection of Intel® CPU architectures, including AVX-2 with Intel®
|
||||
Core™ i7-8700T, and AVX-512 (VNNI) with Intel® Xeon® 5218T and Intel®
|
||||
Xeon® 8270, refer to the [Model Accuracy for INT8 and FP32 Precision](performance_int8_vs_fp32.md) article.
|
||||
|
||||
.. dropdown:: Where can I search for OpenVINO™ performance results based on HW-platforms?
|
||||
|
||||
The website format has changed in order to support a more common
|
||||
approach of searching for the performance results of a given neural
|
||||
network model on different HW-platforms, as opposed to reviewing
|
||||
performance of a given HW-platform when working with different neural
|
||||
network models.
|
||||
|
||||
.. dropdown:: How is Latency measured?
|
||||
|
||||
Latency is measured by running the OpenVINO™ Runtime in
|
||||
synchronous mode. In this mode, each frame or image is processed through
|
||||
the entire set of stages (pre-processing, inference, post-processing)
|
||||
before the next frame or image is processed. This KPI is relevant for
|
||||
applications where the inference on a single image is required. For
|
||||
example, the analysis of an ultrasound image in a medical application
|
||||
or the analysis of a seismic image in the oil & gas industry. Other use
|
||||
cases include real or near real-time applications, e.g. the response of an
|
||||
industrial robot to changes in its environment and obstacle avoidance
|
||||
for autonomous vehicles, where a quick response to the result of the
|
||||
inference is required.
|
||||
|
||||
|
||||
@endsphinxdirective
|
||||
@@ -5,16 +5,16 @@ The following table presents the absolute accuracy drop calculated as the accura
|
||||
@sphinxdirective
|
||||
.. raw:: html
|
||||
|
||||
<table class="table">
|
||||
<table class="table" id="model-accuracy-and-perf-int8-fp32-table">
|
||||
<tr align="left">
|
||||
<th></th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
<th>Intel® Core™ i9-12900K @ 3.2 GHz (AVX2)</th>
|
||||
<th>Intel® Xeon® 6338 @ 2.0 GHz (VNNI)</th>
|
||||
<th>iGPU Gen12LP (Intel® Core™ i9-12900K @ 3.2 GHz)</th>
|
||||
<th class="light-header">Intel® Core™ i9-12900K @ 3.2 GHz (AVX2)</th>
|
||||
<th class="light-header">Intel® Xeon® 6338 @ 2.0 GHz (VNNI)</th>
|
||||
<th class="light-header">iGPU Gen12LP (Intel® Core™ i9-12900K @ 3.2 GHz)</th>
|
||||
</tr>
|
||||
<tr align="left">
|
||||
<tr align="left" class="header">
|
||||
<th>OpenVINO Benchmark <br>Model Name</th>
|
||||
<th>Dataset</th>
|
||||
<th>Metric Name</th>
|
||||
@@ -24,105 +24,105 @@ The following table presents the absolute accuracy drop calculated as the accura
|
||||
<td>bert-base-cased</td>
|
||||
<td>SST-2</td>
|
||||
<td>accuracy</td>
|
||||
<td>0.11</td>
|
||||
<td>0.34</td>
|
||||
<td>0.46</td>
|
||||
<td class="data">0.11</td>
|
||||
<td class="data">0.34</td>
|
||||
<td class="data">0.46</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>bert-large-uncased-whole-word-masking-squad-0001</td>
|
||||
<td>SQUAD</td>
|
||||
<td>F1</td>
|
||||
<td>0.87</td>
|
||||
<td>1.11</td>
|
||||
<td>0.70</td>
|
||||
<td class="data">0.87</td>
|
||||
<td class="data">1.11</td>
|
||||
<td class="data">0.70</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>deeplabv3</td>
|
||||
<td>VOC2012</td>
|
||||
<td>mean_iou</td>
|
||||
<td>0.04</td>
|
||||
<td>0.04</td>
|
||||
<td>0.11</td>
|
||||
<td class="data">0.04</td>
|
||||
<td class="data">0.04</td>
|
||||
<td class="data">0.11</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>densenet-121</td>
|
||||
<td>ImageNet</td>
|
||||
<td>accuracy@top1</td>
|
||||
<td>0.56</td>
|
||||
<td>0.56</td>
|
||||
<td>0.63</td>
|
||||
<td class="data">0.56</td>
|
||||
<td class="data">0.56</td>
|
||||
<td class="data">0.63</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>efficientdet-d0</td>
|
||||
<td>COCO2017</td>
|
||||
<td>coco_precision</td>
|
||||
<td>0.63</td>
|
||||
<td>0.62</td>
|
||||
<td>0.45</td>
|
||||
<td class="data">0.63</td>
|
||||
<td class="data">0.62</td>
|
||||
<td class="data">0.45</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>faster_rcnn_<br>resnet50_coco</td>
|
||||
<td>COCO2017</td>
|
||||
<td>coco_<br>precision</td>
|
||||
<td>0.52</td>
|
||||
<td>0.55</td>
|
||||
<td>0.31</td>
|
||||
<td class="data">0.52</td>
|
||||
<td class="data">0.55</td>
|
||||
<td class="data">0.31</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>resnet-18</td>
|
||||
<td>ImageNet</td>
|
||||
<td>acc@top-1</td>
|
||||
<td>0.16</td>
|
||||
<td>0.16</td>
|
||||
<td>0.16</td>
|
||||
<td class="data">0.16</td>
|
||||
<td class="data">0.16</td>
|
||||
<td class="data">0.16</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>resnet-50</td>
|
||||
<td>ImageNet</td>
|
||||
<td>acc@top-1</td>
|
||||
<td>0.09</td>
|
||||
<td>0.09</td>
|
||||
<td>0.09</td>
|
||||
<td class="data">0.09</td>
|
||||
<td class="data">0.09</td>
|
||||
<td class="data">0.09</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>resnet-50-pytorch</td>
|
||||
<td>ImageNet</td>
|
||||
<td>acc@top-1</td>
|
||||
<td>0.13</td>
|
||||
<td>0.13</td>
|
||||
<td>0.11</td>
|
||||
<td class="data">0.13</td>
|
||||
<td class="data">0.13</td>
|
||||
<td class="data">0.11</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>ssd-resnet34-1200</td>
|
||||
<td>COCO2017</td>
|
||||
<td>COCO mAp</td>
|
||||
<td>0.09</td>
|
||||
<td>0.09</td>
|
||||
<td>0.13</td>
|
||||
<td class="data">0.09</td>
|
||||
<td class="data">0.09</td>
|
||||
<td class="data">0.13</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>unet-camvid-onnx-0001</td>
|
||||
<td>CamVid</td>
|
||||
<td>mean_iou@mean</td>
|
||||
<td>0.56</td>
|
||||
<td>0.56</td>
|
||||
<td>0.60</td>
|
||||
<td class="data">0.56</td>
|
||||
<td class="data">0.56</td>
|
||||
<td class="data">0.60</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>yolo-v3-tiny</td>
|
||||
<td>COCO2017</td>
|
||||
<td>COCO mAp</td>
|
||||
<td>0.12</td>
|
||||
<td>0.12</td>
|
||||
<td>0.17</td>
|
||||
<td class="data">0.12</td>
|
||||
<td class="data">0.12</td>
|
||||
<td class="data">0.17</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>yolo_v4</td>
|
||||
<td>COCO2017</td>
|
||||
<td>COCO mAp</td>
|
||||
<td>0.52</td>
|
||||
<td>0.52</td>
|
||||
<td>0.54</td>
|
||||
<td class="data">0.52</td>
|
||||
<td class="data">0.52</td>
|
||||
<td class="data">0.54</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
||||
@@ -78,10 +78,12 @@ html_theme = "openvino_sphinx_theme"
|
||||
html_theme_path = ['_themes']
|
||||
|
||||
html_theme_options = {
|
||||
"navigation_depth": 5,
|
||||
"navigation_depth": 6,
|
||||
"show_nav_level": 2,
|
||||
"use_edit_page_button": True,
|
||||
"github_url": "https://github.com/openvinotoolkit/openvino",
|
||||
"footer_items": ["footer_info"],
|
||||
"show_prev_next": False,
|
||||
}
|
||||
|
||||
html_context = {
|
||||
@@ -179,4 +181,5 @@ def setup(app):
|
||||
app.add_js_file('js/custom.js')
|
||||
app.add_js_file('js/graphs.js')
|
||||
app.add_js_file('js/graphs_ov_tf.js')
|
||||
app.add_js_file('js/open_sidebar.js')
|
||||
|
||||
@@ -4,120 +4,17 @@
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: API 2.0
|
||||
:hidden:
|
||||
|
||||
openvino_2_0_transition_guide
|
||||
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: Model preparation
|
||||
:hidden:
|
||||
|
||||
openvino_docs_model_processing_introduction
|
||||
Supported_Model_Formats
|
||||
openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide
|
||||
omz_tools_downloader
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: Running Inference
|
||||
:hidden:
|
||||
|
||||
openvino_docs_OV_UG_OV_Runtime_User_Guide
|
||||
openvino_inference_engine_tools_compile_tool_README
|
||||
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: Optimization and Performance
|
||||
:hidden:
|
||||
|
||||
openvino_docs_optimization_guide_dldt_optimization_guide
|
||||
openvino_docs_MO_DG_Getting_Performance_Numbers
|
||||
openvino_docs_model_optimization_guide
|
||||
openvino_docs_deployment_optimization_guide_dldt_optimization_guide
|
||||
openvino_docs_tuning_utilities
|
||||
openvino_docs_performance_benchmarks
|
||||
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: Deploying Inference
|
||||
:hidden:
|
||||
|
||||
openvino_docs_deployment_guide_introduction
|
||||
openvino_deployment_guide
|
||||
|
||||
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
:caption: THE Ecosystem
|
||||
|
||||
openvino_ecosystem
|
||||
ovms_what_is_openvino_model_server
|
||||
ovsa_get_started
|
||||
ovtf_integration
|
||||
ote_documentation
|
||||
workbench_docs_Workbench_DG_Introduction
|
||||
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: OpenVINO Extensibility
|
||||
:hidden:
|
||||
|
||||
openvino_docs_Extensibility_UG_Intro
|
||||
openvino_docs_transformations
|
||||
OpenVINO Plugin Developer Guide <openvino_docs_ie_plugin_dg_overview>
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
:caption: Use OpenVINO™ Toolkit Securely
|
||||
|
||||
openvino_docs_security_guide_introduction
|
||||
openvino_docs_security_guide_workbench
|
||||
openvino_docs_OV_UG_protecting_model_guide
|
||||
ovsa_get_started
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
:caption: Media Processing and Computer Vision Libraries
|
||||
|
||||
Intel® Deep Learning Streamer <openvino_docs_dlstreamer>
|
||||
openvino_docs_gapi_gapi_intro
|
||||
OpenCV Developer Guide <https://docs.opencv.org/master/>
|
||||
OpenCL™ Developer Guide <https://software.intel.com/en-us/openclsdk-devguide>
|
||||
OneVPL Developer Guide <https://www.intel.com/content/www/us/en/developer/articles/release-notes/oneapi-video-processing-library-release-notes.html>
|
||||
API Reference <api/api_reference>
|
||||
Tool Ecosystem <openvino_ecosystem>
|
||||
OpenVINO Extensibility <openvino_docs_Extensibility_UG_Intro>
|
||||
Media Processing and CV Libraries <media_processing_cv_libraries>
|
||||
OpenVINO™ Security <openvino_docs_security_guide_introduction>
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
This section provides reference documents that guide you through the OpenVINO toolkit workflow, from obtaining models, optimizing them, to deploying them in your own deep learning applications.
|
||||
|
||||
## Converting and Preparing Models
|
||||
With [Model Downloader](@ref omz_tools_downloader) and [Model Optimizer](MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) guides, you will learn to download pre-trained models and convert them for use with OpenVINO™. You can use your own models or choose some from a broad selection provided in the [Open Model Zoo](./model_zoo.md).
|
||||
This section provides reference documents that guide you through the OpenVINO toolkit workflow, from preparing models, optimizing them, to deploying them in your own deep learning applications.
|
||||
|
||||
## Optimization and Performance
|
||||
In this section you will find resources on [how to test inference performance](MO_DG/prepare_model/Getting_performance_numbers.md) and [how to increase it](optimization_guide/dldt_optimization_guide.md). It can be achieved by [optimizing the model](optimization_guide/model_optimization_guide.md) or [optimizing inference at runtime](optimization_guide/dldt_deployment_optimization_guide.md).
|
||||
|
||||
## Deploying Inference
|
||||
This section explains the process of creating your own inference application using [OpenVINO™ Runtime](./OV_Runtime_UG/openvino_intro.md) and documents the [OpenVINO Runtime API](./api_references.html) for both Python and C++.
|
||||
It also provides a [guide on deploying applications with OpenVINO](./OV_Runtime_UG/deployment/deployment_intro.md) and directs you to other sources on this topic.
|
||||
|
||||
## OpenVINO Ecosystem
|
||||
Apart from the core components, OpenVINO offers tools, plugins, and expansions revolving around it, even if not constituting necessary parts of its workflow. This section will give you an overview of [what makes up OpenVINO Toolkit](./Documentation/openvino_ecosystem.md).
|
||||
|
||||
## Media Processing and Computer Vision Libraries
|
||||
|
||||
The OpenVINO™ toolkit also works with the following media processing frameworks and libraries:
|
||||
|
||||
* [Intel® Deep Learning Streamer (Intel® DL Streamer)](@ref openvino_docs_dlstreamer) — A streaming media analytics framework based on GStreamer, for creating complex media analytics pipelines optimized for Intel hardware platforms. Go to the Intel® DL Streamer [documentation](https://dlstreamer.github.io/) website to learn more.
|
||||
* [Intel® oneAPI Video Processing Library (oneVPL)](https://www.intel.com/content/www/us/en/develop/documentation/oneapi-programming-guide/top/api-based-programming/intel-oneapi-video-processing-library-onevpl.html) — A programming interface for video decoding, encoding, and processing to build portable media pipelines on CPUs, GPUs, and other accelerators.
|
||||
|
||||
You can also add computer vision capabilities to your application using optimized versions of [OpenCV](https://opencv.org/).
|
||||
|
||||
|
||||
@@ -5,22 +5,12 @@
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
:caption: Install & Config
|
||||
|
||||
Installing OpenVINO <openvino_docs_install_guides_overview>
|
||||
Additional Configurations <openvino_docs_install_guides_configurations_header>
|
||||
Uninstalling <openvino_docs_install_guides_uninstalling_openvino>
|
||||
Troubleshooting <openvino_docs_get_started_guide_troubleshooting>
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
:caption: Get Started Guides
|
||||
|
||||
Interactive Tutorials (Python) <tutorials>
|
||||
Samples <openvino_docs_OV_UG_Samples_Overview>
|
||||
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
@@ -18,6 +18,7 @@ Once the prerequisites have been installed, perform the following steps:
|
||||
To use sample applications, install OpenVINO Runtime via one of the following distribution channels (other distributions do not include sample files):
|
||||
|
||||
* Archive files (recommended) - [Linux](@ref openvino_docs_install_guides_installing_openvino_from_archive_linux) | [Windows](@ref openvino_docs_install_guides_installing_openvino_from_archive_windows) | [macOS](@ref openvino_docs_install_guides_installing_openvino_from_archive_macos)
|
||||
* [APT](@ref openvino_docs_install_guides_installing_openvino_apt) or [YUM](@ref openvino_docs_install_guides_installing_openvino_yum) for Linux
|
||||
* Docker image - [Linux](@ref openvino_docs_install_guides_installing_openvino_docker_linux) | [Windows](@ref openvino_docs_install_guides_installing_openvino_docker_windows)
|
||||
* [Build from source](https://github.com/openvinotoolkit/openvino/wiki/BuildingCode)
|
||||
|
||||
|
||||
209
docs/home.rst
@@ -6,120 +6,125 @@
|
||||
.. meta::
|
||||
:google-site-verification: _YqumYQ98cmXUTwtzM_0WIIadtDc6r_TMYGbmGgNvrk
|
||||
|
||||
OpenVINO™ Documentation
|
||||
=======================
|
||||
.. rst-class:: openvino-intro-text
|
||||
|
||||
OpenVINO is an open-source toolkit for optimizing and deploying deep learning models. It provides boosted deep learning performance for vision, audio, and language models from popular frameworks like TensorFlow, PyTorch, and more. `Get started with OpenVINO. <get_started.html>`__
|
||||
|
||||
.. rst-class:: openvino-diagram
|
||||
|
||||
.. image:: _static/images/openvino_diagram.svg
|
||||
:align: center
|
||||
|
||||
|
||||
Overview
|
||||
~~~~~~~~
|
||||
|
||||
OpenVINO enables you to optimize a deep learning model from almost any framework and deploy it with best-in-class performance on a range of Intel processors and other hardware platforms.
|
||||
|
||||
A typical workflow with OpenVINO is shown below.
|
||||
|
||||
.. container:: section
|
||||
:name: welcome-to-openvino-toolkit-s-documentation
|
||||
|
||||
.. container::
|
||||
:name: hp-flow-container
|
||||
|
||||
.. container:: hp-flow-btn
|
||||
|
||||
.. image:: _static/images/OV_flow_model_hvr.svg
|
||||
:alt: link to model processing introduction
|
||||
:target: openvino_docs_model_processing_introduction.html
|
||||
|
||||
.. container:: hp-flow-arrow
|
||||
|
||||
.. image:: _static/images/OV_flow_arrow.svg
|
||||
|
||||
.. container:: hp-flow-btn
|
||||
|
||||
.. image:: _static/images/OV_flow_optimization_hvr.svg
|
||||
:alt: link to an optimization guide
|
||||
:target: openvino_docs_optimization_guide_dldt_optimization_guide.html
|
||||
|
||||
.. container:: hp-flow-arrow
|
||||
|
||||
.. image:: _static/images/OV_flow_arrow.svg
|
||||
|
||||
.. container:: hp-flow-btn
|
||||
|
||||
.. image:: _static/images/OV_flow_deployment_hvr.svg
|
||||
:alt: link to deployment introduction
|
||||
:target: openvino_docs_deployment_guide_introduction.html
|
||||
|
||||
|
||||
.. raw:: html
|
||||
|
||||
<div class="section" id="welcome-to-openvino-toolkit-s-documentation">
|
||||
|
||||
<link rel="stylesheet" type="text/css" href="_static/css/homepage_style.css">
|
||||
<div style="clear:both;"> </div>
|
||||
|
||||
<p>
|
||||
OpenVINO™ is an open-source toolkit for optimizing and deploying AI inference.
|
||||
</p><ul>
|
||||
<li>Boost deep learning performance in computer vision, automatic speech recognition, natural language processing and other common tasks </li>
|
||||
<li>Use models trained with popular frameworks like TensorFlow, PyTorch and more </li>
|
||||
<li>Reduce resource demands and efficiently deploy on a range of Intel® platforms from edge to cloud </li>
|
||||
</ul>
|
||||
<p></p>
|
||||
|
||||
<img class="HP_img_chart" src="_static/images/ov_chart.png" alt="OpenVINO allows you to process models built with Caffe, Keras, mxnet, TensorFlow, ONNX, and PyTorch. They can be easily optimized and deployed on devices running Windows, Linux, or macOS.">
|
||||
<div style="clear:both;"> </div>
|
||||
<p>Check the full range of supported hardware in the
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_OV_UG_Working_with_devices.html"> Supported Devices page</a> and see how it stacks up in our
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_performance_benchmarks.html"> Performance Benchmarks page.</a> <br>
|
||||
Supports deployment on Windows, Linux, and macOS.
|
||||
</p>
|
||||
|
||||
<div class="HP_separator-header">
|
||||
<p> OpenVINO Workflow </p>
|
||||
</div>
|
||||
<div style="clear:both;"> </div>
|
||||
High-Performance Deep Learning
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
<div id="HP_flow-container">
|
||||
<div class="HP_flow-btn">
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_model_processing_introduction.html">
|
||||
<img src="_static/images/OV_flow_model_hvr.svg" alt="link to model processing introduction" />
|
||||
</a>
|
||||
</div>
|
||||
<div class="HP_flow-arrow" >
|
||||
<img src="_static/images/OV_flow_arrow.svg" alt="" />
|
||||
</div>
|
||||
<div class="HP_flow-btn">
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_optimization_guide_dldt_optimization_guide.html">
|
||||
<img src="_static/images/OV_flow_optimization_hvr.svg" alt="link to an optimization guide" />
|
||||
</a>
|
||||
</div>
|
||||
<div class="HP_flow-arrow" >
|
||||
<img src="_static/images/OV_flow_arrow.svg" alt="" />
|
||||
</div>
|
||||
<div class="HP_flow-btn">
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_deployment_guide_introduction.html">
|
||||
<img src="_static/images/OV_flow_deployment_hvr.svg" alt="link to deployment introduction" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
OpenVINO Runtime automatically optimizes deep learning pipelines using aggressive graph fusion, memory reuse, load balancing, and inferencing parallelism across CPU, GPU, VPU, and more.
|
||||
You can integrate and offload to accelerators additional operations for pre- and post-processing to reduce end-to-end latency and improve throughput.
|
||||
|
||||
Model Quantization and Compression
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Boost your model’s speed even further with quantization and other state-of-the-art compression techniques available in OpenVINO’s Post-Training Optimization Tool and Neural Network Compression Framework. These techniques also reduce your model size and memory requirements, allowing it to be deployed on resource-constrained edge hardware.
|
||||
|
||||
.. panels::
|
||||
:card: homepage-panels
|
||||
|
||||
**Local Inferencing & Model Serving**
|
||||
|
||||
You can either link directly with OpenVINO Runtime to run inference locally or use OpenVINO Model Serving to serve model inference from a separate server or within a Kubernetes environment.
|
||||
|
||||
---
|
||||
|
||||
**Improved Application Portability**
|
||||
|
||||
Write an application once, deploy it anywhere, achieving maximum performance from hardware. Automatic device discovery allows for superior deployment flexibility. OpenVINO Runtime supports Linux, Windows and MacOS and provides Python, C++ and C API. Use your preferred language and OS.
|
||||
|
||||
---
|
||||
|
||||
**Minimal External Dependencies**
|
||||
|
||||
The design with minimal external dependencies reduces the application footprint, simplifying installation and dependency management. Popular package managers enable application dependencies to be easily installed and upgraded. Custom compilation for your specific model(s) further reduces final binary size.
|
||||
|
||||
---
|
||||
|
||||
**Enhanced App Start-Up Time**
|
||||
|
||||
In applications where fast start-up is required, OpenVINO significantly reduces first-inference latency by using the CPU for initial inference and then switching to GPU or VPU once the model has been compiled and loaded to memory. Compiled models are cached, further improving start-up time.
|
||||
|
||||
|
||||
<div style="clear:both;"> </div>
|
||||
<div class="HP_separator-header">
|
||||
<p> Want to know more? </p>
|
||||
</div>
|
||||
<div style="clear:both;"> </div>
|
||||
|
||||
<div class="HP_infoboxes">
|
||||
<a href="https://docs.openvino.ai/latest/get_started.html">
|
||||
<h3>Get Started </h3>
|
||||
<p> Learn how to download, install, and configure OpenVINO. </p>
|
||||
</a>
|
||||
<a href="https://docs.openvino.ai/latest/model_zoo.html">
|
||||
<h3>Open Model Zoo </h3>
|
||||
<p> Browse through over 200 publicly available neural networks and pick the right one for your solution. </p>
|
||||
</a>
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide.html">
|
||||
<h3>Model Optimizer </h3>
|
||||
<p> Learn how to convert your model and optimize it for use with OpenVINO. </p>
|
||||
</a>
|
||||
<a href="https://docs.openvino.ai/latest/tutorials.html">
|
||||
<h3>Tutorials </h3>
|
||||
<p> Learn how to use OpenVINO based on our training material. </p>
|
||||
</a>
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_OV_UG_Samples_Overview.html">
|
||||
<h3>Samples </h3>
|
||||
<p> Try OpenVINO using ready-made applications explaining various use cases. </p>
|
||||
</a>
|
||||
<a href="https://docs.openvino.ai/latest/workbench_docs_Workbench_DG_Introduction.html">
|
||||
<h3>DL Workbench </h3>
|
||||
<p> Learn about the alternative, web-based version of OpenVINO. DL Workbench container installation is required. </p>
|
||||
</a>
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_OV_UG_OV_Runtime_User_Guide.html">
|
||||
<h3>OpenVINO™ Runtime </h3>
|
||||
<p> Learn about OpenVINO's inference mechanism which executes the IR, ONNX, Paddle models on target devices. </p>
|
||||
</a>
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_optimization_guide_dldt_optimization_guide.html">
|
||||
<h3>Tune & Optimize </h3>
|
||||
<p> Model-level (e.g. quantization) and Runtime (i.e. application) -level optimizations to make your inference as fast as possible. </p>
|
||||
</a>
|
||||
<a href="https://docs.openvino.ai/latest/openvino_docs_performance_benchmarks.html">
|
||||
<h3>Performance<br> Benchmarks </h3>
|
||||
<p> View performance benchmark results for various models on Intel platforms. </p>
|
||||
</a>
|
||||
</div>
|
||||
<div style="clear:both;"> </div>
|
||||
</div>
|
||||
Supported Devices
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
OpenVINO is supported on a wide range of hardware platforms.
|
||||
|
||||
`Visit the Supported Devices page for a full list of OpenVINO-compatible platforms. <openvino_docs_OV_UG_supported_plugins_Supported_Devices.html>`__
|
||||
|
||||
Check the `Performance Benchmarks <openvino_docs_performance_benchmarks.html>`__ page to see how fast OpenVINO runs popular models on a variety of processors. OpenVINO supports deployment on Windows, Linux, and macOS.
|
||||
|
||||
.. image:: _static/images/supported_devices.png
|
||||
:width: 70%
|
||||
:align: center
|
||||
|
||||
|
||||
Get Started
|
||||
~~~~~~~~~~~
|
||||
|
||||
`Visit the Get Started Guide to learn the basics of OpenVINO and explore its features with quick start examples. <get_started.html>`__
|
||||
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:hidden:
|
||||
|
||||
get_started
|
||||
documentation
|
||||
tutorials
|
||||
api/api_reference
|
||||
model_zoo
|
||||
resources
|
||||
|
||||
GET STARTED <get_started>
|
||||
LEARN OPENVINO <learn_openvino>
|
||||
OPENVINO WORKFLOW <openvino_workflow>
|
||||
DOCUMENTATION <documentation>
|
||||
MODEL ZOO <model_zoo>
|
||||
RESOURCES <resources>
|
||||
RELEASE NOTES <https://software.intel.com/content/www/us/en/develop/articles/openvino-relnotes.html>
|
||||
|
||||
3
docs/img/quantization_picture.png
Normal file
@@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:271ec8f099a2b9c617a374934596519d228e67967e6e1d8cebbe05de5d080d3b
|
||||
size 45899
|
||||
@@ -3,4 +3,9 @@ OpenVINO™ Documentation
|
||||
|
||||
.. toctree::
|
||||
|
||||
home
|
||||
home
|
||||
Install <https://www.intel.com/content/www/us/en/developer/tools/openvino-toolkit/download.html>
|
||||
Blog <https://blog.openvino.ai/>
|
||||
Forum <https://community.intel.com/t5/Intel-Distribution-of-OpenVINO/bd-p/distribution-openvino-toolkit>
|
||||
Training <https://www.intel.com/content/www/us/en/developer/tools/devcloud/edge/learn/certification.html>
|
||||
GitHub <https://github.com/openvinotoolkit>
|
||||
|
||||
@@ -10,7 +10,7 @@ OpenVINO Development Tools is a set of utilities that make it easy to develop an
|
||||
|
||||
The instructions on this page show how to install OpenVINO Development Tools. If you are a Python developer, it only takes a few simple steps to install the tools with PyPI. If you are developing in C++, OpenVINO Runtime must be installed separately before installing OpenVINO Development Tools.
|
||||
|
||||
In both cases, Python 3.6 - 3.10 need be installed on your machine before starting.
|
||||
In both cases, Python 3.7 - 3.10 needs to be installed on your machine before starting.
|
||||
|
||||
> **NOTE**: From the 2022.1 release, the OpenVINO™ Development Tools can only be installed via PyPI.
|
||||
|
||||
@@ -57,19 +57,19 @@ Activate the newly created Python virtual environment by issuing this command:
|
||||
.. tab:: Linux and macOS
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
|
||||
source openvino_env/bin/activate
|
||||
|
||||
|
||||
.. tab:: Windows
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
|
||||
openvino_env\Scripts\activate
|
||||
|
||||
|
||||
.. important::
|
||||
|
||||
The above command must be re-run every time a new command terminal window is opened.
|
||||
|
||||
The above command must be re-run every time a new command terminal window is opened.
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
@@ -91,65 +91,75 @@ Note that the commands are different for a Python installation and a C++ install
|
||||
|
||||
.. tab:: Python
|
||||
|
||||
To install and configure the components of the development package for working with specific frameworks, use the following command:
|
||||
To install OpenVINO Development Tools into the existing environment with the deep learning framework of your choice, run the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pip install openvino-dev[extras]
|
||||
|
||||
where the `extras` parameter specifies one or more deep learning frameworks via these values: `caffe`, `kaldi`, `mxnet`, `onnx`, `pytorch`, `tensorflow`, `tensorflow2`. Make sure that you install the corresponding frameworks for your models.
|
||||
|
||||
For example, to install and configure the components for working with TensorFlow 2.x and ONNX, use the following command:
|
||||
|
||||
|
||||
pip install openvino-dev
|
||||
|
||||
In case that you encounter any compatibility issues between OpenVINO and your deep learning framework, you may install OpenVINO Development Tools into a separate environment. Use the following command to get specific validated versions of your framework:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
|
||||
pip install openvino-dev[extras]
|
||||
|
||||
where the `extras` parameter specifies one or more deep learning frameworks via these values: `caffe`, `kaldi`, `mxnet`, `onnx`, `pytorch`, `tensorflow`, `tensorflow2`. Make sure that you install the corresponding frameworks for your models.
|
||||
|
||||
For example, to install and configure the components for working with TensorFlow 2.x and ONNX, use the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pip install openvino-dev[tensorflow2,onnx]
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
|
||||
Model Optimizer support for TensorFlow 1.x environment has been deprecated. Use the `tensorflow2` parameter to install a TensorFlow 2.x environment that can convert both TensorFlow 1.x and 2.x models. If your model isn't compatible with the TensorFlow 2.x environment, use the `tensorflow` parameter to install the TensorFlow 1.x environment. The TF 1.x environment is provided only for legacy compatibility reasons.
|
||||
|
||||
|
||||
.. tab:: C++
|
||||
|
||||
When using OpenVINO Development Tools for C++ development, it’s important to install the same version as OpenVINO Runtime. Following the instructions below will ensure that you are installing a version that matches that of OpenVINO Runtime.
|
||||
|
||||
**Recommended: Install Using the Requirements Files**
|
||||
After you have installed OpenVINO Runtime from an archive file, you can find a set of requirements files in the <INSTALL_DIR>\tools\ directory. The requirements files will install the matching version of OpenVINO Development Tools and its dependencies.
|
||||
|
||||
1. Install the OpenVINO Development Tools mandatory requirements using the following command:
|
||||
When using OpenVINO Development Tools for C++ development, it’s important to install the same version as OpenVINO Runtime. Following the instructions below will ensure that you are installing a version that matches that of OpenVINO Runtime.
|
||||
|
||||
**Recommended: Install Using the Requirements Files**
|
||||
|
||||
After you have installed OpenVINO Runtime from an archive file, you can find a set of requirements files in the <INSTALL_DIR>\tools\ directory. The requirements files will install the matching version of OpenVINO Development Tools and its dependencies.
|
||||
|
||||
1. Install the OpenVINO Development Tools mandatory requirements using the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pip install -r <INSTALL_DIR>\tools\requirements.txt
|
||||
|
||||
2. If you are using additional frameworks, you must also install the requirements for those frameworks using the corresponding requirements file. For example, if you are using a TensorFlow model, use the following command to install requirements for TensorFlow:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pip install -r <INSTALL_DIR>\tools\requirements_tensorflow2.txt
|
||||
|
||||
**Alternative: Install the openvino-dev Package from PyPI**
|
||||
|
||||
You can also install OpenVINO Development Tools from PyPI using the following command.
|
||||
|
||||
.. important::
|
||||
|
||||
Make sure that the `openvino-dev` version you specified matches your installed version of OpenVINO Runtime. Otherwise, compatibility errors are likely to occur.
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pip install -r <INSTALL_DIR>\tools\requirements.txt
|
||||
|
||||
2. If you are using additional frameworks, you must also install the requirements for those frameworks using the corresponding requirements file. For example, if you are using a TensorFlow model, use the following command to install requirements for TensorFlow:
|
||||
|
||||
|
||||
pip install openvino-dev[EXTRAS]==2022.3.0
|
||||
|
||||
where the EXTRAS parameter specifies one or more deep learning frameworks via these values: caffe, kaldi, mxnet, onnx, pytorch, tensorflow, tensorflow2. Make sure that you install the corresponding frameworks for your models. For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pip install -r <INSTALL_DIR>\tools\requirements_tensorflow2.txt
|
||||
|
||||
**Alternative: Install the openvino-dev Package from PyPI**
|
||||
You can also install OpenVINO Development Tools from PyPI using the following command.
|
||||
|
||||
.. important::
|
||||
Make sure to specify the `openvino-dev` version that matches your installed version of OpenVINO Runtime. Otherwise, compatibility errors are likely to occur.
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pip install openvino-dev[EXTRAS]==2022.2
|
||||
|
||||
where the EXTRAS parameter specifies one or more deep learning frameworks via these values: caffe, kaldi, mxnet, onnx, pytorch, tensorflow, tensorflow2. Make sure that you install the corresponding frameworks for your models. For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pip install openvino-dev[tensorflow2,onnx]==2022.2
|
||||
|
||||
|
||||
pip install openvino-dev[tensorflow2,onnx]==2022.3.0
|
||||
|
||||
.. note::
|
||||
|
||||
|
||||
Model Optimizer support for TensorFlow 1.x environment has been deprecated. Use the `tensorflow2` parameter to install a TensorFlow 2.x environment that can convert both TensorFlow 1.x and 2.x models. If your model isn't compatible with the TensorFlow 2.x environment, use the `tensorflow` parameter to install the TensorFlow 1.x environment. The TF 1.x environment is provided only for legacy compatibility reasons.
|
||||
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
For more details on the openvino-dev PyPI package, see https://pypi.org/project/openvino-dev/.
|
||||
|
||||
### Step 4. Test the Installation
|
||||
@@ -170,12 +180,12 @@ Learn more about OpenVINO and use it in your own application by trying out some
|
||||
### Get started with Python
|
||||
<img src="https://user-images.githubusercontent.com/15709723/127752390-f6aa371f-31b5-4846-84b9-18dd4f662406.gif" width=400>
|
||||
|
||||
Try the [Python Quick Start Example](https://docs.openvino.ai/2022.2/notebooks/201-vision-monodepth-with-output.html) to estimate depth in a scene using an OpenVINO monodepth model in a Jupyter Notebook inside your web browser.
|
||||
Try the [Python Quick Start Example](https://docs.openvino.ai/2022.3/notebooks/201-vision-monodepth-with-output.html) to estimate depth in a scene using an OpenVINO monodepth model in a Jupyter Notebook inside your web browser.
|
||||
|
||||
Visit the [Tutorials](../tutorials.md) page for more Jupyter Notebooks to get you started with OpenVINO, such as:
|
||||
* [OpenVINO Python API Tutorial](https://docs.openvino.ai/2022.2/notebooks/002-openvino-api-with-output.html)
|
||||
* [Basic image classification program with Hello Image Classification](https://docs.openvino.ai/2022.2/notebooks/001-hello-world-with-output.html)
|
||||
* [Convert a PyTorch model and use it for image background removal](https://docs.openvino.ai/2022.2/notebooks/205-vision-background-removal-with-output.html)
|
||||
* [OpenVINO Python API Tutorial](https://docs.openvino.ai/2022.3/notebooks/002-openvino-api-with-output.html)
|
||||
* [Basic image classification program with Hello Image Classification](https://docs.openvino.ai/2022.3/notebooks/001-hello-world-with-output.html)
|
||||
* [Convert a PyTorch model and use it for image background removal](https://docs.openvino.ai/2022.3/notebooks/205-vision-background-removal-with-output.html)
|
||||
|
||||
### Get started with C++
|
||||
<img src="https://user-images.githubusercontent.com/36741649/127170593-86976dc3-e5e4-40be-b0a6-206379cd7df5.jpg" width=400>
|
||||
|
||||
@@ -1,10 +1,224 @@
|
||||
# Install OpenVINO™ Runtime for Linux Using APT Repository
|
||||
# Install Intel® Distribution of OpenVINO™ Toolkit for Linux Using APT Repository {#openvino_docs_install_guides_installing_openvino_apt}
|
||||
|
||||
Currently only the following ways are provided to install OpenVINO™:
|
||||
This guide provides detailed steps for installing OpenVINO™ Runtime through the APT repository and guidelines for installing OpenVINO Development Tools.
|
||||
|
||||
* [Install OpenVINO Runtime from an Archive File](installing-openvino-from-archive-linux.md)
|
||||
* [Install OpenVINO from PyPI](installing-openvino-pip.md)
|
||||
* [Install OpenVINO with Docker](installing-openvino-docker-linux.md)
|
||||
* [Build From Source](https://github.com/openvinotoolkit/openvino/wiki/BuildingCode)
|
||||
> **NOTE**: From the 2022.1 release, OpenVINO™ Development Tools can be installed via PyPI only. See [Install OpenVINO Development Tools](#installing-openvino-development-tools) for more information.
|
||||
|
||||
The other installation methods are temporarily unavailable.
|
||||
> **IMPORTANT**: By downloading and using this container and the included software, you agree to the terms and conditions of the [software license agreements](https://software.intel.com/content/dam/develop/external/us/en/documents/intel-openvino-license-agreements.pdf).
|
||||
|
||||
## System Requirements
|
||||
|
||||
@sphinxdirective
|
||||
.. tab:: Operating Systems
|
||||
|
||||
* Ubuntu 18.04 long-term support (LTS) x86, 64-bit
|
||||
* Ubuntu 20.04 long-term support (LTS) x86, 64-bit
|
||||
|
||||
.. tab:: Hardware
|
||||
|
||||
Optimized for these processors:
|
||||
|
||||
* 6th to 12th generation Intel® Core™ processors and Intel® Xeon® processors
|
||||
* 3rd generation Intel® Xeon® Scalable processor (formerly code named Cooper Lake)
|
||||
* Intel® Xeon® Scalable processor (formerly Skylake and Cascade Lake)
|
||||
* Intel Atom® processor with support for Intel® Streaming SIMD Extensions 4.1 (Intel® SSE4.1)
|
||||
* Intel Pentium® processor N4200/5, N3350/5, or N3450/5 with Intel® HD Graphics
|
||||
* Intel® Iris® Xe MAX Graphics
|
||||
|
||||
.. tab:: Processor Notes
|
||||
|
||||
Processor graphics are not included in all processors.
|
||||
See `Product Specifications`_ for information about your processor.
|
||||
|
||||
.. _Product Specifications: https://ark.intel.com/
|
||||
|
||||
.. tab:: Software
|
||||
|
||||
* `CMake 3.13 or higher, 64-bit <https://cmake.org/download/>`_
|
||||
* GCC 7.5.0 (for Ubuntu 18.04) or GCC 9.3.0 (for Ubuntu 20.04)
|
||||
* `Python 3.7 - 3.10, 64-bit <https://www.python.org/downloads/>`_
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
## Installing OpenVINO Runtime
|
||||
|
||||
### Step 1: Set Up the OpenVINO Toolkit APT Repository
|
||||
|
||||
1. Install the GPG key for the repository
|
||||
|
||||
a. Download the [GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB](https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB).
|
||||
|
||||
You can also use the following command:
|
||||
```sh
|
||||
wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
|
||||
```
|
||||
|
||||
b. Add this key to the system keyring:
|
||||
```sh
|
||||
sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
|
||||
```
|
||||
|
||||
> **NOTE**: You might need to install GnuPG: `sudo apt-get install gnupg`
|
||||
|
||||
2. Add the repository via the following command:
|
||||
@sphinxdirective
|
||||
|
||||
.. tab:: Ubuntu 18
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
echo "deb https://apt.repos.intel.com/openvino/2022 bionic main" | sudo tee /etc/apt/sources.list.d/intel-openvino-2022.list
|
||||
|
||||
.. tab:: Ubuntu 20
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
echo "deb https://apt.repos.intel.com/openvino/2022 focal main" | sudo tee /etc/apt/sources.list.d/intel-openvino-2022.list
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
3. Update the list of packages via the update command:
|
||||
```sh
|
||||
sudo apt update
|
||||
```
|
||||
|
||||
4. Verify that the APT repository is properly set up. Run the apt-cache command to see a list of all available OpenVINO packages and components:
|
||||
```sh
|
||||
apt-cache search openvino
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Install OpenVINO Runtime Using the APT Package Manager
|
||||
|
||||
#### Install OpenVINO Runtime
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. tab:: The Latest Version
|
||||
|
||||
Run the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo apt install openvino
|
||||
|
||||
|
||||
.. tab:: A Specific Version
|
||||
|
||||
1. Get a list of OpenVINO packages available for installation:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo apt-cache search openvino
|
||||
|
||||
2. Install a specific version of an OpenVINO package:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo apt install openvino-<VERSION>.<UPDATE>.<PATCH>
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo apt install openvino-2022.3.0
|
||||
|
||||
.. note::
|
||||
|
||||
You can use `--no-install-recommends` option to install only required packages. Keep in mind that the build tools must be installed **separately** if you want to compile the samples.
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
#### Check for Installed Packages and Versions
|
||||
|
||||
Run the following command:
|
||||
```sh
|
||||
apt list --installed | grep openvino
|
||||
```
|
||||
|
||||
#### Uninstall OpenVINO Runtime
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. tab:: The Latest Version
|
||||
|
||||
Run the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo apt autoremove openvino
|
||||
|
||||
|
||||
.. tab:: A Specific Version
|
||||
|
||||
Run the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo apt autoremove openvino-<VERSION>.<UPDATE>.<PATCH>
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo apt autoremove openvino-2022.3.0
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
### Step 3 (Optional): Install Software Dependencies
|
||||
|
||||
After you have installed OpenVINO Runtime, if you decided to [install OpenVINO Model Development Tools](installing-model-dev-tools.md), make sure that you install external software dependencies first.
|
||||
|
||||
Refer to <a href="openvino_docs_install_guides_installing_openvino_linux.html#install-external-dependencies">Install External Software Dependencies</a> for detailed steps.
|
||||
|
||||
### Step 4 (Optional): Configure Inference on Non-CPU Devices
|
||||
|
||||
To enable the toolkit components to use processor graphics (GPU) on your system, follow the steps in [GPU Setup Guide](@ref openvino_docs_install_guides_configurations_for_intel_gpu).
|
||||
|
||||
### Step 5: Build Samples
|
||||
|
||||
To build the C++ or C sample applications for Linux, run the `build_samples.sh` script:
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. tab:: C++
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
/usr/share/openvino/samples/cpp/build_samples.sh
|
||||
|
||||
.. tab:: C
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
/usr/share/openvino/samples/c/build_samples.sh
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
For more information, refer to <a href="openvino_docs_OV_UG_Samples_Overview.html#build-samples-linux">Build the Sample Applications on Linux</a>.
|
||||
|
||||
## Installing OpenVINO Development Tools
|
||||
|
||||
> **NOTE**: From the 2022.1 release, the OpenVINO™ Development Tools can be installed via PyPI only.
|
||||
|
||||
To install OpenVINO Development Tools, do the following steps:
|
||||
1. [Install OpenVINO Runtime](#installing-openvino-runtime) if you haven't done it yet.
|
||||
2. <a href="openvino_docs_install_guides_installing_openvino_linux.html#install-external-dependencies">Install External Software Dependencies</a>.
|
||||
3. See the **For C++ Developers** section in [Install OpenVINO Development Tools](installing-model-dev-tools.md) for detailed steps.
|
||||
|
||||
## What's Next?
|
||||
|
||||
Now you may continue with the following tasks:
|
||||
|
||||
* To convert models for use with OpenVINO, see [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md).
|
||||
* See pre-trained deep learning models in our [Open Model Zoo](../model_zoo.md).
|
||||
* Try out OpenVINO via [OpenVINO Notebooks](https://docs.openvino.ai/2022.1/notebooks/notebooks.html).
|
||||
* To write your own OpenVINO™ applications, see [OpenVINO Runtime User Guide](../OV_Runtime_UG/openvino_intro.md).
|
||||
* See sample applications in [OpenVINO™ Toolkit Samples Overview](../OV_Runtime_UG/Samples_Overview.md).
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- Intel® Distribution of OpenVINO™ toolkit home page: <https://software.intel.com/en-us/openvino-toolkit>.
|
||||
- For IoT Libraries & Code Samples see the [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit).
|
||||
@@ -7,11 +7,15 @@ This guide provides steps on creating a Docker image with Intel® Distribution o
|
||||
@sphinxdirective
|
||||
.. tab:: Target Operating Systems with Python Version
|
||||
|
||||
+----------------------------------------------+--------------------------+
|
||||
| Operating System | Supported Python Version |
|
||||
+==============================================+==========================+
|
||||
| Ubuntu 20.04 long-term support (LTS), 64-bit | 3.8 |
|
||||
+----------------------------------------------+--------------------------+
|
||||
+----------------------------------------------+-------------------------+
|
||||
| Operating System | Included Python Version |
|
||||
+==============================================+=========================+
|
||||
| Ubuntu 18.04 long-term support (LTS), 64-bit | 3.8 |
|
||||
+----------------------------------------------+-------------------------+
|
||||
| Ubuntu 20.04 long-term support (LTS), 64-bit | 3.8 |
|
||||
+----------------------------------------------+-------------------------+
|
||||
| Red Hat Enterprise Linux 8, 64-bit | 3.8 |
|
||||
+----------------------------------------------+-------------------------+
|
||||
|
||||
.. tab:: Host Operating Systems
|
||||
|
||||
|
||||
@@ -8,6 +8,8 @@
|
||||
|
||||
From Archive <openvino_docs_install_guides_installing_openvino_from_archive_linux>
|
||||
From PyPI <openvino_docs_install_guides_installing_openvino_pip>
|
||||
From APT <openvino_docs_install_guides_installing_openvino_apt>
|
||||
From YUM <openvino_docs_install_guides_installing_openvino_yum>
|
||||
Using Docker <openvino_docs_install_guides_installing_openvino_docker_linux>
|
||||
|
||||
@endsphinxdirective
|
||||
@@ -16,5 +18,7 @@ If you want to install OpenVINO™ Runtime on your Linux machine, there are a fe
|
||||
|
||||
* [Install OpenVINO Runtime from an Archive File](installing-openvino-from-archive-linux.md)
|
||||
* [Install OpenVINO from PyPI](installing-openvino-pip.md)
|
||||
* [Install OpenVINO Runtime from APT](installing-openvino-apt.md)
|
||||
* [Install OpenVINO Runtime from YUM](installing-openvino-yum.md)
|
||||
* [Install OpenVINO with Docker](installing-openvino-docker-linux.md)
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ OpenVINO Runtime may also be installed on its own without OpenVINO Development T
|
||||
|
||||
The following methods are available to install OpenVINO Runtime:
|
||||
|
||||
* Linux: You can install OpenVINO Runtime using archive files or Docker. See [Install OpenVINO on Linux](installing-openvino-linux-header.md).
|
||||
* Linux: You can install OpenVINO Runtime using APT, YUM, archive files or Docker. See [Install OpenVINO on Linux](installing-openvino-linux-header.md).
|
||||
* Windows: You can install OpenVINO Runtime using archive files or Docker. See [Install OpenVINO on Windows](installing-openvino-windows-header.md).
|
||||
* macOS: You can install OpenVINO Runtime using archive files or Docker. See [Install OpenVINO on macOS](installing-openvino-macos-header.md).
|
||||
* [Raspbian OS](installing-openvino-raspbian.md)
|
||||
|
||||
@@ -1,10 +1,198 @@
|
||||
# Install OpenVINO™ Runtime on Linux Using YUM Repository
|
||||
# Install OpenVINO™ Runtime on Linux Using YUM Repository {#openvino_docs_install_guides_installing_openvino_yum}
|
||||
|
||||
Currently only the following ways are provided to install OpenVINO™:
|
||||
This guide provides installation steps for OpenVINO™ Runtime for Linux distributed through the YUM repository.
|
||||
|
||||
* [Install OpenVINO Runtime from an Archive File](installing-openvino-from-archive-linux.md)
|
||||
* [Install OpenVINO from PyPI](installing-openvino-pip.md)
|
||||
* [Install OpenVINO with Docker](installing-openvino-docker-linux.md)
|
||||
* [Build From Source](https://github.com/openvinotoolkit/openvino/wiki/BuildingCode)
|
||||
> **NOTE**: From the 2022.1 release, the OpenVINO™ Development Tools can only be installed via PyPI. If you want to develop or optimize your models with OpenVINO, see [Install OpenVINO Development Tools](installing-model-dev-tools.md) for detailed steps.
|
||||
|
||||
The other installation methods are temporarily unavailable.
|
||||
> **IMPORTANT**: By downloading and using this container and the included software, you agree to the terms and conditions of the [software license agreements](https://software.intel.com/content/dam/develop/external/us/en/documents/intel-openvino-license-agreements.pdf).
|
||||
|
||||
## System Requirements
|
||||
|
||||
@sphinxdirective
|
||||
.. tab:: Operating Systems
|
||||
|
||||
* Red Hat Enterprise Linux 8 x86, 64-bit
|
||||
|
||||
.. tab:: Hardware
|
||||
|
||||
Optimized for these processors:
|
||||
|
||||
* 6th to 12th generation Intel® Core™ processors and Intel® Xeon® processors
|
||||
* 3rd generation Intel® Xeon® Scalable processor (formerly code named Cooper Lake)
|
||||
* Intel® Xeon® Scalable processor (formerly Skylake and Cascade Lake)
|
||||
* Intel Atom® processor with support for Intel® Streaming SIMD Extensions 4.1 (Intel® SSE4.1)
|
||||
* Intel Pentium® processor N4200/5, N3350/5, or N3450/5 with Intel® HD Graphics
|
||||
* Intel® Iris® Xe MAX Graphics
|
||||
|
||||
.. tab:: Processor Notes
|
||||
|
||||
Processor graphics are not included in all processors.
|
||||
See `Product Specifications`_ for information about your processor.
|
||||
|
||||
.. _Product Specifications: https://ark.intel.com/
|
||||
|
||||
.. tab:: Software
|
||||
|
||||
* `CMake 3.13 or higher, 64-bit <https://cmake.org/download/>`_
|
||||
* GCC 8.2.0
|
||||
* `Python 3.7 - 3.10, 64-bit <https://www.python.org/downloads/>`_
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
## Install OpenVINO Runtime
|
||||
|
||||
### Step 1: Set Up the Repository
|
||||
|
||||
1. Create the YUM repo file in the `/tmp` directory as a normal user:
|
||||
```
|
||||
tee > /tmp/openvino-2022.repo << EOF
|
||||
[OpenVINO]
|
||||
name=Intel(R) Distribution of OpenVINO 2022
|
||||
baseurl=https://yum.repos.intel.com/openvino/2022
|
||||
enabled=1
|
||||
gpgcheck=1
|
||||
repo_gpgcheck=1
|
||||
gpgkey=https://yum.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
|
||||
EOF
|
||||
```
|
||||
2. Move the new openvino-2022.repo file to the YUM configuration directory `/etc/yum.repos.d`:
|
||||
```sh
|
||||
sudo mv /tmp/openvino-2022.repo /etc/yum.repos.d
|
||||
```
|
||||
3. Verify that the new repo is properly set up by running the following command:
|
||||
```sh
|
||||
yum repolist | grep -i openvino
|
||||
```
|
||||
You will see the available list of packages.
|
||||
|
||||
|
||||
To list available OpenVINO packages, use the following command:
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
yum list 'openvino*'
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
### Step 2: Install OpenVINO Runtime Using the YUM Package Manager
|
||||
|
||||
#### Install OpenVINO Runtime
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. tab:: The Latest Version
|
||||
|
||||
Run the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo yum install openvino
|
||||
|
||||
.. tab:: A Specific Version
|
||||
|
||||
Run the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo yum install openvino-<VERSION>.<UPDATE>.<PATCH>
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo yum install openvino-2022.3.0
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
#### Check for Installed Packages and Version
|
||||
|
||||
Run the following command:
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
yum list installed 'openvino*'
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
#### Uninstall OpenVINO Runtime
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. tab:: The Latest Version
|
||||
|
||||
Run the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo yum autoremove openvino
|
||||
|
||||
|
||||
.. tab:: A Specific Version
|
||||
|
||||
Run the following command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo yum autoremove openvino-<VERSION>.<UPDATE>.<PATCH>
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
sudo yum autoremove openvino-2022.3.0
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||
### Step 3 (Optional): Install Software Dependencies
|
||||
|
||||
After you have installed OpenVINO Runtime, if you decided to [install OpenVINO Model Development Tools](installing-model-dev-tools.md), make sure that you install external software dependencies first.
|
||||
|
||||
Refer to <a href="openvino_docs_install_guides_installing_openvino_linux.html#install-external-dependencies">Install External Software Dependencies</a> for detailed steps.
|
||||
|
||||
### Step 4 (Optional): Configure Inference on Non-CPU Devices
|
||||
|
||||
To enable the toolkit components to use processor graphics (GPU) on your system, follow the steps in [GPU Setup Guide](@ref openvino_docs_install_guides_configurations_for_intel_gpu).
|
||||
|
||||
### Step 5: Build Samples
|
||||
|
||||
To build the C++ or C sample applications for Linux, run the `build_samples.sh` script:
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. tab:: C++
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
/usr/share/openvino/samples/cpp/build_samples.sh
|
||||
|
||||
.. tab:: C
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
/usr/share/openvino/samples/c/build_samples.sh
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
For more information, refer to <a href="openvino_docs_OV_UG_Samples_Overview.html#build-samples-linux">Build the Sample Applications on Linux</a>.
|
||||
|
||||
## What's Next?
|
||||
|
||||
Now you may continue with the following tasks:
|
||||
|
||||
* To convert models for use with OpenVINO, see [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md).
|
||||
* See pre-trained deep learning models in our [Open Model Zoo](../model_zoo.md).
|
||||
* Try out OpenVINO via [OpenVINO Notebooks](https://docs.openvino.ai/2022.3/notebooks/notebooks.html).
|
||||
* To write your own OpenVINO™ applications, see [OpenVINO Runtime User Guide](../OV_Runtime_UG/openvino_intro.md).
|
||||
* See sample applications in [OpenVINO™ Samples Overview](../OV_Runtime_UG/Samples_Overview.md).
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- OpenVINO™ home page: <https://software.intel.com/en-us/openvino-toolkit>
|
||||
- For IoT Libraries & Code Samples, see [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit).
|
||||
@@ -12,11 +12,13 @@ Before you start the installation, check the supported operating systems and req
|
||||
|
||||
| Supported Operating System | [Python* Version (64-bit)](https://www.python.org/) |
|
||||
| :------------------------------------------------------------| :---------------------------------------------------|
|
||||
| Ubuntu* 18.04 long-term support (LTS), 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
| Ubuntu* 20.04 long-term support (LTS), 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
| Red Hat* Enterprise Linux* 8, 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
| macOS* 10.15.x versions | 3.7, 3.8, 3.9, 3.10 |
|
||||
| Windows 10*, 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
| Ubuntu* 18.04 long-term support (LTS) x86, 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
| Ubuntu* 20.04 long-term support (LTS) x86, 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
| Red Hat* Enterprise Linux* 8 x86, 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
| CentOS 7 x86, 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
| macOS* 10.15 and higher versions, x86, 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
| macOS* 11 and higher versions, arm64 | 3.7, 3.8, 3.9, 3.10 |
|
||||
| Windows 10* and higher versions, x86, 64-bit | 3.7, 3.8, 3.9, 3.10 |
|
||||
|
||||
**C++ libraries** are also required for the installation on Windows*. To install that, you can [download the Visual Studio Redistributable file (.exe)](https://aka.ms/vs/17/release/vc_redist.x64.exe).
|
||||
|
||||
|
||||
18
docs/learn_openvino.md
Normal file
@@ -0,0 +1,18 @@
|
||||
# Learn OpenVINO {#learn_openvino}
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
Interactive Tutorials (Python) <tutorials>
|
||||
Sample Applications (Python & C++) <openvino_docs_OV_UG_Samples_Overview>
|
||||
OpenVINO API 2.0 Transition <openvino_2_0_transition_guide>
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
This section will help you get a hands-on experience with OpenVINO even if you are just starting
|
||||
to learn what OpenVINO is and how it works. It includes various types of learning materials
|
||||
accommodating different learning needs, which means you should find it useful if you are a beginner,
|
||||
as well as an experienced user.
|
||||
@@ -17,6 +17,7 @@
|
||||
<link rel="stylesheet" href="{{ pathto('_static/css/input.css', 1) }}" type="text/css" />
|
||||
<link rel="stylesheet" href="{{ pathto('_static/css/textfield.css', 1) }}" type="text/css" />
|
||||
<link rel="stylesheet" href="{{ pathto('_static/css/tabs.css', 1) }}" type="text/css" />
|
||||
<link rel="stylesheet" href="_static/css/homepage_style.css" type="text/css" />
|
||||
<script src="{{ pathto('_static/js/openvino_sphinx_theme.js', 1) }}"></script>
|
||||
{% endblock %}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Runtime Inference Optimizations {#openvino_docs_deployment_optimization_guide_dldt_optimization_guide}
|
||||
# Optimize Inference {#openvino_docs_deployment_optimization_guide_dldt_optimization_guide}
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
@@ -7,14 +7,16 @@
|
||||
:hidden:
|
||||
|
||||
openvino_docs_deployment_optimization_guide_common
|
||||
openvino_docs_OV_UG_Performance_Hints
|
||||
openvino_docs_deployment_optimization_guide_latency
|
||||
openvino_docs_deployment_optimization_guide_tput
|
||||
openvino_docs_deployment_optimization_guide_tput_advanced
|
||||
openvino_docs_OV_UG_Preprocessing_Overview
|
||||
openvino_docs_deployment_optimization_guide_internals
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
Runtime optimizations, or deployment optimizations, focus on tuning inference parameters and execution means (e.g., the optimum number of requests executed simultaneously). Unlike model-level optimizations, they are highly specific to the hardware and case they are used for, and often come at a cost.
|
||||
Runtime optimization, or deployment optimization, focuses on tuning inference parameters and execution means (e.g., the optimum number of requests executed simultaneously). Unlike model-level optimizations, they are highly specific to the hardware and case they are used for, and often come at a cost.
|
||||
`ov::hint::inference_precision` is a "typical runtime configuration" which trades accuracy for performance, allowing `fp16/bf16` execution for the layers that remain in `fp32` after quantization of the original `fp32` model.
|
||||
|
||||
Therefore, optimization should start with defining the use case. For example, if it is about processing millions of samples by overnight jobs in data centers, throughput could be prioritized over latency. On the other hand, real-time usages would likely trade off throughput to deliver the results at minimal latency. A combined scenario is also possible, targeting the highest possible throughput, while maintaining a specific latency threshold.
|
||||
@@ -22,12 +24,11 @@ Therefore, optimization should start with defining the use case. For example, if
|
||||
It is also important to understand how the full-stack application would use the inference component "end-to-end." For example, to know what stages need to be orchestrated to save workload devoted to fetching and preparing input data.
|
||||
|
||||
For more information on this topic, see the following articles:
|
||||
* [feature support by device](@ref features_support_matrix),
|
||||
|
||||
* [Inputs Pre-processing with the OpenVINO](@ref inputs_pre_processing).
|
||||
* [Async API](@ref async_api).
|
||||
* [The 'get_tensor' Idiom](@ref tensor_idiom).
|
||||
* For variably-sized inputs, consider [dynamic shapes](../OV_Runtime_UG/ov_dynamic_shapes.md).
|
||||
* [feature support by device](@ref features_support_matrix)
|
||||
* [Inputs Pre-processing with the OpenVINO](@ref inputs_pre_processing)
|
||||
* [Async API](@ref async_api)
|
||||
* [The 'get_tensor' Idiom](@ref tensor_idiom)
|
||||
* For variably-sized inputs, consider [dynamic shapes](../OV_Runtime_UG/ov_dynamic_shapes.md)
|
||||
|
||||
See the [latency](./dldt_deployment_optimization_latency.md) and [throughput](./dldt_deployment_optimization_tput.md) optimization guides, for **use-case-specific optimizations**
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
## Optimizing for the Latency {#openvino_docs_deployment_optimization_guide_latency}
|
||||
## Optimizing for Latency {#openvino_docs_deployment_optimization_guide_latency}
|
||||
|
||||
@sphinxdirective
|
||||
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
# Introduction to Performance Optimization {#openvino_docs_optimization_guide_dldt_optimization_guide}
|
||||
Even though inference performance should be defined as a combination of many factors, including accuracy and efficiency, it is most often described as the speed of execution. As the rate with which the model processes live data, it is based on two fundamentally interconnected metrics: latency and throughput.
|
||||
|
||||
|
||||
|
||||

|
||||
|
||||
**Latency** measures inference time (in ms) required to process a single input. When it comes to executing multiple inputs simultaneously (for example, via batching), the overall throughput (inferences per second, or frames per second, FPS, in the specific case of visual processing) is usually more of a concern.
|
||||
**Throughput** is calculated by dividing the number of inputs that were processed by the processing time.
|
||||
|
||||
## End-to-End Application Performance
|
||||
It is important to separate the "pure" inference time of a neural network and the end-to-end application performance. For example, data transfers between the host and a device may unintentionally affect the performance when a host input tensor is processed on the accelerator such as dGPU.
|
||||
|
||||
Similarly, input preprocessing contributes significantly to the inference time. As described in the [getting performance numbers](../MO_DG/prepare_model/Getting_performance_numbers.md) section, when evaluating *inference* performance, one option is to measure all such items separately.
|
||||
For the **end-to-end scenario**, though, consider image pre-processing with OpenVINO and the asynchronous execution as a way to lessen the communication costs (like data transfers). For more details, see the [general optimizations guide](./dldt_deployment_optimization_common.md).
|
||||
|
||||
Another specific case is **first-inference latency** (for example, when a fast application start-up is required), where the resulting performance may be well dominated by the model loading time. [Model caching](../OV_Runtime_UG/Model_caching_overview.md) may be considered as a way to improve model loading/compilation time.
|
||||
|
||||
Finally, **memory footprint** restriction is another possible concern when designing an application. While this is a motivation for the use of the *model* optimization techniques, keep in mind that the throughput-oriented execution is usually much more memory consuming. For more details, see the [Runtime Inference Optimizations guide](../optimization_guide/dldt_deployment_optimization_guide.md).
|
||||
|
||||
|
||||
> **NOTE**: To get performance numbers for OpenVINO, along with the tips on how to measure and compare it with a native framework, see the [Getting performance numbers article](../MO_DG/prepare_model/Getting_performance_numbers.md).
|
||||
|
||||
## Improving Performance: Model vs Runtime Optimizations
|
||||
|
||||
> **NOTE**: First, make sure that your model can be successfully inferred with OpenVINO Runtime.
|
||||
|
||||
There are two primary optimization approaches to improving inference performance with OpenVINO: model- and runtime-level optimizations. They are **fully compatible** and can be done independently.
|
||||
|
||||
- **Model optimizations** include model modifications, such as quantization, pruning, optimization of preprocessing, etc. For more details, refer to this [document](./model_optimization_guide.md).
|
||||
- The model optimizations directly improve the inference time, even without runtime parameters tuning (described below).
|
||||
|
||||
- **Runtime (Deployment) optimizations** include tuning of model *execution* parameters. For more details, see the [Runtime Inference Optimizations guide](../optimization_guide/dldt_deployment_optimization_guide.md).
|
||||
|
||||
## Performance benchmarks
|
||||
A wide range of public models for estimating performance and comparing the numbers (measured on various supported devices) are available in the [Performance benchmarks section](../benchmarks/performance_benchmarks.md).
|
||||
@@ -6,7 +6,7 @@
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
pot_introduction
|
||||
ptq_introduction
|
||||
tmo_introduction
|
||||
(Experimental) Protecting Model <pot_ranger_README>
|
||||
|
||||
@@ -19,9 +19,9 @@
|
||||
|
||||
- :ref:`Model Optimizer <openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide>` implements most of the optimization parameters to a model by default. Yet, you are free to configure mean/scale values, batch size, RGB vs BGR input channels, and other parameters to speed up preprocess of a model (:ref:`Embedding Preprocessing Computation <openvino_docs_MO_DG_Additional_Optimization_Use_Cases>`).
|
||||
|
||||
- :ref:`Post-training Optimization w/ POT <pot_introduction>` is designed to optimize inference of deep learning models by applying post-training methods that do not require model retraining or fine-tuning, for example, post-training 8-bit quantization.
|
||||
- :ref:`Post-training Quantization` is designed to optimize inference of deep learning models by applying post-training methods that do not require model retraining or fine-tuning, for example, post-training 8-bit integer quantization.
|
||||
|
||||
- :ref:`Training-time Optimization w/ NNCF <tmo_introduction>`, a suite of advanced methods for training-time model optimization within the DL framework, such as PyTorch and TensorFlow 2.x. It supports methods, like Quantization-aware Training and Filter Pruning. NNCF-optimized models can be inferred with OpenVINO using all the available workflows.
|
||||
- :ref:`Training-time Optimization`, a suite of advanced methods for training-time model optimization within the DL framework, such as PyTorch and TensorFlow 2.x. It supports methods, like Quantization-aware Training and Filter Pruning. NNCF-optimized models can be inferred with OpenVINO using all the available workflows.
|
||||
|
||||
@endsphinxdirective
|
||||
|
||||
|
||||